Version 1.18.0-dev.4.0

Merge 'bb96f289217952cd7d298444519fc07dca995dee' into dev
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8f44d60..23a3e2a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,9 @@
 
 ### Core library changes
 
+* `dart:core`
+  * Improved performance when parsing some common URIs.
+  * Fixed bug in `Uri.resolve` (SDK issue [26804](https://github.com/dart-lang/sdk/issues/26804)).
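+
+    As a sketch of what `Uri.resolve` computes (RFC 3986 reference
+    resolution; these inputs are illustrative, not the specific case from
+    the issue):
+
+    ```dart
+    var base = Uri.parse("http://example.com/a/b/c");
+    base.resolve("d");    // http://example.com/a/b/d
+    base.resolve("../d"); // http://example.com/a/d
+    ```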
 * `dart:io`
   * Adds file locking modes `FileLock.BLOCKING_SHARED` and
     `FileLock.BLOCKING_EXCLUSIVE`.
diff --git a/DEPS b/DEPS
index 62967ba..cbcfa64 100644
--- a/DEPS
+++ b/DEPS
@@ -55,7 +55,7 @@
   "dart2js_info_rev" : "@0a221eaf16aec3879c45719de656680ccb80d8a1",
   "dart_services_rev" : "@7aea2574e6f3924bf409a80afb8ad52aa2be4f97",
   "dart_style_tag": "@0.2.4",
-  "dartdoc_tag" : "@v0.9.6+1",
+  "dartdoc_tag" : "@v0.9.6+2",
   "dev_compiler_rev": "@7e9708eb5e9f3fcdc68b9af039d78cf39ce502b7",
   "fixnum_tag": "@0.10.5",
   "func_rev": "@8d4aea75c21be2179cb00dc2b94a71414653094e",
@@ -78,7 +78,7 @@
   "mime_rev": "@75890811d4af5af080351ba8a2853ad4c8df98dd",
   "mustache4dart_rev" : "@5724cfd85151e5b6b53ddcd3380daf188fe47f92",
   "oauth2_tag": "@1.0.0",
-  "observatory_pub_packages_rev": "@cf90eb9077177d3d6b3fd5e8289477c2385c026a",
+  "observatory_pub_packages_rev": "@e5e1e543bea10d4bed95b22ad3e7aa2b20a23584",
   "observe_rev": "@eee2b8ec34236fa46982575fbccff84f61202ac6",
   "package_config_rev": "@0.1.5",
   "path_tag": "@1.3.6",
diff --git a/README.fuchsia b/README.fuchsia
new file mode 100644
index 0000000..f09ff26
--- /dev/null
+++ b/README.fuchsia
@@ -0,0 +1,41 @@
+This is a README file describing how to build Dart for Fuchsia. It assumes that
+you have built the magenta kernel under //magenta, that its toolchains are
+under //toolchains, and that you have a Dart checkout under //dart. These are
+early days and the process is still crufty; it will improve from here.
+
+1. First, set up some symlinks in your Dart checkout:
+
+  //dart/third_party/fuchsia_tools/toolchains
+      -> symlinked to //toolchains
+  //dart/third_party/fuchsia_tools/sysroot/x86_64/usr
+      -> symlinked to //magenta/build-magenta-qemu-x86-64/sysroot/
+
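+  For example, from the root of the Dart checkout (paths are illustrative;
+  substitute your actual checkout locations):
+
+  //dart$ mkdir -p third_party/fuchsia_tools/sysroot/x86_64
+  //dart$ ln -s /path/to/toolchains third_party/fuchsia_tools/toolchains
+  //dart$ ln -s /path/to/magenta/build-magenta-qemu-x86-64/sysroot third_party/fuchsia_tools/sysroot/x86_64/usr
+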
+  Also, copy the linker script:
+
+  //magenta$ cp kernel/arch/x86/64/user.ld build-magenta-qemu-x86-64/sysroot/
+
+  and similarly for arm64.
+
+2. Build:
+
+  //dart$ tools/build.py -m product -a x64 --os=fuchsia fuchsia_test
+
+  This will produce //dart/out/ProductFuchsiaX64/fuchsia_test
+
+3. Strip it:
+
+  //dart$ third_party/fuchsia_tools/toolchains/x86_64-elf-5.3.0-Linux-x86_64/bin/x86_64-elf-strip out/ProductFuchsiaX64/fuchsia_test -o out/ProductFuchsiaX64/fuchsia_test.stripped
+
+4. Make a file //magenta/fuchsia_test.manifest containing:
+
+  bin/fuchsia_test=//dart/out/ProductFuchsiaX64/fuchsia_test.stripped
+
+  where //dart is the actual path to your Dart checkout.
+
+5. Make an extra bootfs:
+
+  //magenta$ build-magenta-qemu-x86-64/tools/mkbootfs -o fuchsia_test.bootfs fuchsia_test.manifest
+
+6. Run:
+
+  //magenta$ ./scripts/run-magenta-x86-64 -x fuchsia_test.bootfs
diff --git a/build/OWNERS b/build/OWNERS
new file mode 100644
index 0000000..17d067c
--- /dev/null
+++ b/build/OWNERS
@@ -0,0 +1,5 @@
+cjhopman@chromium.org
+dpranke@chromium.org
+jochen@chromium.org
+scottmg@chromium.org
+thakis@chromium.org
diff --git a/build/PRESUBMIT.py b/build/PRESUBMIT.py
new file mode 100644
index 0000000..fca962f
--- /dev/null
+++ b/build/PRESUBMIT.py
@@ -0,0 +1,16 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+def _RunTests(input_api, output_api):
+  return (input_api.canned_checks.RunUnitTestsInDirectory(
+          input_api, output_api, '.', whitelist=[r'.+_test.py$']))
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return _RunTests(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return _RunTests(input_api, output_api)
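+
+# The whitelist passed to RunUnitTestsInDirectory above picks up any file in
+# this directory whose name ends in _test.py and runs it as a unit test. A
+# minimal test that would match looks like this (file name and contents are
+# hypothetical, for illustration only):
+#
+#   # build/example_test.py
+#   import unittest
+#
+#   class ExampleTest(unittest.TestCase):
+#     def test_truth(self):
+#       self.assertTrue(True)
+#
+#   if __name__ == '__main__':
+#     unittest.main()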
diff --git a/build/README.chromium b/build/README.chromium
new file mode 100644
index 0000000..012df35
--- /dev/null
+++ b/build/README.chromium
@@ -0,0 +1,15 @@
+List of property sheets to be included by projects:
+  common.vsprops
+    Not used anymore. No-op. Kept for compatibility with current projects.
+
+  debug.vsprops
+    Enables debug settings. Must be included directly in Debug configuration. Includes internal\essential.vsprops.
+
+  external_code.vsprops
+    Contains settings made to simplify usage of external (non-Google) code. It relaxes the warning levels. Should be included after debug.vsprops or release.vsprops to override their settings.
+
+  output_dll_copy.rules
+    Run to enable automatic copying of DLLs when they are used as input files in a vcproj project.
+
+  release.vsprops
+    Enables release settings. Must be included directly in Release configuration. Includes internal\essential.vsprops. Also includes "internal\release_impl$(CHROME_BUILD_TYPE).vsprops", so the behavior is dependent on the CHROME_BUILD_TYPE environment variable (for example, setting CHROME_BUILD_TYPE to "_official" selects internal\release_impl_official.vsprops).
diff --git a/build/README.dart b/build/README.dart
new file mode 100644
index 0000000..ac57fbe
--- /dev/null
+++ b/build/README.dart
@@ -0,0 +1,7 @@
+This directory was taken from a snapshot of flutter/engine/src/build/.
+
+The snapshot was taken with a recursive copy (`cp -R`) of the directory from
+the flutter repository.
+
+The contents are used to support the GN build system.
+
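+For example, with a flutter engine checkout alongside the Dart checkout, the
+snapshot could be refreshed with something like (paths are illustrative):
+
+  $ cp -R ../flutter/engine/src/build/. build/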
diff --git a/build/all.gyp b/build/all.gyp
new file mode 100644
index 0000000..b36fae6
--- /dev/null
+++ b/build/all.gyp
@@ -0,0 +1,1442 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # A hook that can be overridden in other repositories to add additional
+    # compilation targets to 'All'.
+    'app_targets%': [],
+    # For Android-specific targets.
+    'android_app_targets%': [],
+  },
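+  # For example, a downstream repository's wrapper .gyp file could supply
+  # (target name illustrative):
+  #
+  #   'variables': {
+  #     'app_targets': ['../myapp/myapp.gyp:myapp'],
+  #   },
+  #
+  # The '%' suffix on the declarations above marks them as overridable
+  # defaults, so such an override takes precedence.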
+  'targets': [
+    {
+      'target_name': 'All',
+      'type': 'none',
+      'xcode_create_dependents_test_runner': 1,
+      'dependencies': [
+        '<@(app_targets)',
+        'some.gyp:*',
+        '../base/base.gyp:*',
+        '../components/components.gyp:*',
+        '../components/components_tests.gyp:*',
+        '../content/content.gyp:*',
+        '../crypto/crypto.gyp:*',
+        '../net/net.gyp:*',
+        '../sdch/sdch.gyp:*',
+        '../sql/sql.gyp:*',
+        '../testing/gmock.gyp:*',
+        '../testing/gtest.gyp:*',
+        '../third_party/icu/icu.gyp:*',
+        '../third_party/libxml/libxml.gyp:*',
+        '../third_party/sqlite/sqlite.gyp:*',
+        '../third_party/zlib/zlib.gyp:*',
+        '../ui/accessibility/accessibility.gyp:*',
+        '../ui/base/ui_base.gyp:*',
+        '../ui/display/display.gyp:display_unittests',
+        '../ui/snapshot/snapshot.gyp:*',
+        '../url/url.gyp:*',
+      ],
+      'conditions': [
+        ['OS!="ios" and OS!="mac"', {
+          'dependencies': [
+            '../ui/touch_selection/ui_touch_selection.gyp:*',
+          ],
+        }],
+        ['OS=="ios"', {
+          'dependencies': [
+            '../chrome/chrome.gyp:browser',
+            '../chrome/chrome.gyp:browser_ui',
+            '../ios/ios.gyp:*',
+            # NOTE: This list of targets is present because
+            # mojo_base.gyp:mojo_base cannot be built on iOS, as
+            # javascript-related targets cause v8 to be built.
+            '../mojo/mojo_base.gyp:mojo_common_lib',
+            '../mojo/mojo_base.gyp:mojo_common_unittests',
+            '../google_apis/google_apis.gyp:google_apis_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../third_party/mojo/mojo_edk.gyp:mojo_system_impl',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_bindings_unittests',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_environment_unittests',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_system_unittests',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_utility_unittests',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_system_unittests',
+            '../third_party/mojo/mojo_public.gyp:mojo_cpp_bindings',
+            '../third_party/mojo/mojo_public.gyp:mojo_public_test_utils',
+            '../third_party/mojo/mojo_public.gyp:mojo_system',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+          ],
+        }],
+        ['OS=="android"', {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:content_shell_apk',
+            '<@(android_app_targets)',
+            'android_builder_tests',
+            '../tools/telemetry/telemetry.gyp:*#host',
+            # TODO(nyquist) This should instead be a target for sync when all of
+            # the sync-related code for Android has been upstreamed.
+            # See http://crbug.com/159203
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_javalib',
+          ],
+          'conditions': [
+            ['chromecast==0', {
+              'dependencies': [
+                '../android_webview/android_webview.gyp:android_webview_apk',
+                '../android_webview/android_webview.gyp:system_webview_apk',
+                '../android_webview/android_webview_shell.gyp:android_webview_shell_apk',
+                '../chrome/android/chrome_apk.gyp:chrome_public_apk',
+                '../chrome/chrome.gyp:chrome_shell_apk',
+                '../chrome/chrome.gyp:chrome_sync_shell_apk',
+                '../remoting/remoting.gyp:remoting_apk',
+              ],
+            }],
+            # TODO: Enable packed relocations for x64. See: b/20532404
+            ['target_arch != "x64"', {
+              'dependencies': [
+                '../third_party/android_platform/relocation_packer.gyp:android_relocation_packer_unittests#host',
+              ],
+            }],
+          ],
+        }, {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:*',
+            # TODO: This should build on Android and the target should move to the list above.
+            '../sync/sync.gyp:*',
+          ],
+        }],
+        ['OS!="ios" and OS!="android" and chromecast==0', {
+          'dependencies': [
+            '../third_party/re2/re2.gyp:re2',
+            '../chrome/chrome.gyp:*',
+            '../chrome/tools/profile_reset/jtl_compiler.gyp:*',
+            '../cc/blink/cc_blink_tests.gyp:*',
+            '../cc/cc_tests.gyp:*',
+            '../device/usb/usb.gyp:*',
+            '../extensions/extensions.gyp:*',
+            '../extensions/extensions_tests.gyp:*',
+            '../gin/gin.gyp:*',
+            '../gpu/gpu.gyp:*',
+            '../gpu/tools/tools.gyp:*',
+            '../ipc/ipc.gyp:*',
+            '../ipc/mojo/ipc_mojo.gyp:*',
+            '../jingle/jingle.gyp:*',
+            '../media/cast/cast.gyp:*',
+            '../media/media.gyp:*',
+            '../media/midi/midi.gyp:*',
+            '../mojo/mojo.gyp:*',
+            '../mojo/mojo_base.gyp:*',
+            '../ppapi/ppapi.gyp:*',
+            '../ppapi/ppapi_internal.gyp:*',
+            '../ppapi/tools/ppapi_tools.gyp:*',
+            '../printing/printing.gyp:*',
+            '../skia/skia.gyp:*',
+            '../sync/tools/sync_tools.gyp:*',
+            '../third_party/WebKit/public/all.gyp:*',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:*',
+            '../third_party/codesighs/codesighs.gyp:*',
+            '../third_party/ffmpeg/ffmpeg.gyp:*',
+            '../third_party/iccjpeg/iccjpeg.gyp:*',
+            '../third_party/libpng/libpng.gyp:*',
+            '../third_party/libusb/libusb.gyp:*',
+            '../third_party/libwebp/libwebp.gyp:*',
+            '../third_party/libxslt/libxslt.gyp:*',
+            '../third_party/lzma_sdk/lzma_sdk.gyp:*',
+            '../third_party/mesa/mesa.gyp:*',
+            '../third_party/modp_b64/modp_b64.gyp:*',
+            '../third_party/npapi/npapi.gyp:*',
+            '../third_party/ots/ots.gyp:*',
+            '../third_party/pdfium/samples/samples.gyp:*',
+            '../third_party/qcms/qcms.gyp:*',
+            '../tools/gn/gn.gyp:*',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../v8/tools/gyp/v8.gyp:*',
+            '<(libjpeg_gyp_path):*',
+          ],
+        }],
+        ['OS!="ios"', {
+          'dependencies': [
+            '../device/bluetooth/bluetooth.gyp:*',
+            '../device/device_tests.gyp:*',
+            '../gpu/skia_runner/skia_runner.gyp:*',
+          ],
+        }],
+        ['use_openssl==0 and (OS=="mac" or OS=="ios" or OS=="win")', {
+          'dependencies': [
+            '../third_party/nss/nss.gyp:*',
+           ],
+        }],
+        ['OS=="win" or OS=="ios" or OS=="linux"', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:*',
+           ],
+        }],
+        ['OS=="mac"', {
+          'dependencies': [
+            '../sandbox/sandbox.gyp:*',
+            '../third_party/crashpad/crashpad/crashpad.gyp:*',
+            '../third_party/ocmock/ocmock.gyp:*',
+          ],
+        }],
+        ['OS=="linux"', {
+          'dependencies': [
+            '../courgette/courgette.gyp:*',
+            '../sandbox/sandbox.gyp:*',
+          ],
+          'conditions': [
+            ['branding=="Chrome"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_packages_<(channel)',
+              ],
+            }],
+            ['enable_ipc_fuzzer==1', {
+              'dependencies': [
+                '../tools/ipc_fuzzer/ipc_fuzzer.gyp:*',
+              ],
+            }],
+            ['use_dbus==1', {
+              'dependencies': [
+                '../dbus/dbus.gyp:*',
+              ],
+            }],
+          ],
+        }],
+        ['chromecast==1', {
+          'dependencies': [
+            '../chromecast/chromecast.gyp:*',
+          ],
+        }],
+        ['use_x11==1', {
+          'dependencies': [
+            '../tools/xdisplaycheck/xdisplaycheck.gyp:*',
+          ],
+        }],
+        ['OS=="win"', {
+          'conditions': [
+            ['win_use_allocator_shim==1', {
+              'dependencies': [
+                '../base/allocator/allocator.gyp:*',
+              ],
+            }],
+          ],
+          'dependencies': [
+            '../chrome/tools/crash_service/caps/caps.gyp:*',
+            '../chrome_elf/chrome_elf.gyp:*',
+            '../cloud_print/cloud_print.gyp:*',
+            '../courgette/courgette.gyp:*',
+            '../rlz/rlz.gyp:*',
+            '../sandbox/sandbox.gyp:*',
+            '<(angle_path)/src/angle.gyp:*',
+            '../third_party/bspatch/bspatch.gyp:*',
+            '../tools/win/static_initializers/static_initializers.gyp:*',
+          ],
+        }, {
+          'dependencies': [
+            '../third_party/libevent/libevent.gyp:*',
+          ],
+        }],
+        ['toolkit_views==1', {
+          'dependencies': [
+            '../ui/views/controls/webview/webview.gyp:*',
+            '../ui/views/views.gyp:*',
+          ],
+        }],
+        ['use_aura==1', {
+          'dependencies': [
+            '../ui/aura/aura.gyp:*',
+            '../ui/aura_extra/aura_extra.gyp:*',
+          ],
+        }],
+        ['use_ash==1', {
+          'dependencies': [
+            '../ash/ash.gyp:*',
+          ],
+        }],
+        ['remoting==1', {
+          'dependencies': [
+            '../remoting/remoting_all.gyp:remoting_all',
+          ],
+        }],
+        ['use_openssl==0', {
+          'dependencies': [
+            '../net/third_party/nss/ssl.gyp:*',
+          ],
+        }],
+        ['use_openssl==1', {
+          'dependencies': [
+            '../third_party/boringssl/boringssl.gyp:*',
+            '../third_party/boringssl/boringssl_tests.gyp:*',
+          ],
+        }],
+        ['enable_app_list==1', {
+          'dependencies': [
+            '../ui/app_list/app_list.gyp:*',
+          ],
+        }],
+        ['OS!="android" and OS!="ios"', {
+          'dependencies': [
+            '../google_apis/gcm/gcm.gyp:*',
+          ],
+        }],
+        ['(chromeos==1 or OS=="linux" or OS=="win" or OS=="mac") and chromecast==0', {
+          'dependencies': [
+            '../extensions/shell/app_shell.gyp:*',
+          ],
+        }],
+        ['envoy==1', {
+          'dependencies': [
+            '../envoy/envoy.gyp:*',
+          ],
+        }],
+      ],
+    }, # target_name: All
+    {
+      'target_name': 'All_syzygy',
+      'type': 'none',
+      'conditions': [
+        ['OS=="win" and fastbuild==0 and target_arch=="ia32" and '
+            '(syzyasan==1 or syzygy_optimize==1)', {
+          'dependencies': [
+            '../chrome/installer/mini_installer_syzygy.gyp:*',
+          ],
+        }],
+      ],
+    }, # target_name: All_syzygy
+    {
+      # Note: Android uses android_builder_tests below.
+      # TODO: Consider merging that with this target.
+      'target_name': 'chromium_builder_tests',
+      'type': 'none',
+      'dependencies': [
+        '../base/base.gyp:base_unittests',
+        '../components/components_tests.gyp:components_unittests',
+        '../crypto/crypto.gyp:crypto_unittests',
+        '../net/net.gyp:net_unittests',
+        '../skia/skia_tests.gyp:skia_unittests',
+        '../sql/sql.gyp:sql_unittests',
+        '../sync/sync.gyp:sync_unit_tests',
+        '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+        '../ui/display/display.gyp:display_unittests',
+        '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+        '../url/url.gyp:url_unittests',
+      ],
+      'conditions': [
+        ['OS!="ios"', {
+          'dependencies': [
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+          ],
+        }],
+        ['OS!="ios" and OS!="mac"', {
+          'dependencies': [
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+          ],
+        }],
+        ['OS!="ios" and OS!="android"', {
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_shell',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../gin/gin.gyp:gin_unittests',
+            '../google_apis/google_apis.gyp:google_apis_unittests',
+            '../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/cast/cast.gyp:cast_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../mojo/mojo.gyp:mojo',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../third_party/WebKit/public/all.gyp:all_blink',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+        }],
+        ['OS!="ios" and OS!="android" and chromecast==0', {
+          'dependencies': [
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chromedriver_tests',
+            '../chrome/chrome.gyp:chromedriver_unittests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../extensions/extensions_tests.gyp:extensions_browsertests',
+            '../extensions/extensions_tests.gyp:extensions_unittests',
+          ],
+        }],
+        ['OS=="win"', {
+          'dependencies': [
+            '../chrome/chrome.gyp:crash_service',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:setup_unittests',
+            # ../chrome/test/mini_installer requires mini_installer.
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests',
+            '../content/content_shell_and_tests.gyp:copy_test_netscape_plugin',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../sandbox/sandbox.gyp:sbox_integration_tests',
+            '../sandbox/sandbox.gyp:sbox_unittests',
+            '../sandbox/sandbox.gyp:sbox_validation_tests',
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+          ],
+          'conditions': [
+            # remoting_host_installation uses lots of non-trivial GYP that tends
+            # to break because of differences between ninja and msbuild. Make
+            # sure this target is built by the builders on the main waterfall.
+            # See http://crbug.com/180600.
+            ['wix_exists == "True" and sas_dll_exists == "True"', {
+              'dependencies': [
+                '../remoting/remoting.gyp:remoting_host_installation',
+              ],
+            }],
+            ['syzyasan==1', {
+              'variables': {
+                # Disable incremental linking for all modules.
+                # 0: inherit, 1: disabled, 2: enabled.
+                'msvs_debug_link_incremental': '1',
+                'msvs_large_module_debug_link_mode': '1',
+                # Disable RTC. Syzygy explicitly doesn't support RTC
+                # instrumented binaries for now.
+                'win_debug_RuntimeChecks': '0',
+              },
+              'defines': [
+                # Disable iterator debugging (huge speed boost).
+                '_HAS_ITERATOR_DEBUGGING=0',
+              ],
+              'msvs_settings': {
+                'VCLinkerTool': {
+                  # Enable profile information (necessary for SyzyAsan
+                  # instrumentation). This is incompatible with incremental
+                  # linking.
+                  'Profile': 'true',
+                },
+              }
+            }],
+          ],
+        }],
+        ['chromeos==1', {
+          'dependencies': [
+            '../ui/chromeos/ui_chromeos.gyp:ui_chromeos_unittests',
+          ],
+        }],
+        ['OS=="linux"', {
+          'dependencies': [
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests',
+          ],
+        }],
+        ['OS=="linux" and use_dbus==1', {
+          'dependencies': [
+            '../dbus/dbus.gyp:dbus_unittests',
+          ],
+        }],
+        ['OS=="mac"', {
+          'dependencies': [
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+            '../ui/message_center/message_center.gyp:*',
+          ],
+        }],
+        ['test_isolation_mode != "noop"', {
+          'dependencies': [
+            'chromium_swarm_tests',
+          ],
+        }],
+        ['OS!="android"', {
+          'dependencies': [
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+          ],
+        }],
+        ['enable_basic_printing==1 or enable_print_preview==1', {
+          'dependencies': [
+            '../printing/printing.gyp:printing_unittests',
+          ],
+        }],
+        ['use_aura==1', {
+          'dependencies': [
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+            '../ui/aura/aura.gyp:aura_unittests',
+            '../ui/compositor/compositor.gyp:compositor_unittests',
+          ],
+        }],
+        ['use_aura==1 and chromecast==0', {
+          'dependencies': [
+            '../ui/keyboard/keyboard.gyp:keyboard_unittests',
+            '../ui/views/views.gyp:views_unittests',
+          ],
+        }],
+        ['use_aura==1 or toolkit_views==1', {
+          'dependencies': [
+            '../ui/events/events.gyp:events_unittests',
+          ],
+        }],
+        ['use_ash==1', {
+          'dependencies': [
+            '../ash/ash.gyp:ash_unittests',
+          ],
+        }],
+        ['disable_nacl==0', {
+          'dependencies': [
+            '../components/nacl.gyp:nacl_loader_unittests',
+          ],
+        }],
+        ['disable_nacl==0 and disable_nacl_untrusted==0 and enable_nacl_nonsfi_test==1', {
+          'dependencies': [
+            '../components/nacl.gyp:nacl_helper_nonsfi_unittests',
+          ],
+        }],
+        ['disable_nacl==0 and disable_nacl_untrusted==0', {
+          'dependencies': [
+            '../mojo/mojo_nacl_untrusted.gyp:libmojo',
+            '../mojo/mojo_nacl.gyp:monacl_codegen',
+            '../mojo/mojo_nacl.gyp:monacl_sel',
+            '../mojo/mojo_nacl.gyp:monacl_shell',
+          ],
+        }],
+      ],
+    }, # target_name: chromium_builder_tests
+  ],
+  'conditions': [
+    # TODO(GYP): make gn_migration.gypi work unconditionally.
+    ['OS=="mac" or OS=="win" or (OS=="linux" and target_arch=="x64" and chromecast==0)', {
+      'includes': [
+        'gn_migration.gypi',
+      ],
+    }],
+    ['OS!="ios"', {
+      'targets': [
+        {
+          'target_name': 'blink_tests',
+          'type': 'none',
+          'dependencies': [
+            '../third_party/WebKit/public/all.gyp:all_blink',
+          ],
+          'conditions': [
+            ['OS=="android"', {
+              'dependencies': [
+                '../content/content_shell_and_tests.gyp:content_shell_apk',
+                '../breakpad/breakpad.gyp:dump_syms#host',
+                '../breakpad/breakpad.gyp:minidump_stackwalk#host',
+              ],
+            }, {  # OS!="android"
+              'dependencies': [
+                '../content/content_shell_and_tests.gyp:content_shell',
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../components/test_runner/test_runner.gyp:layout_test_helper',
+                '../content/content_shell_and_tests.gyp:content_shell_crash_service',
+              ],
+            }],
+            ['OS!="win" and OS!="android"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="mac"', {
+              'dependencies': [
+                '../components/test_runner/test_runner.gyp:layout_test_helper',
+                '../breakpad/breakpad.gyp:dump_syms#host',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:dump_syms#host',
+              ],
+            }],
+          ],
+        }, # target_name: blink_tests
+      ],
+    }], # OS!=ios
+    ['OS!="ios" and OS!="android" and chromecast==0', {
+      'targets': [
+        {
+          'target_name': 'chromium_builder_nacl_win_integration',
+          'type': 'none',
+          'dependencies': [
+            'chromium_builder_tests',
+          ],
+        }, # target_name: chromium_builder_nacl_win_integration
+        {
+          'target_name': 'chromium_builder_perf',
+          'type': 'none',
+          'dependencies': [
+            '../cc/cc_tests.gyp:cc_perftests',
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:load_library_perf_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:sync_performance_tests',
+            '../content/content_shell_and_tests.gyp:content_shell',
+            '../gpu/gpu.gyp:gpu_perftests',
+            '../media/media.gyp:media_perftests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+          'conditions': [
+            ['OS!="ios" and OS!="win"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_symbols'
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+                '../gpu/gpu.gyp:angle_perftests',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_builder_perf
+        {
+          'target_name': 'chromium_gpu_builder',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_gl_tests',
+            '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test',
+            '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test',
+            '../gpu/gpu.gyp:gl_tests',
+            '../gpu/gpu.gyp:angle_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+          'conditions': [
+            ['OS!="ios" and OS!="win"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_symbols'
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_gpu_builder
+        {
+          'target_name': 'chromium_gpu_debug_builder',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_gl_tests',
+            '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test',
+            '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test',
+            '../gpu/gpu.gyp:gl_tests',
+            '../gpu/gpu.gyp:angle_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+          'conditions': [
+            ['OS!="ios" and OS!="win"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_symbols'
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_gpu_debug_builder
+        {
+          # This target contains everything we need to run tests on the special
+          # device-equipped WebRTC bots. We have device-requiring tests in
+          # browser_tests and content_browsertests.
+          'target_name': 'chromium_builder_webrtc',
+          'type': 'none',
+          'dependencies': [
+            'chromium_builder_perf',
+            '../chrome/chrome.gyp:browser_tests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../third_party/webrtc/tools/tools.gyp:frame_analyzer',
+            '../third_party/webrtc/tools/tools.gyp:rgba_to_i420_converter',
+          ],
+          'conditions': [
+            ['remoting==1', {
+              'dependencies': [
+                '../remoting/remoting.gyp:*',
+              ],
+            }],
+          ],
+        },  # target_name: chromium_builder_webrtc
+        {
+          'target_name': 'chromium_builder_chromedriver',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chromedriver',
+            '../chrome/chrome.gyp:chromedriver_tests',
+            '../chrome/chrome.gyp:chromedriver_unittests',
+          ],
+        },  # target_name: chromium_builder_chromedriver
+        {
+          'target_name': 'chromium_builder_asan',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+
+            # We refer to content_shell directly rather than blink_tests
+            # because we don't want the _unittests binaries.
+            '../content/content_shell_and_tests.gyp:content_shell',
+          ],
+          'conditions': [
+            ['OS!="win"', {
+              'dependencies': [
+                '../net/net.gyp:hpack_fuzz_wrapper',
+                '../net/net.gyp:dns_fuzz_stub',
+                '../skia/skia.gyp:filter_fuzz_stub',
+              ],
+            }],
+            ['enable_ipc_fuzzer==1 and component!="shared_library" and '
+                 '(OS=="linux" or OS=="win")', {
+              'dependencies': [
+                '../tools/ipc_fuzzer/ipc_fuzzer.gyp:*',
+              ],
+            }],
+            ['chromeos==0', {
+              'dependencies': [
+                '../v8/src/d8.gyp:d8#host',
+                '../third_party/pdfium/samples/samples.gyp:pdfium_test',
+              ],
+            }],
+            ['internal_filter_fuzzer==1', {
+              'dependencies': [
+                '../skia/tools/clusterfuzz-data/fuzzers/filter_fuzzer/filter_fuzzer.gyp:filter_fuzzer',
+              ],
+            }], # internal_filter_fuzzer
+            ['clang==1', {
+              'dependencies': [
+                'sanitizers/sanitizers.gyp:llvm-symbolizer',
+              ],
+            }],
+            ['OS=="win" and fastbuild==0 and target_arch=="ia32" and syzyasan==1', {
+              'dependencies': [
+                '../chrome/chrome_syzygy.gyp:chrome_dll_syzygy',
+                '../content/content_shell_and_tests.gyp:content_shell_syzyasan',
+              ],
+              'conditions': [
+                ['chrome_multiple_dll==1', {
+                  'dependencies': [
+                    '../chrome/chrome_syzygy.gyp:chrome_child_dll_syzygy',
+                  ],
+                }],
+              ],
+            }],
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_nacl_sdk',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+          ],
+          'conditions': [
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:chrome_nacl_win64',
+              ]
+            }],
+          ],
+        },  #target_name: chromium_builder_nacl_sdk
+      ],  # targets
+    }], #OS!=ios and OS!=android
+    ['OS=="android"', {
+      'targets': [
+        {
+          # The current list of tests for Android.  This is temporary
+          # until the full set is supported.  If adding a new test here,
+          # please also add it to build/android/pylib/gtest/gtest_config.py,
+          # or else the test is not run.
+          #
+          # WARNING:
+          # Do not add targets here without communicating the implications
+          # on tryserver triggers and load.  Please discuss with
+          # chrome-infrastructure-team.
+          'target_name': 'android_builder_tests',
+          'type': 'none',
+          'dependencies': [
+            '../base/android/jni_generator/jni_generator.gyp:jni_generator_tests',
+            '../base/base.gyp:base_unittests',
+            '../breakpad/breakpad.gyp:breakpad_unittests_deps',
+            # Also compile the tools needed to deal with minidumps, they are
+            # needed to run minidump tests upstream.
+            '../breakpad/breakpad.gyp:dump_syms#host',
+            '../breakpad/breakpad.gyp:symupload#host',
+            '../breakpad/breakpad.gyp:minidump_dump#host',
+            '../breakpad/breakpad.gyp:minidump_stackwalk#host',
+            '../build/android/pylib/device/commands/commands.gyp:chromium_commands',
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_perftests_apk',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_gl_tests',
+            '../content/content_shell_and_tests.gyp:content_junit_tests',
+            '../content/content_shell_and_tests.gyp:chromium_linker_test_apk',
+            '../content/content_shell_and_tests.gyp:content_shell_test_apk',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../gpu/gpu.gyp:gl_tests',
+            '../gpu/gpu.gyp:gpu_perftests_apk',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../media/media.gyp:media_perftests_apk',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests_apk',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../net/net.gyp:net_unittests',
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests_deps',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../testing/android/junit/junit_test.gyp:junit_unit_tests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/WebKit/public/all.gyp:*',
+            '../tools/android/android_tools.gyp:android_tools',
+            '../tools/android/android_tools.gyp:memconsumer',
+            '../tools/android/findbugs_plugin/findbugs_plugin.gyp:findbugs_plugin_test',
+            '../ui/android/ui_android.gyp:ui_android_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/events/events.gyp:events_unittests',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+            # Unit test bundles packaged as an apk.
+            '../base/base.gyp:base_unittests_apk',
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests_apk',
+            '../cc/cc_tests.gyp:cc_unittests_apk',
+            '../components/components_tests.gyp:components_browsertests_apk',
+            '../components/components_tests.gyp:components_unittests_apk',
+            '../content/content_shell_and_tests.gyp:content_browsertests_apk',
+            '../content/content_shell_and_tests.gyp:content_gl_tests_apk',
+            '../content/content_shell_and_tests.gyp:content_unittests_apk',
+            '../content/content_shell_and_tests.gyp:video_decode_accelerator_unittest_apk',
+            '../gpu/gpu.gyp:gl_tests_apk',
+            '../gpu/gpu.gyp:gpu_unittests_apk',
+            '../ipc/ipc.gyp:ipc_tests_apk',
+            '../media/media.gyp:media_unittests_apk',
+            '../media/midi/midi.gyp:midi_unittests_apk',
+            '../net/net.gyp:net_unittests_apk',
+            '../sandbox/sandbox.gyp:sandbox_linux_jni_unittests_apk',
+            '../skia/skia_tests.gyp:skia_unittests_apk',
+            '../sql/sql.gyp:sql_unittests_apk',
+            '../sync/sync.gyp:sync_unit_tests_apk',
+            '../tools/android/heap_profiler/heap_profiler.gyp:heap_profiler_unittests_apk',
+            '../ui/android/ui_android.gyp:ui_android_unittests_apk',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests_apk',
+            '../ui/events/events.gyp:events_unittests_apk',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests_apk',
+            '../ui/gl/gl_tests.gyp:gl_unittests_apk',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests_apk',
+          ],
+          'conditions': [
+            ['chromecast==0', {
+              'dependencies': [
+                '../android_webview/android_webview.gyp:android_webview_unittests',
+                '../chrome/chrome.gyp:unit_tests',
+                # Unit test bundles packaged as an apk.
+                '../android_webview/android_webview.gyp:android_webview_test_apk',
+                '../android_webview/android_webview.gyp:android_webview_unittests_apk',
+                '../chrome/android/chrome_apk.gyp:chrome_public_test_apk',
+                '../chrome/chrome.gyp:chrome_junit_tests',
+                '../chrome/chrome.gyp:chrome_shell_test_apk',
+                '../chrome/chrome.gyp:chrome_sync_shell_test_apk',
+                '../chrome/chrome.gyp:chrome_shell_uiautomator_tests',
+                '../chrome/chrome.gyp:chromedriver_webview_shell_apk',
+                '../chrome/chrome.gyp:unit_tests_apk',
+                '../third_party/custom_tabs_client/src/custom_tabs_client.gyp:custom_tabs_client_example_apk',
+              ],
+            }],
+          ],
+        },
+        {
+          # WebRTC Chromium tests to run on Android.
+          'target_name': 'android_builder_chromium_webrtc',
+          'type': 'none',
+          'dependencies': [
+            '../build/android/pylib/device/commands/commands.gyp:chromium_commands',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../tools/android/android_tools.gyp:android_tools',
+            '../tools/android/android_tools.gyp:memconsumer',
+            '../content/content_shell_and_tests.gyp:content_browsertests_apk',
+          ],
+        },  # target_name: android_builder_chromium_webrtc
+      ], # targets
+    }], # OS="android"
+    ['OS=="mac"', {
+      'targets': [
+        {
+          # Target to build everything plus the dmg.  We don't put the dmg
+          # in the All target because developers really don't need it.
+          'target_name': 'all_and_dmg',
+          'type': 'none',
+          'dependencies': [
+            'All',
+            '../chrome/chrome.gyp:build_app_dmg',
+          ],
+        },
+        # These targets are here so the build bots can use them to build
+        # subsets of a full tree for faster cycle times.
+        {
+          'target_name': 'chromium_builder_dbg',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../rlz/rlz.gyp:*',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_rel',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_tsan_mac',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_valgrind_mac',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../net/net.gyp:net_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+      ],  # targets
+    }], # OS="mac"
+    ['OS=="win"', {
+      'targets': [
+        # These targets are here so the build bots can use them to build
+        # subsets of a full tree for faster cycle times.
+        {
+          'target_name': 'chromium_builder',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:crash_service',
+            '../chrome/chrome.gyp:gcapi_test',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:setup_unittests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../content/content_shell_and_tests.gyp:copy_test_netscape_plugin',
+            # ../chrome/test/mini_installer requires mini_installer.
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/events/events.gyp:events_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+            '../ui/views/views.gyp:views_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+          'conditions': [
+            ['target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_tsan_win',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_lkgr_drmemory_win',
+          'type': 'none',
+          'dependencies': [
+            '../components/test_runner/test_runner.gyp:layout_test_helper',
+            '../content/content_shell_and_tests.gyp:content_shell',
+            '../content/content_shell_and_tests.gyp:content_shell_crash_service',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_drmemory_win',
+          'type': 'none',
+          'dependencies': [
+            '../ash/ash.gyp:ash_shell_unittests',
+            '../ash/ash.gyp:ash_unittests',
+            '../base/base.gyp:base_unittests',
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chrome_app_unittests',
+            '../chrome/chrome.gyp:chromedriver_unittests',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:setup_unittests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../components/test_runner/test_runner.gyp:layout_test_helper',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_shell',
+            '../content/content_shell_and_tests.gyp:content_shell_crash_service',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../extensions/extensions_tests.gyp:extensions_browsertests',
+            '../extensions/extensions_tests.gyp:extensions_unittests',
+            '../gin/gin.gyp:gin_shell',
+            '../gin/gin.gyp:gin_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../google_apis/google_apis.gyp:google_apis_unittests',
+            '../gpu/gpu.gyp:angle_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/cast/cast.gyp:cast_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../mojo/mojo.gyp:mojo',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests',
+            '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests',
+            '../ui/accessibility/accessibility.gyp:accessibility_unittests',
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+            '../ui/aura/aura.gyp:aura_unittests',
+            '../ui/compositor/compositor.gyp:compositor_unittests',
+            '../ui/display/display.gyp:display_unittests',
+            '../ui/events/events.gyp:events_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../ui/keyboard/keyboard.gyp:keyboard_unittests',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+      ],  # targets
+      'conditions': [
+        ['branding=="Chrome"', {
+          'targets': [
+            {
+              'target_name': 'chrome_official_builder_no_unittests',
+              'type': 'none',
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+                '../chrome/chrome.gyp:gcapi_dll',
+                '../chrome/chrome.gyp:pack_policy_templates',
+                '../chrome/installer/mini_installer.gyp:mini_installer',
+                '../cloud_print/cloud_print.gyp:cloud_print',
+                '../courgette/courgette.gyp:courgette',
+                '../courgette/courgette.gyp:courgette64',
+                '../remoting/remoting.gyp:remoting_webapp',
+                '../third_party/widevine/cdm/widevine_cdm.gyp:widevinecdmadapter',
+              ],
+              'conditions': [
+                ['target_arch=="ia32"', {
+                  'dependencies': [
+                    '../chrome/chrome.gyp:crash_service_win64',
+                  ],
+                }],
+                ['component != "shared_library" and wix_exists == "True" and \
+                    sas_dll_exists == "True"', {
+                  'dependencies': [
+                    '../remoting/remoting.gyp:remoting_host_installation',
+                  ],
+                }], # component != "shared_library"
+              ]
+            }, {
+              'target_name': 'chrome_official_builder',
+              'type': 'none',
+              'dependencies': [
+                'chrome_official_builder_no_unittests',
+                '../base/base.gyp:base_unittests',
+                '../chrome/chrome.gyp:browser_tests',
+                '../chrome/chrome.gyp:sync_integration_tests',
+                '../ipc/ipc.gyp:ipc_tests',
+                '../media/media.gyp:media_unittests',
+                '../media/midi/midi.gyp:midi_unittests',
+                '../net/net.gyp:net_unittests',
+                '../printing/printing.gyp:printing_unittests',
+                '../sql/sql.gyp:sql_unittests',
+                '../sync/sync.gyp:sync_unit_tests',
+                '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+                '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+                '../ui/gl/gl_tests.gyp:gl_unittests',
+                '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+                '../ui/views/views.gyp:views_unittests',
+                '../url/url.gyp:url_unittests',
+              ],
+            },
+          ],  # targets
+        }],  # branding=="Chrome"
+      ],  # conditions
+    }],  # OS=="win"
+    ['chromeos==1', {
+      'targets': [
+        {
+          'target_name': 'chromiumos_preflight',
+          'type': 'none',
+          'dependencies': [
+            '../breakpad/breakpad.gyp:minidump_stackwalk',
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:chromedriver',
+            '../content/content_shell_and_tests.gyp:video_decode_accelerator_unittest',
+            '../content/content_shell_and_tests.gyp:video_encode_accelerator_unittest',
+            '../media/media.gyp:media_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_example_video_decode',
+            '../sandbox/sandbox.gyp:chrome_sandbox',
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests',
+            '../third_party/mesa/mesa.gyp:osmesa',
+            '../tools/telemetry/telemetry.gyp:bitmaptools#host',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:clear_system_cache',
+          ],
+          'conditions': [
+            ['disable_nacl==0', {
+              'dependencies': [
+                '../components/nacl.gyp:nacl_helper',
+                '../native_client/src/trusted/service_runtime/linux/nacl_bootstrap.gyp:nacl_helper_bootstrap',
+              ],
+            }],
+          ],
+        },
+      ],  # targets
+    }], # "chromeos==1"
+    ['use_aura==1', {
+      'targets': [
+        {
+          'target_name': 'aura_builder',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../ui/app_list/app_list.gyp:*',
+            '../ui/aura/aura.gyp:*',
+            '../ui/aura_extra/aura_extra.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/compositor/compositor.gyp:*',
+            '../ui/display/display.gyp:display_unittests',
+            '../ui/events/events.gyp:*',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../ui/keyboard/keyboard.gyp:*',
+            '../ui/snapshot/snapshot.gyp:snapshot_unittests',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+            '../ui/wm/wm.gyp:*',
+            'blink_tests',
+          ],
+          'conditions': [
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+            ['use_ash==1', {
+              'dependencies': [
+                '../ash/ash.gyp:ash_shell',
+                '../ash/ash.gyp:ash_unittests',
+              ],
+            }],
+            ['OS=="linux"', {
+              # Tests that currently only work on Linux.
+              'dependencies': [
+                '../base/base.gyp:base_unittests',
+                '../ipc/ipc.gyp:ipc_tests',
+                '../sql/sql.gyp:sql_unittests',
+                '../sync/sync.gyp:sync_unit_tests',
+              ],
+            }],
+            ['chromeos==1', {
+              'dependencies': [
+                '../chromeos/chromeos.gyp:chromeos_unittests',
+                '../ui/chromeos/ui_chromeos.gyp:ui_chromeos_unittests',
+              ],
+            }],
+            ['use_ozone==1', {
+              'dependencies': [
+                '../ui/ozone/ozone.gyp:*',
+                '../ui/ozone/demo/ozone_demos.gyp:*',
+              ],
+            }],
+            ['chromecast==0', {
+              'dependencies': [
+                '../chrome/chrome.gyp:browser_tests',
+                '../chrome/chrome.gyp:chrome',
+                '../chrome/chrome.gyp:interactive_ui_tests',
+                '../chrome/chrome.gyp:unit_tests',
+                '../ui/message_center/message_center.gyp:*',
+                '../ui/views/examples/examples.gyp:views_examples_with_content_exe',
+                '../ui/views/views.gyp:views',
+                '../ui/views/views.gyp:views_unittests',
+              ],
+            }],
+          ],
+        },
+      ],  # targets
+    }], # "use_aura==1"
+    ['test_isolation_mode != "noop"', {
+      'targets': [
+        {
+          'target_name': 'chromium_swarm_tests',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests_run',
+            '../content/content_shell_and_tests.gyp:content_browsertests_run',
+            '../content/content_shell_and_tests.gyp:content_unittests_run',
+            '../net/net.gyp:net_unittests_run',
+          ],
+          'conditions': [
+            ['chromecast==0', {
+              'dependencies': [
+                '../chrome/chrome.gyp:browser_tests_run',
+                '../chrome/chrome.gyp:interactive_ui_tests_run',
+                '../chrome/chrome.gyp:sync_integration_tests_run',
+                '../chrome/chrome.gyp:unit_tests_run',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_swarm_tests
+      ],
+    }],
+    ['archive_chromoting_tests==1', {
+      'targets': [
+        {
+          'target_name': 'chromoting_swarm_tests',
+          'type': 'none',
+          'dependencies': [
+            '../testing/chromoting/integration_tests.gyp:*',
+          ],
+        }, # target_name: chromoting_swarm_tests
+      ]
+    }],
+    ['OS=="mac" and toolkit_views==1', {
+      'targets': [
+        {
+          'target_name': 'macviews_builder',
+          'type': 'none',
+          'dependencies': [
+            '../ui/views/examples/examples.gyp:views_examples_with_content_exe',
+            '../ui/views/views.gyp:views',
+            '../ui/views/views.gyp:views_unittests',
+          ],
+        },  # target_name: macviews_builder
+      ],  # targets
+    }],  # os=='mac' and toolkit_views==1
+  ],  # conditions
+}
diff --git a/build/android/AndroidManifest.xml b/build/android/AndroidManifest.xml
new file mode 100644
index 0000000..f27872e
--- /dev/null
+++ b/build/android/AndroidManifest.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.  Use of this
+  source code is governed by a BSD-style license that can be found in the
+  LICENSE file.
+-->
+
+<!--
+  This is a dummy manifest which is required by:
+  1. aapt when generating R.java in java.gypi:
+     Nothing in the manifest is used, but it is still required by aapt.
+  2. lint: [min|target]SdkVersion are required by lint and should
+     be kept up-to-date.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="dummy.package">
+
+    <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="22" />
+
+</manifest>
diff --git a/build/android/BUILD.gn b/build/android/BUILD.gn
new file mode 100644
index 0000000..d90ad70
--- /dev/null
+++ b/build/android/BUILD.gn
@@ -0,0 +1,56 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+sun_tools_jar_path = "$root_gen_dir/sun_tools_jar/tools.jar"
+
+action("find_sun_tools_jar") {
+  script = "//build/android/gyp/find_sun_tools_jar.py"
+  depfile = "$target_gen_dir/$target_name.d"
+  outputs = [
+    depfile,
+    sun_tools_jar_path,
+  ]
+  args = [
+    "--depfile",
+    rebase_path(depfile, root_build_dir),
+    "--output",
+    rebase_path(sun_tools_jar_path, root_build_dir),
+  ]
+}
+
+java_prebuilt("sun_tools_java") {
+  jar_path = sun_tools_jar_path
+  jar_dep = ":find_sun_tools_jar"
+}
+
+action("cpplib_stripped") {
+  _strip_bin = "${android_tool_prefix}strip"
+  _soname = "libc++_shared.so"
+  _input_so = "${android_libcpp_root}/libs/${android_app_abi}/${_soname}"
+  _output_so = "${root_out_dir}/lib.stripped/${_soname}"
+
+  script = "//build/gn_run_binary.py"
+  inputs = [
+    _strip_bin,
+  ]
+  sources = [
+    _input_so,
+  ]
+  outputs = [
+    _output_so,
+  ]
+
+  _rebased_strip_bin = rebase_path(_strip_bin, root_out_dir)
+  _rebased_input_so = rebase_path(_input_so, root_out_dir)
+  _rebased_output_so = rebase_path(_output_so, root_out_dir)
+  args = [
+    _rebased_strip_bin,
+    "--strip-unneeded",
+    "-o",
+    _rebased_output_so,
+    _rebased_input_so,
+  ]
+}
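+
+# A minimal usage sketch (hypothetical target name): a group like this would
+# let packaging rules depend on the stripped library produced above.
+#
+#   group("libcpp_stripped_deps") {
+#     deps = [ "//build/android:cpplib_stripped" ]
+#   }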
diff --git a/build/android/CheckInstallApk-debug.apk b/build/android/CheckInstallApk-debug.apk
new file mode 100644
index 0000000..3dc31910
--- /dev/null
+++ b/build/android/CheckInstallApk-debug.apk
Binary files differ
diff --git a/build/android/OWNERS b/build/android/OWNERS
new file mode 100644
index 0000000..9a5d270
--- /dev/null
+++ b/build/android/OWNERS
@@ -0,0 +1,3 @@
+jbudorick@chromium.org
+klundberg@chromium.org
+pasko@chromium.org
diff --git a/build/android/PRESUBMIT.py b/build/android/PRESUBMIT.py
new file mode 100644
index 0000000..6e0a3de
--- /dev/null
+++ b/build/android/PRESUBMIT.py
@@ -0,0 +1,64 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for android buildbot.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API built into depot_tools.
+"""
+
+
+def CommonChecks(input_api, output_api):
+  output = []
+
+  def J(*dirs):
+    """Returns a path relative to presubmit directory."""
+    return input_api.os_path.join(input_api.PresubmitLocalPath(), *dirs)
+
+  output.extend(input_api.canned_checks.RunPylint(
+      input_api,
+      output_api,
+      black_list=[r'pylib/symbols/.*\.py$', r'gyp/.*\.py$', r'gn/.*\.py$'],
+      extra_paths_list=[
+          J(), J('..', '..', 'third_party', 'android_testrunner'),
+          J('buildbot')]))
+  output.extend(input_api.canned_checks.RunPylint(
+      input_api,
+      output_api,
+      white_list=[r'gyp/.*\.py$', r'gn/.*\.py$'],
+      extra_paths_list=[J('gyp'), J('gn')]))
+
+  # Disabled due to http://crbug.com/410936
+  #output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
+  #input_api, output_api, J('buildbot', 'tests')))
+
+  pylib_test_env = dict(input_api.environ)
+  pylib_test_env.update({
+      'PYTHONPATH': input_api.PresubmitLocalPath(),
+      'PYTHONDONTWRITEBYTECODE': '1',
+  })
+  output.extend(input_api.canned_checks.RunUnitTests(
+      input_api,
+      output_api,
+      unit_tests=[
+          J('pylib', 'base', 'test_dispatcher_unittest.py'),
+          J('pylib', 'device', 'battery_utils_test.py'),
+          J('pylib', 'device', 'device_utils_test.py'),
+          J('pylib', 'device', 'logcat_monitor_test.py'),
+          J('pylib', 'gtest', 'gtest_test_instance_test.py'),
+          J('pylib', 'instrumentation',
+            'instrumentation_test_instance_test.py'),
+          J('pylib', 'results', 'json_results_test.py'),
+          J('pylib', 'utils', 'md5sum_test.py'),
+      ],
+      env=pylib_test_env))
+  return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return CommonChecks(input_api, output_api)
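+
+
+# Note: these hooks are invoked automatically by depot_tools. To exercise
+# them locally before uploading, one can run 'git cl presubmit' from the
+# repository root.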
diff --git a/build/android/adb_android_webview_command_line b/build/android/adb_android_webview_command_line
new file mode 100755
index 0000000..791e270
--- /dev/null
+++ b/build/android/adb_android_webview_command_line
@@ -0,0 +1,20 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current content shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the content shell
+# flags. For example:
+#   adb_android_webview_command_line --enable-webgl
+#
+# To remove all content shell flags, pass an empty string for the flags:
+#   adb_android_webview_command_line ""
+
+. $(dirname $0)/adb_command_line_functions.sh
+CMD_LINE_FILE=/data/local/tmp/android-webview-command-line
+REQUIRES_SU=0
+set_command_line "$@"
+
diff --git a/build/android/adb_chrome_public_command_line b/build/android/adb_chrome_public_command_line
new file mode 100755
index 0000000..9bf91c6
--- /dev/null
+++ b/build/android/adb_chrome_public_command_line
@@ -0,0 +1,19 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current Chrome flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the Chrome
+# flags. For example:
+#   adb_chrome_public_command_line --enable-webgl
+#
+# To remove all Chrome flags, pass an empty string for the flags:
+#   adb_chrome_public_command_line ""
+
+. $(dirname $0)/adb_command_line_functions.sh
+CMD_LINE_FILE=/data/local/chrome-command-line
+REQUIRES_SU=1
+set_command_line "$@"
diff --git a/build/android/adb_chrome_shell_command_line b/build/android/adb_chrome_shell_command_line
new file mode 100755
index 0000000..750f906
--- /dev/null
+++ b/build/android/adb_chrome_shell_command_line
@@ -0,0 +1,20 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current chrome shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the chrome shell
+# flags. For example:
+#   adb_chrome_shell_command_line --enable-webgl
+#
+# To remove all chrome shell flags, pass an empty string for the flags:
+#   adb_chrome_shell_command_line ""
+
+. $(dirname $0)/adb_command_line_functions.sh
+CMD_LINE_FILE=/data/local/tmp/chrome-shell-command-line
+REQUIRES_SU=0
+set_command_line "$@"
+
diff --git a/build/android/adb_command_line_functions.sh b/build/android/adb_command_line_functions.sh
new file mode 100755
index 0000000..7ea98b09
--- /dev/null
+++ b/build/android/adb_command_line_functions.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Variables must be set before calling:
+# CMD_LINE_FILE - Path on device to flags file.
+# REQUIRES_SU - Set to 1 if path requires root.
+function set_command_line() {
+  SU_CMD=""
+  if [[ "$REQUIRES_SU" = 1 ]]; then
+    # Older androids accept "su -c", while newer use "su uid".
+    SDK_LEVEL=$(adb shell getprop ro.build.version.sdk | tr -d '\r')
+    # E.g. if no device connected.
+    if [[ -z "$SDK_LEVEL" ]]; then
+      exit 1
+    fi
+    SU_CMD="su -c"
+    if (( $SDK_LEVEL >= 21 )); then
+      SU_CMD="su 0"
+    fi
+  fi
+
+  if [ $# -eq 0 ] ; then
+    # If nothing specified, print the command line (stripping off "chrome ")
+    adb shell "cat $CMD_LINE_FILE 2>/dev/null" | cut -d ' ' -s -f2-
+  elif [ $# -eq 1 ] && [ "$1" = '' ] ; then
+    # If given an empty string, delete the command line.
+    set -x
+    adb shell $SU_CMD rm $CMD_LINE_FILE >/dev/null
+  else
+    # Else set it.
+    set -x
+    adb shell "echo 'chrome $*' | $SU_CMD dd of=$CMD_LINE_FILE"
+    # Prevent other apps from modifying flags (this can create security issues).
+    adb shell $SU_CMD chmod 0664 $CMD_LINE_FILE
+  fi
+}
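+
+# Minimal wrapper sketch: a script sourcing this file only needs to set the
+# two variables above and forward its arguments (the path below is
+# hypothetical; see the wrapper scripts alongside this file for real ones):
+#
+#   . $(dirname $0)/adb_command_line_functions.sh
+#   CMD_LINE_FILE=/data/local/tmp/my-app-command-line
+#   REQUIRES_SU=0
+#   set_command_line "$@"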
+
diff --git a/build/android/adb_content_shell_command_line b/build/android/adb_content_shell_command_line
new file mode 100755
index 0000000..2ac7ece
--- /dev/null
+++ b/build/android/adb_content_shell_command_line
@@ -0,0 +1,20 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current content shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the content shell
+# flags. For example:
+#   adb_content_shell_command_line --enable-webgl
+#
+# To remove all content shell flags, pass an empty string for the flags:
+#   adb_content_shell_command_line ""
+
+. $(dirname $0)/adb_command_line_functions.sh
+CMD_LINE_FILE=/data/local/tmp/content-shell-command-line
+REQUIRES_SU=0
+set_command_line "$@"
+
diff --git a/build/android/adb_device_functions.sh b/build/android/adb_device_functions.sh
new file mode 100755
index 0000000..66cc32f
--- /dev/null
+++ b/build/android/adb_device_functions.sh
@@ -0,0 +1,139 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# A collection of functions useful for maintaining android devices
+
+
+# Run an adb command on all connected devices in parallel.
+# Usage: adb_all command line to eval.  Quoting is optional.
+#
+# Examples:
+#  adb_all install Chrome.apk
+#  adb_all 'shell cat /path/to/file'
+#
+adb_all() {
+  if [[ $# == 0 ]]; then
+    echo "Usage: adb_all <adb command>.  Quoting is optional."
+    echo "Example: adb_all install Chrome.apk"
+    return 1
+  fi
+  local DEVICES=$(adb_get_devices -b)
+  local NUM_DEVICES=$(echo $DEVICES | wc -w)
+  if (( $NUM_DEVICES > 1 )); then
+    echo "Looping over $NUM_DEVICES devices"
+  fi
+  _adb_multi "$DEVICES" "$*"
+}
+
+
+# Run a command on each connected device.  Quoting the command is suggested
+# but not required.  The script sets up the variable DEVICE to correspond to
+# the current serial number.  Intended for complex one-liners that don't work
+# in adb_all.
+# Usage: adb_device_loop 'command line to eval'
+adb_device_loop() {
+  if [[ $# == 0 ]]; then
+    echo "Intended for more complex one-liners that cannot be done with" \
+        "adb_all."
+    echo 'Usage: adb_device_loop "echo $DEVICE: $(adb root &&' \
+        'adb shell cat /data/local.prop)"'
+    return 1
+  fi
+  local DEVICES=$(adb_get_devices)
+  if [[ -z $DEVICES ]]; then
+    return
+  fi
+  # Do not change DEVICE variable name - part of api
+  for DEVICE in $DEVICES; do
+    DEV_TYPE=$(adb -s $DEVICE shell getprop ro.product.device | sed 's/\r//')
+    echo "Running on $DEVICE ($DEV_TYPE)"
+    ANDROID_SERIAL=$DEVICE eval "$*"
+  done
+}
+
+# Erases data from any devices visible on adb.  To preserve a device,
+# disconnect it or:
+#  1) Reboot it into fastboot with 'adb reboot bootloader'
+#  2) Run wipe_all_devices to wipe remaining devices
+#  3) Restore it with 'fastboot reboot'
+#
+#  Usage: wipe_all_devices [-f]
+#
+wipe_all_devices() {
+  if [[ -z $(which adb) || -z $(which fastboot) ]]; then
+    echo "aborting: adb and fastboot not in path"
+    return 1
+  elif ! $(groups | grep -q 'plugdev'); then
+    echo "If fastboot fails, run: 'sudo adduser $(whoami) plugdev'"
+  fi
+
+  local DEVICES=$(adb_get_devices -b)
+
+  if [[ $1 != '-f' ]]; then
+    echo "This will ERASE ALL DATA from $(echo $DEVICES | wc -w) device."
+    read -p "Hit enter to continue"
+  fi
+
+  _adb_multi "$DEVICES" "reboot bootloader"
+  # Subshell to isolate job list
+  (
+  for DEVICE in $DEVICES; do
+    fastboot_erase $DEVICE &
+  done
+  wait
+  )
+
+  # Reboot devices together
+  for DEVICE in $DEVICES; do
+    fastboot -s $DEVICE reboot
+  done
+}
+
+# Wipe a device in fastboot.
+# Usage fastboot_erase [serial]
+fastboot_erase() {
+  if [[ -n $1 ]]; then
+    echo "Wiping $1"
+    local SERIAL="-s $1"
+  else
+    if [ -z $(fastboot devices) ]; then
+      echo "No devices in fastboot, aborting."
+      echo "Check out wipe_all_devices to see if sufficient"
+      echo "You can put a device in fastboot using adb reboot bootloader"
+      return 1
+    fi
+    local SERIAL=""
+  fi
+  fastboot $SERIAL erase cache
+  fastboot $SERIAL erase userdata
+}
+
+# Get list of devices connected via adb
+# Args: -b block until adb detects a device
+adb_get_devices() {
+  local DEVICES="$(adb devices | grep 'device$')"
+  if [[ -z $DEVICES && $1 == '-b' ]]; then
+    echo '- waiting for device -' >&2
+    local DEVICES="$(adb wait-for-device devices | grep 'device$')"
+  fi
+  echo "$DEVICES" | awk -vORS=' ' '{print $1}' | sed 's/ $/\n/'
+}
+
+###################################################
+## HELPER FUNCTIONS
+###################################################
+
+# Run an adb command in parallel over a device list
+_adb_multi() {
+  local DEVICES=$1
+  local ADB_ARGS=$2
+  (
+    for DEVICE in $DEVICES; do
+      adb -s $DEVICE $ADB_ARGS &
+    done
+    wait
+  )
+}
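+
+# Example usage, assuming this file has been sourced into an interactive
+# shell (the second line is an illustrative one-liner):
+#
+#   adb_all install Chrome.apk
+#   adb_device_loop 'echo $DEVICE: $(adb shell getprop ro.build.id)'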
diff --git a/build/android/adb_gdb b/build/android/adb_gdb
new file mode 100755
index 0000000..65ec7b2
--- /dev/null
+++ b/build/android/adb_gdb
@@ -0,0 +1,1047 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+
+# A generic script used to attach to a running Chromium process and
+# debug it. Most users should not use this directly, but one of the
+# wrapper scripts like adb_gdb_content_shell
+#
+# Use --help to print full usage instructions.
+#
+
+PROGNAME=$(basename "$0")
+PROGDIR=$(dirname "$0")
+
+# Location of Chromium-top-level sources.
+CHROMIUM_SRC=$(cd "$PROGDIR"/../.. >/dev/null && pwd 2>/dev/null)
+
+# Location of Chromium out/ directory.
+if [ -z "$CHROMIUM_OUT_DIR" ]; then
+  CHROMIUM_OUT_DIR=out
+fi
+
+TMPDIR=
+GDBSERVER_PIDFILE=
+TARGET_GDBSERVER=
+COMMAND_PREFIX=
+
+clean_exit () {
+  if [ "$TMPDIR" ]; then
+    GDBSERVER_PID=$(cat $GDBSERVER_PIDFILE 2>/dev/null)
+    if [ "$GDBSERVER_PID" ]; then
+      log "Killing background gdbserver process: $GDBSERVER_PID"
+      kill -9 $GDBSERVER_PID >/dev/null 2>&1
+    fi
+    if [ "$TARGET_GDBSERVER" ]; then
+      log "Removing target gdbserver binary: $TARGET_GDBSERVER."
+      "$ADB" shell "$COMMAND_PREFIX" rm "$TARGET_GDBSERVER" >/dev/null 2>&1
+    fi
+    log "Cleaning up: $TMPDIR"
+    rm -rf "$TMPDIR"
+  fi
+  trap "" EXIT
+  exit $1
+}
+
+# Ensure clean exit on Ctrl-C or normal exit.
+trap "clean_exit 1" INT HUP QUIT TERM
+trap "clean_exit \$?" EXIT
+
+panic () {
+  echo "ERROR: $@" >&2
+  exit 1
+}
+
+fail_panic () {
+  if [ $? != 0 ]; then panic "$@"; fi
+}
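+
+# Example: fail_panic is keyed off the exit status of the previous command,
+# so it must directly follow the command it guards, e.g.
+#   adb pull /system/build.prop /tmp/build.prop
+#   fail_panic "Could not pull build.prop!"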
+
+log () {
+  if [ "$VERBOSE" -gt 0 ]; then
+    echo "$@"
+  fi
+}
+
+DEFAULT_PULL_LIBS_DIR=/tmp/$USER-adb-gdb-libs
+
+# NOTE: Allow wrapper scripts to set various defaults through ADB_GDB_XXX
+# environment variables. This is only for cosmetic reasons, i.e. to
+# display the proper defaults in the --help output.
+
+# Allow wrapper scripts to set the default activity through
+# the ADB_GDB_ACTIVITY variable. Users are still able to change the
+# final activity name through --activity=<name> option.
+#
+# This is only for cosmetic reasons, i.e. to display the proper default
+# in the --help output.
+#
+DEFAULT_ACTIVITY=${ADB_GDB_ACTIVITY:-".Main"}
+
+# Allow wrapper scripts to set the program name through ADB_GDB_PROGNAME
+PROGNAME=${ADB_GDB_PROGNAME:-$(basename "$0")}
+
+ACTIVITY=$DEFAULT_ACTIVITY
+ADB=
+ANNOTATE=
+# Note: Ignore BUILDTYPE variable, because the Ninja build doesn't use it.
+BUILDTYPE=
+FORCE=
+GDBEXEPOSTFIX=gdb
+GDBINIT=
+GDBSERVER=
+HELP=
+NDK_DIR=
+NO_PULL_LIBS=
+PACKAGE_NAME=
+PID=
+PORT=
+PRIVILEGED=
+PRIVILEGED_INDEX=
+PROGRAM_NAME="activity"
+PULL_LIBS=
+PULL_LIBS_DIR=
+SANDBOXED=
+SANDBOXED_INDEX=
+START=
+SU_PREFIX=
+SYMBOL_DIR=
+TARGET_ARCH=
+TOOLCHAIN=
+VERBOSE=0
+
+for opt; do
+  optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)')
+  case $opt in
+    --adb=*)
+      ADB=$optarg
+      ;;
+    --activity=*)
+      ACTIVITY=$optarg
+      ;;
+    --annotate=*)
+      ANNOTATE=$optarg
+      ;;
+    --force)
+      FORCE=true
+      ;;
+    --gdbserver=*)
+      GDBSERVER=$optarg
+      ;;
+    --gdb=*)
+      GDB=$optarg
+      ;;
+    --help|-h|-?)
+      HELP=true
+      ;;
+    --ndk-dir=*)
+      NDK_DIR=$optarg
+      ;;
+    --no-pull-libs)
+      NO_PULL_LIBS=true
+      ;;
+    --package-name=*)
+      PACKAGE_NAME=$optarg
+      ;;
+    --pid=*)
+      PID=$optarg
+      ;;
+    --port=*)
+      PORT=$optarg
+      ;;
+    --privileged)
+      PRIVILEGED=true
+      ;;
+    --privileged=*)
+      PRIVILEGED=true
+      PRIVILEGED_INDEX=$optarg
+      ;;
+    --program-name=*)
+      PROGRAM_NAME=$optarg
+      ;;
+    --pull-libs)
+      PULL_LIBS=true
+      ;;
+    --pull-libs-dir=*)
+      PULL_LIBS_DIR=$optarg
+      ;;
+    --sandboxed)
+      SANDBOXED=true
+      ;;
+    --sandboxed=*)
+      SANDBOXED=true
+      SANDBOXED_INDEX=$optarg
+      ;;
+    --script=*)
+      GDBINIT=$optarg
+      ;;
+    --start)
+      START=true
+      ;;
+    --su-prefix=*)
+      SU_PREFIX=$optarg
+      ;;
+    --symbol-dir=*)
+      SYMBOL_DIR=$optarg
+      ;;
+    --out-dir=*)
+      CHROMIUM_OUT_DIR=$optarg
+      ;;
+    --target-arch=*)
+      TARGET_ARCH=$optarg
+      ;;
+    --toolchain=*)
+      TOOLCHAIN=$optarg
+      ;;
+    --ui)
+      GDBEXEPOSTFIX=gdbtui
+      ;;
+    --verbose)
+      VERBOSE=$(( $VERBOSE + 1 ))
+      ;;
+    --debug)
+      BUILDTYPE=Debug
+      ;;
+    --release)
+      BUILDTYPE=Release
+      ;;
+    -*)
+      panic "Unknown option $OPT, see --help." >&2
+      ;;
+    *)
+      if [ "$PACKAGE_NAME" ]; then
+        panic "You can only provide a single package name as argument!\
+ See --help."
+      fi
+      PACKAGE_NAME=$opt
+      ;;
+  esac
+done
+
+print_help_options () {
+  cat <<EOF
+EOF
+}
+
+if [ "$HELP" ]; then
+  if [ "$ADB_GDB_PROGNAME" ]; then
+    # Assume wrapper scripts all provide a default package name.
+    cat <<EOF
+Usage: $PROGNAME [options]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+EOF
+  else
+    # Assume this is a direct call to adb_gdb
+  cat <<EOF
+Usage: $PROGNAME [options] [<package-name>]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+
+If provided, <package-name> must be the name of the Android application's
+package name to be debugged. You can also use --package-name=<name> to
+specify it.
+EOF
+  fi
+
+  cat <<EOF
+
+This script is used to debug a running $PROGRAM_NAME process.
+This can be a regular Android application process, sandboxed (if you use the
+--sandboxed or --sandboxed=<num> option) or a privileged (--privileged or
+--privileged=<num>) service.
+
+This script needs several things to work properly. It will try to pick
+them up automatically for you though:
+
+   - target gdbserver binary
+   - host gdb client (e.g. arm-linux-androideabi-gdb)
+   - directory with symbolic version of $PROGRAM_NAME's shared libraries.
+
+You can also use --ndk-dir=<path> to specify an alternative NDK installation
+directory.
+
+The script tries to find the most recent debug versions of the shared
+libraries under one of the following directories:
+
+  \$CHROMIUM_SRC/<out>/Release/lib/           (used by Ninja builds)
+  \$CHROMIUM_SRC/<out>/Debug/lib/             (used by Ninja builds)
+  \$CHROMIUM_SRC/<out>/Release/lib.target/    (used by Make builds)
+  \$CHROMIUM_SRC/<out>/Debug/lib.target/      (used by Make builds)
+
+Where <out> is 'out' by default, unless the --out-dir=<path> option is used or
+the CHROMIUM_OUT_DIR environment variable is defined.
+
+You can restrict this search by using --release or --debug to specify the
+build type, or simply use --symbol-dir=<path> to specify the directory
+manually.
+
+The script tries to extract the target architecture from your target device,
+but if this fails, it will default to 'arm'. Use --target-arch=<name> to
+force its value.
+
+Otherwise, the script will complain, but you can use the --gdbserver,
+--gdb and --symbol-dir options to specify everything manually.
+
+An alternative to --gdb=<file> is to use --toolchain=<path> to specify
+the path to the host target-specific cross-toolchain.
+
+You will also need the 'adb' tool in your path. Otherwise, use the --adb
+option. The script will complain if there is more than one device connected
+and ANDROID_SERIAL is not defined.
+
+The first time you use it on a device, the script will pull many system
+libraries required by the process into a temporary directory. This
+is done to greatly improve the debugging experience, e.g. by allowing
+readable thread stacks. The libraries are copied to the following
+directory by default:
+
+  $DEFAULT_PULL_LIBS_DIR/
+
+But you can use the --pull-libs-dir=<path> option to specify an
+alternative. The script can detect when you change the connected device,
+and will re-pull the libraries only in this case. You can however force it
+with the --pull-libs option.
+
+Any local .gdbinit script will be ignored, but it is possible to pass a
+gdb command script with the --script=<file> option. Note that its commands
+will be passed to gdb after the remote connection and library symbol
+loading have completed.
+
+Valid options:
+  --help|-h|-?          Print this message.
+  --verbose             Increase verbosity.
+
+  --sandboxed           Debug first sandboxed process we find.
+  --sandboxed=<num>     Debug specific sandboxed process.
+  --symbol-dir=<path>   Specify directory with symbol shared libraries.
+  --out-dir=<path>      Specify the out directory.
+  --package-name=<name> Specify package name (alternative to 1st argument).
+  --privileged          Debug first privileged process we find.
+  --privileged=<num>    Debug specific privileged process.
+  --program-name=<name> Specify program name (cosmetic only).
+  --pid=<pid>           Specify application process pid.
+  --force               Kill any previous debugging session, if any.
+  --start               Start package's activity on device.
+  --ui                  Use gdbtui instead of gdb.
+  --activity=<name>     Activity name for --start [$DEFAULT_ACTIVITY].
+  --annotate=<num>      Enable gdb annotation.
+  --script=<file>       Specify extra GDB init script.
+
+  --gdbserver=<file>    Specify target gdbserver binary.
+  --gdb=<file>          Specify host gdb client binary.
+  --target-arch=<name>  Specify NDK target arch.
+  --adb=<file>          Specify host ADB binary.
+  --port=<port>         Specify the tcp port to use.
+
+  --su-prefix=<prefix>  Prepend <prefix> to 'adb shell' commands that are
+                        run by this script. This can be useful to use
+                        the 'su' program on rooted production devices.
+                        e.g. --su-prefix="su -c"
+
+  --pull-libs           Force system libraries extraction.
+  --no-pull-libs        Do not extract any system library.
+  --pull-libs-dir=<path> Specify system libraries extraction directory.
+
+  --debug               Use libraries under out/Debug.
+  --release             Use libraries under out/Release.
+
+EOF
+  exit 0
+fi
+
+if [ -z "$PACKAGE_NAME" ]; then
+  panic "Please specify a package name on the command line. See --help."
+fi
+
+if [ -z "$NDK_DIR" ]; then
+  ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python -c \
+'from pylib.constants import ANDROID_NDK_ROOT; print ANDROID_NDK_ROOT,')
+else
+  if [ ! -d "$NDK_DIR" ]; then
+    panic "Invalid directory: $NDK_DIR"
+  fi
+  if [ ! -f "$NDK_DIR/ndk-build" ]; then
+    panic "Not a valid NDK directory: $NDK_DIR"
+  fi
+  ANDROID_NDK_ROOT=$NDK_DIR
+fi
+
+if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then
+  panic "Unknown --script file: $GDBINIT"
+fi
+
+# Check that ADB is in our path
+if [ -z "$ADB" ]; then
+  ADB=$(which adb 2>/dev/null)
+  if [ -z "$ADB" ]; then
+    panic "Can't find 'adb' tool in your path. Install it or use \
+--adb=<file>"
+  fi
+  log "Auto-config: --adb=$ADB"
+fi
+
+# Check that it works minimally
+ADB_VERSION=$($ADB version 2>/dev/null)
+echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge"
+if [ $? != 0 ]; then
+  panic "Your 'adb' tool seems invalid, use --adb=<file> to specify a \
+different one: $ADB"
+fi
+
+# If there is more than one device connected, and ANDROID_SERIAL is not
+# defined, print an error message. Note that the output of 'adb devices'
+# contains a header line and a trailing empty line, hence the +2.
+NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l)
+if [ "$NUM_DEVICES_PLUS2" -gt 3 -a -z "$ANDROID_SERIAL" ]; then
+  echo "ERROR: There is more than one Android device connected to ADB."
+  echo "Please define ANDROID_SERIAL to specify which one to use."
+  exit 1
+fi
+
+# Run a command through adb shell, strip the extra \r from the output
+# and return the correct status code to detect failures. This assumes
+# that the adb shell command prints a final \n to stdout.
+# $1+: command to run
+# Out: command's stdout
+# Return: command's status
+# Note: the command's stderr is lost
+adb_shell () {
+  local TMPOUT="$(mktemp)"
+  local LASTLINE RET
+  local ADB=${ADB:-adb}
+
+  # The weird sed rule is to strip the final \r on each output line
+  # Since 'adb shell' never returns the command's proper exit/status code,
+  # we force it to print it as '%%<status>' in the temporary output file,
+  # which we will later strip from it.
+  $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \
+      sed -e 's![[:cntrl:]]!!g' > $TMPOUT
+  # Get last line in log, which contains the exit code from the command
+  LASTLINE=$(sed -e '$!d' $TMPOUT)
+  # Extract the status code from the end of the line, which must
+  # be '%%<code>'.
+  RET=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
+  # Remove the status code from the last line. Note that this may result
+  # in an empty line.
+  LASTLINE=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
+  # The output itself: all lines except the status code.
+  sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE"
+  # Remove temp file.
+  rm -f $TMPOUT
+  # Exit with the appropriate status.
+  return $RET
+}
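+
+# For example, unlike plain 'adb shell', adb_shell propagates the remote exit
+# status, so it can be used directly in conditionals:
+#   if adb_shell ls /system/bin/app_process.real >/dev/null; then
+#     echo "AddressSanitizer build detected"
+#   fi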
+
+# Find the target architecture from the target device.
+# This returns an NDK-compatible architecture name.
+# out: NDK Architecture name, or empty string.
+get_gyp_target_arch () {
+  local ARCH=$(adb_shell getprop ro.product.cpu.abi)
+  case $ARCH in
+    mips|x86|x86_64) echo "$ARCH";;
+    arm64*) echo "arm64";;
+    arm*) echo "arm";;
+    *) echo "";
+  esac
+}
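+
+# For example, a device reporting ro.product.cpu.abi=armeabi-v7a matches the
+# 'arm*' pattern above and yields 'arm'.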
+
+if [ -z "$TARGET_ARCH" ]; then
+  TARGET_ARCH=$(get_gyp_target_arch)
+  if [ -z "$TARGET_ARCH" ]; then
+    TARGET_ARCH=arm
+  fi
+else
+  # Nit: accept Chromium's 'ia32' as a valid target architecture. This
+  # script prefers the NDK 'x86' name instead because it is used to locate
+  # NDK-specific files (e.g. the host gdb).
+  if [ "$TARGET_ARCH" = "ia32" ]; then
+    TARGET_ARCH=x86
+    log "Auto-config: --target-arch=$TARGET_ARCH  (equivalent to ia32)"
+  fi
+fi
+
+# Detect the NDK system name, i.e. the name used to identify the host.
+# out: NDK system name (e.g. 'linux' or 'darwin')
+get_ndk_host_system () {
+  local HOST_OS
+  if [ -z "$NDK_HOST_SYSTEM" ]; then
+    HOST_OS=$(uname -s)
+    case $HOST_OS in
+      Linux) NDK_HOST_SYSTEM=linux;;
+      Darwin) NDK_HOST_SYSTEM=darwin;;
+      *) panic "You can't run this script on this system: $HOST_OS";;
+    esac
+  fi
+  echo "$NDK_HOST_SYSTEM"
+}
+
+# Detect the NDK host architecture name.
+# out: NDK arch name (e.g. 'x86' or 'x86_64')
+get_ndk_host_arch () {
+  local HOST_ARCH HOST_OS
+  if [ -z "$NDK_HOST_ARCH" ]; then
+    HOST_OS=$(get_ndk_host_system)
+    HOST_ARCH=$(uname -p)
+    case $HOST_ARCH in
+      i?86) NDK_HOST_ARCH=x86;;
+      x86_64|amd64) NDK_HOST_ARCH=x86_64;;
+      *) panic "You can't run this script on this host architecture: $HOST_ARCH";;
+    esac
+    # Darwin trick: "uname -p" always returns i386 on 64-bit installations.
+    if [ "$HOST_OS" = darwin -a "$NDK_HOST_ARCH" = "x86" ]; then
+      # Use '/usr/bin/file', not just 'file' to avoid buggy MacPorts
+      # implementations of the tool. See http://b.android.com/53769
+      HOST_64BITS=$(/usr/bin/file -L "$SHELL" | grep -e "x86[_-]64")
+      if [ "$HOST_64BITS" ]; then
+        NDK_HOST_ARCH=x86_64
+      fi
+    fi
+  fi
+  echo "$NDK_HOST_ARCH"
+}
+
+# Convert an NDK architecture name into a GNU configure triplet.
+# $1: NDK architecture name (e.g. 'arm')
+# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi')
+get_arch_gnu_config () {
+  case $1 in
+    arm)
+      echo "arm-linux-androideabi"
+      ;;
+    arm64)
+      echo "aarch64-linux-android"
+      ;;
+    x86)
+      echo "i686-linux-android"
+      ;;
+    x86_64)
+      echo "x86_64-linux-android"
+      ;;
+    mips)
+      echo "mipsel-linux-android"
+      ;;
+    *)
+      echo "$ARCH-linux-android"
+      ;;
+  esac
+}
+
+# Convert an NDK architecture name into a toolchain name prefix
+# $1: NDK architecture name (e.g. 'arm')
+# Out: NDK toolchain name prefix (e.g. 'arm-linux-androideabi')
+get_arch_toolchain_prefix () {
+  # Return the configure triplet, except for x86!
+  if [ "$1" = "x86" ]; then
+    echo "$1"
+  else
+    get_arch_gnu_config $1
+  fi
+}
+
+# Find a NDK toolchain prebuilt file or sub-directory.
+# This will probe the various arch-specific toolchain directories
+# in the NDK for the needed file.
+# $1: NDK install path
+# $2: NDK architecture name
+# $3: prebuilt sub-path to look for.
+# Out: file path, or empty if none is found.
+get_ndk_toolchain_prebuilt () {
+  local NDK_DIR="${1%/}"
+  local ARCH="$2"
+  local SUBPATH="$3"
+  local NAME="$(get_arch_toolchain_prefix $ARCH)"
+  local FILE TARGET
+  FILE=$NDK_DIR/toolchains/$NAME-4.9/prebuilt/$SUBPATH
+  if [ ! -f "$FILE" ]; then
+    FILE=$NDK_DIR/toolchains/$NAME-4.8/prebuilt/$SUBPATH
+    if [ ! -f "$FILE" ]; then
+      FILE=
+    fi
+  fi
+  echo "$FILE"
+}
+
+# Find the path to an NDK's toolchain full prefix for a given architecture
+# $1: NDK install path
+# $2: NDK target architecture name
+# Out: install path + binary prefix (e.g.
+#      ".../path/to/bin/arm-linux-androideabi-")
+get_ndk_toolchain_fullprefix () {
+  local NDK_DIR="$1"
+  local ARCH="$2"
+  local TARGET NAME HOST_OS HOST_ARCH GCC CONFIG
+
+  # NOTE: This will need to be updated if the NDK changes the names or moves
+  #        the location of its prebuilt toolchains.
+  #
+  GCC=
+  HOST_OS=$(get_ndk_host_system)
+  HOST_ARCH=$(get_ndk_host_arch)
+  CONFIG=$(get_arch_gnu_config $ARCH)
+  GCC=$(get_ndk_toolchain_prebuilt \
+        "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-gcc")
+  if [ -z "$GCC" -a "$HOST_ARCH" = "x86_64" ]; then
+    GCC=$(get_ndk_toolchain_prebuilt \
+          "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-gcc")
+  fi
+  if [ ! -f "$GCC" -a "$ARCH" = "x86" ]; then
+    # Special case, the x86 toolchain used to be incorrectly
+    # named i686-android-linux-gcc!
+    GCC=$(get_ndk_toolchain_prebuilt \
+          "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-gcc")
+  fi
+  if [ -z "$GCC" ]; then
+    panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \
+Please verify your NDK installation!"
+  fi
+  echo "${GCC%%gcc}"
+}
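+
+# For example, on a 64-bit Linux host with a target arch of 'arm', this
+# typically resolves to something like:
+#   $NDK_DIR/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/arm-linux-androideabi-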
+
+# $1: NDK install path
+# $2: target architecture.
+get_ndk_gdbserver () {
+  local NDK_DIR="$1"
+  local ARCH=$2
+  local BINARY
+
+  # The location has moved after NDK r8
+  BINARY=$NDK_DIR/prebuilt/android-$ARCH/gdbserver/gdbserver
+  if [ ! -f "$BINARY" ]; then
+    BINARY=$(get_ndk_toolchain_prebuilt "$NDK_DIR" "$ARCH" gdbserver)
+  fi
+  echo "$BINARY"
+}
+
+# Check/probe the path to the Android toolchain installation. Always
+# use the NDK versions of gdb and gdbserver. They must match to avoid
+# issues when both binaries do not speak the same wire protocol.
+#
+if [ -z "$TOOLCHAIN" ]; then
+  ANDROID_TOOLCHAIN=$(get_ndk_toolchain_fullprefix \
+                      "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  ANDROID_TOOLCHAIN=$(dirname "$ANDROID_TOOLCHAIN")
+  log "Auto-config: --toolchain=$ANDROID_TOOLCHAIN"
+else
+  # Be flexible, allow one to specify either the install path or the bin
+  # sub-directory in --toolchain:
+  #
+  if [ -d "$TOOLCHAIN/bin" ]; then
+    TOOLCHAIN=$TOOLCHAIN/bin
+  fi
+  ANDROID_TOOLCHAIN=$TOOLCHAIN
+fi
+
+# Cosmetic: Remove trailing directory separator.
+ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/}
+
+# Find host GDB client binary
+if [ -z "$GDB" ]; then
+  GDB=$(which $ANDROID_TOOLCHAIN/*-$GDBEXEPOSTFIX 2>/dev/null | head -1)
+  if [ -z "$GDB" ]; then
+    panic "Can't find Android gdb client in your path, check your \
+--toolchain or --gdb path."
+  fi
+  log "Host gdb client: $GDB"
+fi
+
+# Find the gdbserver binary; we will later push it to /data/local/tmp.
+# This ensures that both gdbserver and $GDB talk the same binary protocol,
+# otherwise weird problems will appear.
+#
+if [ -z "$GDBSERVER" ]; then
+  GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  if [ -z "$GDBSERVER" ]; then
+    panic "Can't find NDK gdbserver binary. use --gdbserver to specify \
+valid one!"
+  fi
+  log "Auto-config: --gdbserver=$GDBSERVER"
+fi
+
+# A unique ID for this script's session. This needs to be the same in all
+# sub-shell commands we're going to launch, so take the PID of the launcher
+# process.
+TMP_ID=$$
+
+# Temporary directory, will get cleaned up on exit.
+TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID
+mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/*
+
+GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid
+
+# If --force is specified, try to kill any gdbserver process started by the
+# same user on the device. Normally, these are killed automatically by the
+# script on exit, but there are a few corner cases where this would still
+# be needed.
+if [ "$FORCE" ]; then
+  GDBSERVER_PIDS=$(adb_shell ps | awk '$9 ~ /gdbserver/ { print $2; }')
+  for GDB_PID in $GDBSERVER_PIDS; do
+    log "Killing previous gdbserver (PID=$GDB_PID)"
+    adb_shell kill -9 $GDB_PID
+  done
+fi
+
+if [ "$START" ]; then
+  log "Starting $PROGRAM_NAME on device."
+  adb_shell am start -n $PACKAGE_NAME/$ACTIVITY 2>/dev/null
+  adb_shell ps | grep -q $PACKAGE_NAME
+  fail_panic "Could not start $PROGRAM_NAME on device. Are you sure the \
+package is installed?"
+fi
+
+# Return the timestamp of a given file, as seconds since the epoch.
+# $1: file path
+# Out: file timestamp
+get_file_timestamp () {
+  stat -c %Y "$1" 2>/dev/null
+}
+
+# Detect the build type and symbol directory. This is done by finding
+# the most recent sub-directory containing debug shared libraries under
+# $CHROMIUM_SRC/$CHROMIUM_OUT_DIR/
+#
+# $1: $BUILDTYPE value, can be empty
+# Out: nothing, but this sets SYMBOL_DIR
+#
+detect_symbol_dir () {
+  local SUBDIRS SUBDIR LIST DIR DIR_LIBS TSTAMP
+  # Note: Ninja places debug libraries under out/$BUILDTYPE/lib/, while
+  # Make places them under out/$BUILDTYPE/lib.target.
+  if [ "$1" ]; then
+    SUBDIRS="$1/lib $1/lib.target"
+  else
+    SUBDIRS="Release/lib Debug/lib Release/lib.target Debug/lib.target"
+  fi
+  LIST=$TMPDIR/scan-subdirs-$$.txt
+  printf "" > "$LIST"
+  for SUBDIR in $SUBDIRS; do
+    DIR=$CHROMIUM_SRC/$CHROMIUM_OUT_DIR/$SUBDIR
+    if [ -d "$DIR" ]; then
+      # Ignore build directories that don't contain symbol versions
+      # of the shared libraries.
+      DIR_LIBS=$(ls "$DIR"/lib*.so 2>/dev/null)
+      if [ -z "$DIR_LIBS" ]; then
+        echo "No shared libs: $DIR"
+        continue
+      fi
+      TSTAMP=$(get_file_timestamp "$DIR")
+      printf "%s %s\n" "$TSTAMP" "$SUBDIR" >> "$LIST"
+    fi
+  done
+  SUBDIR=$(cat $LIST | sort -r | head -1 | cut -d" " -f2)
+  rm -f "$LIST"
+
+  if [ -z "$SUBDIR" ]; then
+    if [ -z "$1" ]; then
+      panic "Could not find any build directory under \
+$CHROMIUM_SRC/$CHROMIUM_OUT_DIR. Please build the program first!"
+    else
+      panic "Could not find any $1 directory under \
+$CHROMIUM_SRC/$CHROMIUM_OUT_DIR. Check your build type!"
+    fi
+  fi
+
+  SYMBOL_DIR=$CHROMIUM_SRC/$CHROMIUM_OUT_DIR/$SUBDIR
+  log "Auto-config: --symbol-dir=$SYMBOL_DIR"
+}
+
+if [ -z "$SYMBOL_DIR" ]; then
+  detect_symbol_dir "$BUILDTYPE"
+fi
+
+# Allow several concurrent debugging sessions
+TARGET_GDBSERVER=/data/data/$PACKAGE_NAME/gdbserver-adb-gdb-$TMP_ID
+TMP_TARGET_GDBSERVER=/data/local/tmp/gdbserver-adb-gdb-$TMP_ID
+
+# Return the build fingerprint contained in a build.prop file.
+# $1: path to build.prop file
+get_build_fingerprint_from () {
+  cat "$1" | grep -e '^ro.build.fingerprint=' | cut -d= -f2
+}
+
+
+ORG_PULL_LIBS_DIR=$PULL_LIBS_DIR
+PULL_LIBS_DIR=${PULL_LIBS_DIR:-$DEFAULT_PULL_LIBS_DIR}
+
+HOST_FINGERPRINT=
+DEVICE_FINGERPRINT=$(adb_shell getprop ro.build.fingerprint)
+log "Device build fingerprint: $DEVICE_FINGERPRINT"
+
+# If --pull-libs-dir is not specified, and this is a platform build, check
+# whether we can use the symbolic libraries under $ANDROID_PRODUCT_OUT/symbols/
+# directly, i.e. when the build fingerprint matches the device.
+if [ -z "$ORG_PULL_LIBS_DIR" -a \
+     "$ANDROID_PRODUCT_OUT" -a \
+     -f "$ANDROID_PRODUCT_OUT/system/build.prop" ]; then
+  ANDROID_FINGERPRINT=$(get_build_fingerprint_from \
+                        "$ANDROID_PRODUCT_OUT"/system/build.prop)
+  log "Android build fingerprint:  $ANDROID_FINGERPRINT"
+  if [ "$ANDROID_FINGERPRINT" = "$DEVICE_FINGERPRINT" ]; then
+    log "Perfect match!"
+    PULL_LIBS_DIR=$ANDROID_PRODUCT_OUT/symbols
+    HOST_FINGERPRINT=$ANDROID_FINGERPRINT
+    if [ "$PULL_LIBS" ]; then
+      log "Ignoring --pull-libs since the device and platform build \
+fingerprints match."
+      NO_PULL_LIBS=true
+    fi
+  fi
+fi
+
+# If neither --pull-libs nor --no-pull-libs was specified, check the build
+# fingerprints of the device, and the cached system libraries on the host.
+#
+if [ -z "$NO_PULL_LIBS" -a -z "$PULL_LIBS" ]; then
+  if [ ! -f "$PULL_LIBS_DIR/build.prop" ]; then
+    log "Auto-config: --pull-libs  (no cached libraries)"
+    PULL_LIBS=true
+  else
+    HOST_FINGERPRINT=$(get_build_fingerprint_from "$PULL_LIBS_DIR/build.prop")
+    log "Host build fingerprint:   $HOST_FINGERPRINT"
+    if [ "$HOST_FINGERPRINT" == "$DEVICE_FINGERPRINT" ]; then
+      log "Auto-config: --no-pull-libs (fingerprint match)"
+      NO_PULL_LIBS=true
+    else
+      log "Auto-config: --pull-libs  (fingerprint mismatch)"
+      PULL_LIBS=true
+    fi
+  fi
+fi
+
+# Extract the system libraries from the device if necessary.
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
+  echo "Extracting system libraries into: $PULL_LIBS_DIR"
+fi
+
+mkdir -p "$PULL_LIBS_DIR"
+fail_panic "Can't create --libs-dir directory: $PULL_LIBS_DIR"
+
+# If requested, work for M-x gdb.  The gdb indirections make it
+# difficult to pass --annotate=3 to the gdb binary itself.
+GDB_ARGS=
+if [ "$ANNOTATE" ]; then
+  GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE"
+fi
+
+# Get the PID from the first argument or else find the PID of the
+# browser process.
+if [ -z "$PID" ]; then
+  PROCESSNAME=$PACKAGE_NAME
+  if [ "$SANDBOXED_INDEX" ]; then
+    PROCESSNAME=$PROCESSNAME:sandboxed_process$SANDBOXED_INDEX
+  elif [ "$SANDBOXED" ]; then
+    PROCESSNAME=$PROCESSNAME:sandboxed_process
+    PID=$(adb_shell ps | \
+          awk '$9 ~ /^'$PROCESSNAME'/ { print $2; }' | head -1)
+  elif [ "$PRIVILEGED_INDEX" ]; then
+    PROCESSNAME=$PROCESSNAME:privileged_process$PRIVILEGED_INDEX
+  elif [ "$PRIVILEGED" ]; then
+    PROCESSNAME=$PROCESSNAME:privileged_process
+    PID=$(adb_shell ps | \
+          awk '$9 ~ /^'$PROCESSNAME'/ { print $2; }' | head -1)
+  fi
+  if [ -z "$PID" ]; then
+    PID=$(adb_shell ps | \
+          awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1)
+  fi
+  if [ -z "$PID" ]; then
+    if [ "$START" ]; then
+      panic "Can't find application process PID, did it crash?"
+    else
+      panic "Can't find application process PID, are you sure it is \
+running? Try using --start."
+    fi
+  fi
+  log "Found process PID: $PID"
+elif [ "$SANDBOXED" ]; then
+  echo "WARNING: --sandboxed option ignored due to use of --pid."
+elif [ "$PRIVILEGED" ]; then
+  echo "WARNING: --privileged option ignored due to use of --pid."
+fi
+
+# Determine if 'adb shell' runs as root or not.
+# If so, we can launch gdbserver directly, otherwise, we have to
+# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable.
+#
+if [ "$SU_PREFIX" ]; then
+  # Need to check that this works properly.
+  SU_PREFIX_TEST_LOG=$TMPDIR/su-prefix.log
+  adb_shell $SU_PREFIX \"echo "foo"\" > $SU_PREFIX_TEST_LOG 2>&1
+  if [ $? != 0 -o "$(cat $SU_PREFIX_TEST_LOG)" != "foo" ]; then
+    echo "ERROR: Cannot use '$SU_PREFIX' as a valid su prefix:"
+    echo "$ adb shell $SU_PREFIX \"echo foo\""
+    cat $SU_PREFIX_TEST_LOG
+    exit 1
+  fi
+  COMMAND_PREFIX="$SU_PREFIX \""
+  COMMAND_SUFFIX="\""
+else
+  SHELL_UID=$(adb shell cat /proc/self/status | \
+              awk '$1 == "Uid:" { print $2; }')
+  log "Shell UID: $SHELL_UID"
+  if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then
+    COMMAND_PREFIX="run-as $PACKAGE_NAME"
+    COMMAND_SUFFIX=
+  else
+    COMMAND_PREFIX=
+    COMMAND_SUFFIX=
+  fi
+fi
+log "Command prefix: '$COMMAND_PREFIX'"
+log "Command suffix: '$COMMAND_SUFFIX'"
+
+# Pull the device's system libraries that are mapped by our process.
+# Pulling all system libraries would take too long, so determine which
+# ones we need by looking at /proc/$PID/maps instead.
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
+  echo "Extracting system libraries into: $PULL_LIBS_DIR"
+  rm -f $PULL_LIBS_DIR/build.prop
+  MAPPINGS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps $COMMAND_SUFFIX)
+  if [ $? != 0 ]; then
+    echo "ERROR: Could not list process's memory mappings."
+    if [ "$SU_PREFIX" ]; then
+      panic "Are you sure your --su-prefix is correct?"
+    else
+      panic "Use --su-prefix if the application is not debuggable."
+    fi
+  fi
+  SYSTEM_LIBS=$(echo "$MAPPINGS" | \
+      awk '$6 ~ /\/system\/.*\.so$/ { print $6; }' | sort -u)
+  for SYSLIB in /system/bin/linker $SYSTEM_LIBS; do
+    echo "Pulling from device: $SYSLIB"
+    DST_FILE=$PULL_LIBS_DIR$SYSLIB
+    DST_DIR=$(dirname "$DST_FILE")
+    mkdir -p "$DST_DIR" && adb pull $SYSLIB "$DST_FILE" 2>/dev/null
+    fail_panic "Could not pull $SYSLIB from device !?"
+  done
+  echo "Pulling device build.prop"
+  adb pull /system/build.prop $PULL_LIBS_DIR/build.prop
+  fail_panic "Could not pull device build.prop !?"
+fi
+
+# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4
+# so we can add them to solib-search-path later.
+SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \
+             grep -v "^$" | tr '\n' ':')
+
+# This is a re-implementation of gdbclient, where we use compatible
+# versions of gdbserver and $GDB to ensure that everything works
+# properly.
+#
+
+# Push gdbserver to the device
+log "Pushing gdbserver $GDBSERVER to $TARGET_GDBSERVER"
+adb push $GDBSERVER $TMP_TARGET_GDBSERVER &>/dev/null
+adb shell $COMMAND_PREFIX cp $TMP_TARGET_GDBSERVER $TARGET_GDBSERVER
+adb shell rm $TMP_TARGET_GDBSERVER
+fail_panic "Could not copy gdbserver to the device!"
+
+if [ -z "$PORT" ]; then
+  PORT=5039
+fi
+HOST_PORT=$PORT
+TARGET_PORT=$PORT
+
+# Select correct app_process for architecture.
+case $TARGET_ARCH in
+  arm|x86|mips) GDBEXEC=app_process;;
+  arm64|x86_64) GDBEXEC=app_process64;;
+  *) fail_panic "Unknown app_process for architecture!";;
+esac
+
+# Detect AddressSanitizer setup on the device. In that case app_process is a
+# script, and the real executable is app_process.real.
+GDBEXEC_ASAN=app_process.real
+adb_shell ls /system/bin/$GDBEXEC_ASAN
+if [ $? == 0 ]; then
+  GDBEXEC=$GDBEXEC_ASAN
+fi
+
+# Pull the app_process binary from the device.
+log "Pulling $GDBEXEC from device"
+adb pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null
+fail_panic "Could not retrieve $GDBEXEC from the device!"
+
+# Set up network redirection
+log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_PORT)"
+adb forward tcp:$HOST_PORT tcp:$TARGET_PORT
+fail_panic "Could not setup network redirection from \
+host:localhost:$HOST_PORT to device:localhost:$TARGET_PORT!"
+
+# Start gdbserver in the background
+# Note that using run-as requires the package to be debuggable.
+#
+# If it is not, this will fail horribly. The alternative is to run the
+# program as root, which of course requires root privileges.
+# Maybe we should add a --root option to enable this?
+#
+log "Starting gdbserver in the background:"
+GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log
+log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \
+--attach $PID $COMMAND_SUFFIX"
+("$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \
+ --attach $PID $COMMAND_SUFFIX > $GDBSERVER_LOG 2>&1) &
+GDBSERVER_PID=$!
+echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE
+log "background job pid: $GDBSERVER_PID"
+
+# Check that it is still running after a few seconds. If not, this means we
+# could not properly attach to it.
+sleep 2
+log "Job control: $(jobs -l)"
+STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }')
+if [ "$STATE" != "Running" ]; then
+  echo "ERROR: GDBServer could not attach to PID $PID!"
+  if [ "$(adb_shell su -c getenforce)" != "Permissive" ]; then
+    echo "SELinux is in enforcing mode. Changing it to permissive."
+    adb_shell su -c setenforce 0
+    if [ "$(adb_shell su -c getenforce)" != "Permissive" ]; then
+      echo "ERROR: Failed to change SELinux to permissive mode."
+      echo "Failure log (use --verbose for more information):"
+      cat $GDBSERVER_LOG
+      exit 1
+    fi
+  else
+    echo "Failure log (use --verbose for more information):"
+    cat $GDBSERVER_LOG
+    exit 1
+  fi
+fi
+
+# Generate a file containing useful GDB initialization commands
+readonly COMMANDS=$TMPDIR/gdb.init
+log "Generating GDB initialization commands file: $COMMANDS"
+echo -n "" > $COMMANDS
+echo "set print pretty 1" >> $COMMANDS
+echo "python" >> $COMMANDS
+echo "import sys" >> $COMMANDS
+echo "sys.path.insert(0, '$CHROMIUM_SRC/tools/gdb/')" >> $COMMANDS
+echo "try:" >> $COMMANDS
+echo "  import gdb_chrome" >> $COMMANDS
+echo "finally:" >> $COMMANDS
+echo "  sys.path.pop(0)" >> $COMMANDS
+echo "end" >> $COMMANDS
+echo "file $TMPDIR/$GDBEXEC" >> $COMMANDS
+echo "directory $CHROMIUM_SRC" >> $COMMANDS
+echo "set solib-absolute-prefix $PULL_LIBS_DIR" >> $COMMANDS
+echo "set solib-search-path $SOLIB_DIRS:$PULL_LIBS_DIR:$SYMBOL_DIR" \
+    >> $COMMANDS
+echo "echo Attaching and reading symbols, this may take a while.." \
+    >> $COMMANDS
+echo "target remote :$HOST_PORT" >> $COMMANDS
+
+if [ "$GDBINIT" ]; then
+  cat "$GDBINIT" >> $COMMANDS
+fi
+
+if [ "$VERBOSE" -gt 0 ]; then
+  echo "### START $COMMANDS"
+  cat $COMMANDS
+  echo "### END $COMMANDS"
+fi
+
+log "Launching gdb client: $GDB $GDB_ARGS -x $COMMANDS"
+$GDB $GDB_ARGS -x $COMMANDS &&
+rm -f "$GDBSERVER_PIDFILE"
diff --git a/build/android/adb_gdb_android_webview_shell b/build/android/adb_gdb_android_webview_shell
new file mode 100755
index 0000000..f685fda
--- /dev/null
+++ b/build/android/adb_gdb_android_webview_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start an Android WebView shell process and debug it.
+# See --help for details.
+#
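+# Typical use (assuming the WebView shell APK is already installed):
+#   build/android/adb_gdb_android_webview_shell --start
+#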
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.AwShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=AwShellApplication \
+    --package-name=org.chromium.android_webview.shell \
+    "$@"
diff --git a/build/android/adb_gdb_chrome_public b/build/android/adb_gdb_chrome_public
new file mode 100755
index 0000000..4366c83
--- /dev/null
+++ b/build/android/adb_gdb_chrome_public
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a ChromePublic process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=com.google.android.apps.chrome.Main
+"$PROGDIR"/adb_gdb \
+    --program-name=ChromePublic \
+    --package-name=org.chromium.chrome \
+    "$@"
diff --git a/build/android/adb_gdb_chrome_shell b/build/android/adb_gdb_chrome_shell
new file mode 100755
index 0000000..e5c8a30
--- /dev/null
+++ b/build/android/adb_gdb_chrome_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a ChromeShell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.ChromeShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=ChromeShell \
+    --package-name=org.chromium.chrome.shell \
+    "$@"
diff --git a/build/android/adb_gdb_content_shell b/build/android/adb_gdb_content_shell
new file mode 100755
index 0000000..18e1a61
--- /dev/null
+++ b/build/android/adb_gdb_content_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a ContentShell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.ContentShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=ContentShell \
+    --package-name=org.chromium.content_shell_apk \
+    "$@"
diff --git a/build/android/adb_gdb_cronet_sample b/build/android/adb_gdb_cronet_sample
new file mode 100755
index 0000000..8d0c864
--- /dev/null
+++ b/build/android/adb_gdb_cronet_sample
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a CronetSample process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.CronetSampleActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=CronetSample \
+    --package-name=org.chromium.cronet_sample_apk \
+    "$@"
diff --git a/build/android/adb_gdb_mojo_shell b/build/android/adb_gdb_mojo_shell
new file mode 100755
index 0000000..ba91149
--- /dev/null
+++ b/build/android/adb_gdb_mojo_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a MojoShell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.MojoShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=MojoShell \
+    --package-name=org.chromium.mojo_shell_apk \
+    "$@"
diff --git a/build/android/adb_install_apk.py b/build/android/adb_install_apk.py
new file mode 100755
index 0000000..50faea7
--- /dev/null
+++ b/build/android/adb_install_apk.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility script to install APKs from the command line quickly."""
+
+import argparse
+import glob
+import logging
+import os
+import sys
+
+from pylib import constants
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import apk_helper
+from pylib.utils import run_tests_helper
+
+
+def main():
+  parser = argparse.ArgumentParser()
+
+  apk_group = parser.add_mutually_exclusive_group(required=True)
+  apk_group.add_argument('--apk', dest='apk_name',
+                         help='DEPRECATED The name of the apk containing the'
+                              ' application (with the .apk extension).')
+  apk_group.add_argument('apk_path', nargs='?',
+                         help='The path to the APK to install.')
+
+  # TODO(jbudorick): Remove once no clients pass --apk_package
+  parser.add_argument('--apk_package', help='DEPRECATED unused')
+  parser.add_argument('--split',
+                      action='append',
+                      dest='splits',
+                      help='A glob matching the apk splits. '
+                           'Can be specified multiple times.')
+  parser.add_argument('--keep_data',
+                      action='store_true',
+                      default=False,
+                      help='Keep the package data when installing '
+                           'the application.')
+  parser.add_argument('--debug', action='store_const', const='Debug',
+                      dest='build_type',
+                      default=os.environ.get('BUILDTYPE', 'Debug'),
+                      help='If set, run test suites under out/Debug. '
+                           'Default is env var BUILDTYPE or Debug')
+  parser.add_argument('--release', action='store_const', const='Release',
+                      dest='build_type',
+                      help='If set, run test suites under out/Release. '
+                           'Default is env var BUILDTYPE or Debug.')
+  parser.add_argument('-d', '--device', dest='device',
+                      help='Target device for apk to install on.')
+  parser.add_argument('-v', '--verbose', action='count',
+                      help='Enable verbose logging.')
+
+  args = parser.parse_args()
+
+  run_tests_helper.SetLogLevel(args.verbose)
+  constants.SetBuildType(args.build_type)
+
+  apk = args.apk_path or args.apk_name
+  if not apk.endswith('.apk'):
+    apk += '.apk'
+  if not os.path.exists(apk):
+    apk = os.path.join(constants.GetOutDirectory(), 'apks', apk)
+    if not os.path.exists(apk):
+      parser.error('%s not found.' % apk)
+
+  if args.splits:
+    splits = []
+    base_apk_package = apk_helper.ApkHelper(apk).GetPackageName()
+    for split_glob in args.splits:
+      apks = [f for f in glob.glob(split_glob) if f.endswith('.apk')]
+      if not apks:
+        logging.warning('No apks matched for %s.', split_glob)
+      for f in apks:
+        helper = apk_helper.ApkHelper(f)
+        if (helper.GetPackageName() == base_apk_package
+            and helper.GetSplitName()):
+          splits.append(f)
+
+  devices = device_utils.DeviceUtils.HealthyDevices()
+
+  if args.device:
+    devices = [d for d in devices if d == args.device]
+    if not devices:
+      raise device_errors.DeviceUnreachableError(args.device)
+  elif not devices:
+    raise device_errors.NoDevicesError()
+
+  def blacklisting_install(device):
+    try:
+      if args.splits:
+        device.InstallSplitApk(apk, splits, reinstall=args.keep_data)
+      else:
+        device.Install(apk, reinstall=args.keep_data)
+    except device_errors.CommandFailedError:
+      logging.exception('Failed to install %s', apk)
+      device_blacklist.ExtendBlacklist([str(device)])
+      logging.warning('Blacklisting %s', str(device))
+    except device_errors.CommandTimeoutError:
+      logging.exception('Timed out while installing %s', apk)
+      device_blacklist.ExtendBlacklist([str(device)])
+      logging.warning('Blacklisting %s', str(device))
+
+  device_utils.DeviceUtils.parallel(devices).pMap(blacklisting_install)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/adb_kill_android_webview_shell b/build/android/adb_kill_android_webview_shell
new file mode 100755
index 0000000..5f287f0
--- /dev/null
+++ b/build/android/adb_kill_android_webview_shell
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Kill a running android webview shell.
+#
+# Assumes you have sourced the build/android/envsetup.sh script.
+
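+# A matching 'adb shell ps' line looks roughly like (illustrative):
+#   u0_a52 12345 123 512000 80000 ffffffff 00000000 S org.chromium.android_webview.shell
+# i.e. the PID is the second whitespace-separated field.
+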
+SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.android_webview.shell')
+if [ -z "$SHELL_PID_LINES" ] ; then
+   echo "Not running android webview shell."
+else
+   SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
+   if [ "$SHELL_PID" != "" ] ; then
+      set -x
+      adb shell kill $SHELL_PID
+      set +x
+   else
+     echo "Android webview shell does not appear to be running."
+   fi
+fi
diff --git a/build/android/adb_kill_chrome_public b/build/android/adb_kill_chrome_public
new file mode 100755
index 0000000..5b539a0
--- /dev/null
+++ b/build/android/adb_kill_chrome_public
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Kill a running instance of ChromePublic.
+#
+# Assumes you have sourced the build/android/envsetup.sh script.
+
+SHELL_PID_LINES=$(adb shell ps | grep -w 'org.chromium.chrome')
+if [ -z "$SHELL_PID_LINES" ] ; then
+   echo "Not running ChromePublic."
+else
+   SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
+   if [ "$SHELL_PID" != "" ] ; then
+      set -x
+      adb shell kill $SHELL_PID
+      set +x
+   else
+     echo "ChromePublic does not appear to be running."
+   fi
+fi
diff --git a/build/android/adb_kill_chrome_shell b/build/android/adb_kill_chrome_shell
new file mode 100755
index 0000000..2b63c9a
--- /dev/null
+++ b/build/android/adb_kill_chrome_shell
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Kill a running chrome shell.
+#
+# Assumes you have sourced the build/android/envsetup.sh script.
+
+SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.chrome.shell')
+if [ -z "$SHELL_PID_LINES" ] ; then
+   echo "Not running Chrome shell."
+else
+   SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
+   if [ "$SHELL_PID" != "" ] ; then
+      set -x
+      adb shell kill $SHELL_PID
+      set +x
+   else
+     echo "Chrome shell does not appear to be running."
+   fi
+fi
diff --git a/build/android/adb_kill_content_shell b/build/android/adb_kill_content_shell
new file mode 100755
index 0000000..e379dd4
--- /dev/null
+++ b/build/android/adb_kill_content_shell
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Kill a running content shell.
+#
+# Assumes you have sourced the build/android/envsetup.sh script.
+
+SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.content_shell_apk')
+if [ -z "$SHELL_PID_LINES" ] ; then
+   echo "Not running Content shell."
+else
+   SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
+   if [ "$SHELL_PID" != "" ] ; then
+      set -x
+      adb shell kill $SHELL_PID
+      set +x
+   else
+     echo "Content shell does not appear to be running."
+   fi
+fi
diff --git a/build/android/adb_logcat_monitor.py b/build/android/adb_logcat_monitor.py
new file mode 100755
index 0000000..d3cc67d
--- /dev/null
+++ b/build/android/adb_logcat_monitor.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Saves logcats from all connected devices.
+
+Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>]
+
+This script will repeatedly poll adb for new devices and save logcats
+inside the <base_dir> directory, which it attempts to create.  The
+script will run until killed by an external signal.  To test, run the
+script in a shell and <Ctrl>-C it after a while.  It should be
+resilient across phone disconnects and reconnects and start the logcat
+early enough to not miss anything.
+"""
+
+import logging
+import os
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import time
+
+# Map from device_id -> (process, logcat_num)
+devices = {}
+
+
+class TimeoutException(Exception):
+  """Exception used to signal a timeout."""
+  pass
+
+
+class SigtermError(Exception):
+  """Exception used to catch a sigterm."""
+  pass
+
+
+def StartLogcatIfNecessary(device_id, adb_cmd, base_dir):
+  """Spawns a adb logcat process if one is not currently running."""
+  process, logcat_num = devices[device_id]
+  if process:
+    if process.poll() is None:
+      # Logcat process is still happily running
+      return
+    else:
+      logging.info('Logcat for device %s has died', device_id)
+      error_filter = re.compile('- waiting for device -')
+      for line in process.stderr:
+        if not error_filter.match(line):
+          logging.error(device_id + ':   ' + line)
+
+  logging.info('Starting logcat %d for device %s', logcat_num,
+               device_id)
+  logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num)
+  logcat_file = open(os.path.join(base_dir, logcat_filename), 'w')
+  process = subprocess.Popen([adb_cmd, '-s', device_id,
+                              'logcat', '-v', 'threadtime'],
+                             stdout=logcat_file,
+                             stderr=subprocess.PIPE)
+  devices[device_id] = (process, logcat_num + 1)
+
+
+def GetAttachedDevices(adb_cmd):
+  """Gets the device list from adb.
+
+  We use an alarm in this function to avoid deadlocking from an external
+  dependency.
+
+  Args:
+    adb_cmd: binary to run adb
+
+  Returns:
+    list of devices or an empty list on timeout
+  """
+  signal.alarm(2)
+  try:
+    out, err = subprocess.Popen([adb_cmd, 'devices'],
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE).communicate()
+    if err:
+      logging.warning('adb device error %s', err.strip())
+    return re.findall('^(\\S+)\tdevice$', out, re.MULTILINE)
+  except TimeoutException:
+    logging.warning('"adb devices" command timed out')
+    return []
+  except (IOError, OSError):
+    logging.exception('Exception from "adb devices"')
+    return []
+  finally:
+    signal.alarm(0)
+
+
+def main(base_dir, adb_cmd='adb'):
+  """Monitor adb forever.  Expects a SIGINT (Ctrl-C) to kill."""
+  # We create the directory to ensure 'run once' semantics
+  if os.path.exists(base_dir):
+    print 'adb_logcat_monitor: %s already exists? Cleaning' % base_dir
+    shutil.rmtree(base_dir, ignore_errors=True)
+
+  os.makedirs(base_dir)
+  logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'),
+                      level=logging.INFO,
+                      format='%(asctime)-2s %(levelname)-8s %(message)s')
+
+  # Set up the alarm for calling 'adb devices'. This is to ensure
+  # our script doesn't get stuck waiting for a process response
+  def TimeoutHandler(_signum, _unused_frame):
+    raise TimeoutException()
+  signal.signal(signal.SIGALRM, TimeoutHandler)
+
+  # Handle SIGTERMs to ensure clean shutdown
+  def SigtermHandler(_signum, _unused_frame):
+    raise SigtermError()
+  signal.signal(signal.SIGTERM, SigtermHandler)
+
+  logging.info('Started with pid %d', os.getpid())
+  pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+
+  try:
+    with open(pid_file_path, 'w') as f:
+      f.write(str(os.getpid()))
+    while True:
+      for device_id in GetAttachedDevices(adb_cmd):
+        if device_id not in devices:
+          subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c'])
+          devices[device_id] = (None, 0)
+
+      for device in devices:
+        # This will spawn logcat watchers for any device ever detected
+        StartLogcatIfNecessary(device, adb_cmd, base_dir)
+
+      time.sleep(5)
+  except SigtermError:
+    logging.info('Received SIGTERM, shutting down')
+  except: # pylint: disable=bare-except
+    logging.exception('Unexpected exception in main.')
+  finally:
+    for process, _ in devices.itervalues():
+      if process:
+        try:
+          process.terminate()
+        except OSError:
+          pass
+    os.remove(pid_file_path)
+
+
+if __name__ == '__main__':
+  if 2 <= len(sys.argv) <= 3:
+    print 'adb_logcat_monitor: Initializing'
+    sys.exit(main(*sys.argv[1:3]))
+
+  print 'Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0]
diff --git a/build/android/adb_logcat_printer.py b/build/android/adb_logcat_printer.py
new file mode 100755
index 0000000..55176ab
--- /dev/null
+++ b/build/android/adb_logcat_printer.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shutdown adb_logcat_monitor and print accumulated logs.
+
+To test, call './adb_logcat_printer.py <base_dir>' where
+<base_dir> contains 'adb logcat -v threadtime' files named as
+logcat_<deviceID>_<sequenceNum>
+
+The script will print the files to out, and will combine multiple
+logcats from a single device if there is overlap.
+
+Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
+will attempt to terminate the contained PID by sending a SIGINT and
+monitoring for the deletion of the aforementioned file.
+"""
+# pylint: disable=W0702
+
+import cStringIO
+import logging
+import optparse
+import os
+import re
+import signal
+import sys
+import time
+
+
+# Set this to debug for more verbose output
+LOG_LEVEL = logging.INFO
+
+
+def CombineLogFiles(list_of_lists, logger):
+  """Splices together multiple logcats from the same device.
+
+  Args:
+    list_of_lists: list of pairs (filename, list of timestamped lines)
+    logger: handler to log events
+
+  Returns:
+    list of lines with duplicates removed
+  """
+  cur_device_log = ['']
+  for cur_file, cur_file_lines in list_of_lists:
+    # Ignore files with just the logcat header
+    if len(cur_file_lines) < 2:
+      continue
+    common_index = 0
+    # Skip this step if list just has empty string
+    if len(cur_device_log) > 1:
+      try:
+        line = cur_device_log[-1]
+        # Used to make sure we only splice on a timestamped line
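+        # e.g. '01-02 13:45:59.123 ...' as produced by 'adb logcat -v
+        # threadtime' (illustrative line).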
+        if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} ', line):
+          common_index = cur_file_lines.index(line)
+        else:
+          logger.warning('splice error - no timestamp in "%s"?', line.strip())
+      except ValueError:
+        # The last line was valid but wasn't found in the next file
+        cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
+        logger.info('Unable to splice %s. Incomplete logcat?', cur_file)
+
+    cur_device_log += ['*'*30 + '  %s' % cur_file]
+    cur_device_log.extend(cur_file_lines[common_index:])
+
+  return cur_device_log
+
+
+def FindLogFiles(base_dir):
+  """Search a directory for logcat files.
+
+  Args:
+    base_dir: directory to search
+
+  Returns:
+    Mapping of device_id to a sorted list of file paths for a given device
+  """
+  logcat_filter = re.compile(r'^logcat_(\S+)_(\d+)$')
+  # list of tuples (<device_id>, <seq num>, <full file path>)
+  filtered_list = []
+  for cur_file in os.listdir(base_dir):
+    matcher = logcat_filter.match(cur_file)
+    if matcher:
+      filtered_list += [(matcher.group(1), int(matcher.group(2)),
+                         os.path.join(base_dir, cur_file))]
+  filtered_list.sort()
+  file_map = {}
+  for device_id, _, cur_file in filtered_list:
+    if device_id not in file_map:
+      file_map[device_id] = []
+
+    file_map[device_id] += [cur_file]
+  return file_map
+
+
+def GetDeviceLogs(log_filenames, logger):
+  """Read log files, combine and format.
+
+  Args:
+    log_filenames: mapping of device_id to sorted list of file paths
+    logger: logger handle for logging events
+
+  Returns:
+    list of formatted device logs, one for each device.
+  """
+  device_logs = []
+
+  for device, device_files in log_filenames.iteritems():
+    logger.debug('%s: %s', device, str(device_files))
+    device_file_lines = []
+    for cur_file in device_files:
+      with open(cur_file) as f:
+        device_file_lines += [(cur_file, f.read().splitlines())]
+    combined_lines = CombineLogFiles(device_file_lines, logger)
+    # Prepend each line with a short unique ID so it's easy to see
+    # when the device changes.  We don't use the start of the device
+    # ID because it can be the same among devices.  Example lines:
+    # AB324:  foo
+    # AB324:  blah
+    device_logs += [('\n' + device[-5:] + ':  ').join(combined_lines)]
+  return device_logs
+
+
+def ShutdownLogcatMonitor(base_dir, logger):
+  """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
+  try:
+    monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+    with open(monitor_pid_path) as f:
+      monitor_pid = int(f.readline())
+
+    logger.info('Sending SIGTERM to %d', monitor_pid)
+    os.kill(monitor_pid, signal.SIGTERM)
+    i = 0
+    while True:
+      time.sleep(.2)
+      if not os.path.exists(monitor_pid_path):
+        return
+      if not os.path.exists('/proc/%d' % monitor_pid):
+        logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
+        return
+      logger.info('Waiting for logcat process to terminate.')
+      i += 1
+      if i >= 10:
+        logger.warning('Monitor pid did not terminate. Continuing anyway.')
+        return
+
+  except (ValueError, IOError, OSError):
+    logger.exception('Error signaling logcat monitor - continuing')
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
+  parser.add_option('--output-path',
+                    help='Output file path (if unspecified, prints to stdout)')
+  options, args = parser.parse_args(argv)
+  if len(args) != 1:
+    parser.error('Wrong number of unparsed args')
+  base_dir = args[0]
+  if options.output_path:
+    output_file = open(options.output_path, 'w')
+  else:
+    output_file = sys.stdout
+
+  log_stringio = cStringIO.StringIO()
+  logger = logging.getLogger('LogcatPrinter')
+  logger.setLevel(LOG_LEVEL)
+  sh = logging.StreamHandler(log_stringio)
+  sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
+                                    ' %(message)s'))
+  logger.addHandler(sh)
+
+  try:
+    # Wait at least 5 seconds after base_dir is created before printing.
+    #
+    # The idea is that 'adb logcat > file' output consists of 2 phases:
+    #  1 Dump all the saved logs to the file
+    #  2 Stream log messages as they are generated
+    #
+    # We want to give enough time for phase 1 to complete.  There's no
+    # good method to tell how long to wait, but it usually only takes a
+    # second.  On most bots, this code path won't occur at all, since
+    # adb_logcat_monitor.py will have been spawned more than 5 seconds
+    # before this script is called.
+    try:
+      sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
+    except OSError:
+      sleep_time = 5
+    if sleep_time > 0:
+      logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
+      time.sleep(sleep_time)
+
+    assert os.path.exists(base_dir), '%s does not exist' % base_dir
+    ShutdownLogcatMonitor(base_dir, logger)
+    separator = '\n' + '*' * 80 + '\n\n'
+    for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
+      output_file.write(log)
+      output_file.write(separator)
+    with open(os.path.join(base_dir, 'eventlog')) as f:
+      output_file.write('\nLogcat Monitor Event Log\n')
+      output_file.write(f.read())
+  except:
+    logger.exception('Unexpected exception')
+
+  logger.info('Done.')
+  sh.flush()
+  output_file.write('\nLogcat Printer Event Log\n')
+  output_file.write(log_stringio.getvalue())
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/adb_profile_chrome b/build/android/adb_profile_chrome
new file mode 100755
index 0000000..21f6faf
--- /dev/null
+++ b/build/android/adb_profile_chrome
@@ -0,0 +1,8 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling in chrome.
+exec "$(dirname "$0")"/../../tools/profile_chrome.py "$@"
diff --git a/build/android/adb_reverse_forwarder.py b/build/android/adb_reverse_forwarder.py
new file mode 100755
index 0000000..3ce5359
--- /dev/null
+++ b/build/android/adb_reverse_forwarder.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command line tool for forwarding ports from a device to the host.
+
+Allows an Android device to connect to services running on the host machine,
+i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
+to be built.
+"""
+
+import logging
+import optparse
+import sys
+import time
+
+from pylib import constants
+from pylib import forwarder
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import run_tests_helper
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='Usage: %prog [options] device_port '
+                                 'host_port [device_port_2 host_port_2] ...',
+                                 description=__doc__)
+  parser.add_option('-v',
+                    '--verbose',
+                    dest='verbose_count',
+                    default=0,
+                    action='count',
+                    help='Verbose level (multiple times for more)')
+  parser.add_option('--device',
+                    help='Serial number of device we should use.')
+  parser.add_option('--debug', action='store_const', const='Debug',
+                    dest='build_type', default='Release',
+                    help='Use Debug build of host tools instead of Release.')
+
+  options, args = parser.parse_args(argv)
+  run_tests_helper.SetLogLevel(options.verbose_count)
+
+  if len(args) < 2 or not len(args) % 2:
+    parser.error('Need even number of port pairs')
+    sys.exit(1)
+
+  try:
+    port_pairs = map(int, args[1:])
+    port_pairs = zip(port_pairs[::2], port_pairs[1::2])
+  except ValueError:
+    parser.error('Bad port number')
+    sys.exit(1)
+
+  devices = device_utils.DeviceUtils.HealthyDevices()
+
+  if options.device:
+    device = next((d for d in devices if d == options.device), None)
+    if not device:
+      raise device_errors.DeviceUnreachableError(options.device)
+  elif devices:
+    device = devices[0]
+    logging.info('No device specified. Defaulting to %s', devices[0])
+  else:
+    raise device_errors.NoDevicesError()
+
+  constants.SetBuildType(options.build_type)
+  try:
+    forwarder.Forwarder.Map(port_pairs, device)
+    while True:
+      time.sleep(60)
+  except KeyboardInterrupt:
+    sys.exit(0)
+  finally:
+    forwarder.Forwarder.UnmapAllDevicePorts(device)
+
+if __name__ == '__main__':
+  main(sys.argv)
diff --git a/build/android/adb_run_android_webview_shell b/build/android/adb_run_android_webview_shell
new file mode 100755
index 0000000..1014a73
--- /dev/null
+++ b/build/android/adb_run_android_webview_shell
@@ -0,0 +1,12 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+
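+# The ${optional_url:+...} expansion below passes '-d "$optional_url"' to the
+# intent only when a URL argument was actually given.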
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.android_webview.shell/.AwShellActivity \
+  ${optional_url:+-d "$optional_url"}
diff --git a/build/android/adb_run_chrome_public b/build/android/adb_run_chrome_public
new file mode 100755
index 0000000..bf15071
--- /dev/null
+++ b/build/android/adb_run_chrome_public
@@ -0,0 +1,12 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.chrome/com.google.android.apps.chrome.Main \
+  ${optional_url:+-d "$optional_url"}
diff --git a/build/android/adb_run_chrome_shell b/build/android/adb_run_chrome_shell
new file mode 100755
index 0000000..79c4c32
--- /dev/null
+++ b/build/android/adb_run_chrome_shell
@@ -0,0 +1,12 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.chrome.shell/.ChromeShellActivity \
+  ${optional_url:+-d "$optional_url"}
diff --git a/build/android/adb_run_content_shell b/build/android/adb_run_content_shell
new file mode 100755
index 0000000..3f01f3b
--- /dev/null
+++ b/build/android/adb_run_content_shell
@@ -0,0 +1,12 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.content_shell_apk/.ContentShellActivity \
+  ${optional_url:+-d "$optional_url"}
diff --git a/build/android/adb_run_mojo_shell b/build/android/adb_run_mojo_shell
new file mode 100755
index 0000000..b585e4a
--- /dev/null
+++ b/build/android/adb_run_mojo_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+parameters=$2
+
+adb logcat -c
+adb shell am start -S \
+  -a android.intent.action.VIEW \
+  -n org.chromium.mojo_shell_apk/.MojoShellActivity \
+  ${parameters:+--esa parameters "$parameters"} \
+  ${optional_url:+-d "$optional_url"}
+adb logcat -s MojoShellApplication MojoShellActivity chromium
diff --git a/build/android/android_no_jni_exports.lst b/build/android/android_no_jni_exports.lst
new file mode 100644
index 0000000..ffc6cf7
--- /dev/null
+++ b/build/android/android_no_jni_exports.lst
@@ -0,0 +1,17 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script makes all JNI exported symbols local, to prevent the JVM from
+# being able to find them, enforcing use of manual JNI function registration.
+# This is used for all Android binaries by default, unless they explicitly state
+# that they want JNI exported symbols to remain visible, as we need to ensure
+# the manual registration path is correct to maintain compatibility with the
+# crazy linker.
+# Check ld version script manual:
+# https://sourceware.org/binutils/docs-2.24/ld/VERSION.html#VERSION
+
+{
+  local:
+    Java_*;
+};
diff --git a/build/android/ant/BUILD.gn b/build/android/ant/BUILD.gn
new file mode 100644
index 0000000..a30fb54
--- /dev/null
+++ b/build/android/ant/BUILD.gn
@@ -0,0 +1,13 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+copy("keystore") {
+  sources = [
+    "chromium-debug.keystore",
+  ]
+
+  outputs = [
+    "$root_out_dir/chromium-debug.keystore",
+  ]
+}
diff --git a/build/android/ant/apk-package.xml b/build/android/ant/apk-package.xml
new file mode 100644
index 0000000..e8b76f7
--- /dev/null
+++ b/build/android/ant/apk-package.xml
@@ -0,0 +1,96 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Copyright (C) 2005-2008 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+         http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+
+<project default="-package">
+  <property name="verbose" value="false" />
+  <property name="out.dir" location="${OUT_DIR}" />
+  <property name="out.absolute.dir" location="${out.dir}" />
+
+  <property name="sdk.dir" location="${ANDROID_SDK_ROOT}"/>
+  <property name="emma.device.jar" location="${EMMA_DEVICE_JAR}" />
+
+  <condition property="emma.enabled" value="true" else="false">
+    <equals arg1="${EMMA_INSTRUMENT}" arg2="1"/>
+  </condition>
+
+  <!-- jar file from where the tasks are loaded -->
+  <path id="android.antlibs">
+    <pathelement path="${sdk.dir}/tools/lib/ant-tasks.jar" />
+  </path>
+
+  <!-- Custom tasks -->
+  <taskdef resource="anttasks.properties" classpathref="android.antlibs" />
+
+  <condition property="build.target" value="release" else="debug">
+    <equals arg1="${CONFIGURATION_NAME}" arg2="Release" />
+  </condition>
+  <condition property="build.is.packaging.debug" value="true" else="false">
+    <equals arg1="${build.target}" arg2="debug" />
+  </condition>
+
+  <!-- Disables automatic signing. -->
+  <property name="build.is.signing.debug" value="false"/>
+
+  <!-- SDK tools assume that out.packaged.file is signed and name it "...-unaligned" -->
+  <property name="out.packaged.file" value="${UNSIGNED_APK_PATH}" />
+
+  <property name="native.libs.absolute.dir" location="${NATIVE_LIBS_DIR}" />
+
+  <!-- Intermediate files -->
+  <property name="resource.package.file.name" value="${RESOURCE_PACKAGED_APK_NAME}" />
+
+  <property name="intermediate.dex.file" location="${DEX_FILE_PATH}" />
+
+  <!-- Macro that enables passing a variable list of external jar files
+       to ApkBuilder. -->
+  <macrodef name="package-helper">
+    <element name="extra-jars" optional="yes" />
+    <sequential>
+      <apkbuilder
+          outfolder="${out.absolute.dir}"
+          resourcefile="${resource.package.file.name}"
+          apkfilepath="${out.packaged.file}"
+          debugpackaging="${build.is.packaging.debug}"
+          debugsigning="${build.is.signing.debug}"
+          verbose="${verbose}"
+          hascode="${HAS_CODE}"
+          previousBuildType="/"
+          buildType="${build.is.packaging.debug}/${build.is.signing.debug}">
+        <dex path="${intermediate.dex.file}"/>
+        <nativefolder path="${native.libs.absolute.dir}" />
+        <extra-jars/>
+      </apkbuilder>
+    </sequential>
+  </macrodef>
+
+
+  <!-- Packages the application. -->
+  <target name="-package">
+    <if condition="${emma.enabled}">
+      <then>
+        <package-helper>
+          <extra-jars>
+            <jarfile path="${emma.device.jar}" />
+          </extra-jars>
+        </package-helper>
+      </then>
+      <else>
+        <package-helper />
+      </else>
+    </if>
+  </target>
+</project>
diff --git a/build/android/ant/chromium-debug.keystore b/build/android/ant/chromium-debug.keystore
new file mode 100644
index 0000000..67eb0aa
--- /dev/null
+++ b/build/android/ant/chromium-debug.keystore
Binary files differ
diff --git a/build/android/ant/empty/res/.keep b/build/android/ant/empty/res/.keep
new file mode 100644
index 0000000..1fd038b
--- /dev/null
+++ b/build/android/ant/empty/res/.keep
@@ -0,0 +1,2 @@
+# This empty res folder can be passed to aapt while building Java libraries or
+# APKs that don't have any resources.
diff --git a/build/android/apkbuilder_action.gypi b/build/android/apkbuilder_action.gypi
new file mode 100644
index 0000000..27807d8
--- /dev/null
+++ b/build/android/apkbuilder_action.gypi
@@ -0,0 +1,79 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is a helper to java_apk.gypi. It should be used to create an
+# action that runs ApkBuilder via ANT.
+#
+# Required variables:
+#  apk_name - File name (minus path & extension) of the output apk.
+#  apk_path - Path to output apk.
+#  package_input_paths - Late-evaluated list of resource zips.
+#  native_libs_dir - Path to lib/ directory to use. Set to an empty directory
+#    if no native libs are needed.
+# Optional variables:
+#  has_code - Whether to include classes.dex in the apk.
+#  dex_path - Path to classes.dex. Used only when has_code=1.
+#  extra_inputs - List of extra action inputs.
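+#
+# A consuming .gyp target might pull this in roughly like so (illustrative
+# values, not taken from a real target):
+#   'actions': [{
+#     'variables': { 'apk_name': 'Foo', 'apk_path': '<(foo_apk_path)', ... },
+#     'includes': ['build/android/apkbuilder_action.gypi'],
+#   }]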
+{
+  'variables': {
+    'variables': {
+      'has_code%': 1,
+    },
+    'conditions': [
+      ['has_code == 0', {
+        'has_code_str': 'false',
+      }, {
+        'has_code_str': 'true',
+      }],
+    ],
+    'has_code%': '<(has_code)',
+    'extra_inputs%': [],
+    # Write the inputs list to a file, so that its mtime is updated when
+    # the list of inputs changes.
+    'inputs_list_file': '>|(apk_package.<(_target_name).<(apk_name).gypcmd >@(package_input_paths))',
+    'resource_packaged_apk_name': '<(apk_name)-resources.ap_',
+    'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)',
+  },
+  'action_name': 'apkbuilder_<(apk_name)',
+  'message': 'Packaging <(apk_name)',
+  'inputs': [
+    '<(DEPTH)/build/android/ant/apk-package.xml',
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/ant.py',
+    '<(resource_packaged_apk_path)',
+    '<@(extra_inputs)',
+    '>@(package_input_paths)',
+    '>(inputs_list_file)',
+  ],
+  'outputs': [
+    '<(apk_path)',
+  ],
+  'conditions': [
+    ['has_code == 1', {
+      'inputs': ['<(dex_path)'],
+      'action': [
+        '-DDEX_FILE_PATH=<(dex_path)',
+      ]
+    }],
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/ant.py',
+    '--',
+    '-quiet',
+    '-DHAS_CODE=<(has_code_str)',
+    '-DANDROID_SDK_ROOT=<(android_sdk_root)',
+    '-DANDROID_SDK_TOOLS=<(android_sdk_tools)',
+    '-DRESOURCE_PACKAGED_APK_NAME=<(resource_packaged_apk_name)',
+    '-DNATIVE_LIBS_DIR=<(native_libs_dir)',
+    '-DAPK_NAME=<(apk_name)',
+    '-DCONFIGURATION_NAME=<(CONFIGURATION_NAME)',
+    '-DOUT_DIR=<(intermediate_dir)',
+    '-DUNSIGNED_APK_PATH=<(apk_path)',
+    '-DEMMA_INSTRUMENT=<(emma_instrument)',
+    '-DEMMA_DEVICE_JAR=<(emma_device_jar)',
+    '-Dbasedir=.',
+    '-buildfile',
+    '<(DEPTH)/build/android/ant/apk-package.xml',
+  ]
+}
diff --git a/build/android/asan_symbolize.py b/build/android/asan_symbolize.py
new file mode 100755
index 0000000..10087a6
--- /dev/null
+++ b/build/android/asan_symbolize.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import collections
+import optparse
+import os
+import re
+import sys
+
+from pylib import constants
+
+# Uses symbol.py from third_party/android_platform, not python's.
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                            'third_party/android_platform/development/scripts'))
+import symbol
+
+
+_RE_ASAN = re.compile(r'(.*?)(#\S*?) (\S*?) \((.*?)\+(.*?)\)')
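+# Matches ASan stack-frame lines such as (illustrative):
+#   '    #0 0x7f3b2a (/system/lib/libchrome.so+0x123456)'
+# capturing the prefix, frame number, address, library and relative offset.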
+
+def _ParseAsanLogLine(line):
+  m = re.match(_RE_ASAN, line)
+  if not m:
+    return None
+  return {
+      'prefix': m.group(1),
+      'library': m.group(4),
+      'pos': m.group(2),
+      'rel_address': '%08x' % int(m.group(5), 16),
+  }
+
+
+def _FindASanLibraries():
+  asan_lib_dir = os.path.join(constants.DIR_SOURCE_ROOT,
+                              'third_party', 'llvm-build',
+                              'Release+Asserts', 'lib')
+  asan_libs = []
+  for src_dir, _, files in os.walk(asan_lib_dir):
+    asan_libs += [os.path.relpath(os.path.join(src_dir, f))
+                  for f in files
+                  if f.endswith('.so')]
+  return asan_libs
+
+
+def _TranslateLibPath(library, asan_libs):
+  for asan_lib in asan_libs:
+    if os.path.basename(library) == os.path.basename(asan_lib):
+      return '/' + asan_lib
+  return symbol.TranslateLibPath(library)
+
+
+def _Symbolize(asan_input):
+  asan_libs = _FindASanLibraries()
+  libraries = collections.defaultdict(list)
+  asan_lines = []
+  for asan_log_line in [a.rstrip() for a in asan_input]:
+    m = _ParseAsanLogLine(asan_log_line)
+    if m:
+      libraries[m['library']].append(m)
+    asan_lines.append({'raw_log': asan_log_line, 'parsed': m})
+
+  all_symbols = collections.defaultdict(dict)
+  for library, items in libraries.iteritems():
+    libname = _TranslateLibPath(library, asan_libs)
+    lib_relative_addrs = set([i['rel_address'] for i in items])
+    info_dict = symbol.SymbolInformationForSet(libname,
+                                               lib_relative_addrs,
+                                               True)
+    if info_dict:
+      all_symbols[library]['symbols'] = info_dict
+
+  for asan_log_line in asan_lines:
+    m = asan_log_line['parsed']
+    if not m:
+      print asan_log_line['raw_log']
+      continue
+    if (m['library'] in all_symbols and
+        m['rel_address'] in all_symbols[m['library']]['symbols']):
+      s = all_symbols[m['library']]['symbols'][m['rel_address']][0]
+      print '%s%s %s %s' % (m['prefix'], m['pos'], s[0], s[1])
+    else:
+      print asan_log_line['raw_log']
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-l', '--logcat',
+                    help='File containing adb logcat output with ASan stacks. '
+                         'Use stdin if not specified.')
+  options, _ = parser.parse_args()
+  if options.logcat:
+    asan_input = file(options.logcat, 'r')
+  else:
+    asan_input = sys.stdin
+  _Symbolize(asan_input.readlines())
+
+
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/build/android/avd.py b/build/android/avd.py
new file mode 100755
index 0000000..c45544f
--- /dev/null
+++ b/build/android/avd.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Launches Android Virtual Devices with a set configuration for testing Chrome.
+
+The script will launch a specified number of Android Virtual Devices (AVDs).
+"""
+
+
+import install_emulator_deps
+import logging
+import optparse
+import os
+import re
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import emulator
+
+
+def main(argv):
+  # ANDROID_SDK_ROOT needs to be set to the location of the SDK used to launch
+  # the emulator to find the system images upon launch.
+  emulator_sdk = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk')
+  os.environ['ANDROID_SDK_ROOT'] = emulator_sdk
+
+  opt_parser = optparse.OptionParser(description='AVD script.')
+  opt_parser.add_option('--name', help='Optionally, name of an existing AVD '
+                        'to launch. If not specified, new AVDs will be '
+                        'created.')
+  opt_parser.add_option('-n', '--num', dest='emulator_count',
+                        help='Number of emulators to launch (default is 1).',
+                        type='int', default='1')
+  opt_parser.add_option('--abi', default='x86',
+                        help='Platform of emulators to launch (x86 default).')
+  opt_parser.add_option('--api-level', dest='api_level',
+                        help='API level for the image, e.g. 19 for Android 4.4',
+                        type='int', default=constants.ANDROID_SDK_VERSION)
+
+  options, _ = opt_parser.parse_args(argv[1:])
+
+  logging.basicConfig(level=logging.INFO,
+                      format='# %(asctime)-15s: %(message)s')
+  logging.root.setLevel(logging.INFO)
+
+  # Check if KVM is enabled for x86 AVDs and check for x86 system images.
+  # TODO(andrewhayden): Since we can fix all of these with
+  # install_emulator_deps, why don't we just run it?
+  if options.abi == 'x86':
+    if not install_emulator_deps.CheckKVM():
+      logging.critical('ERROR: KVM must be enabled in BIOS, and installed. '
+                       'Enable KVM in BIOS and run install_emulator_deps.py')
+      return 1
+    elif not install_emulator_deps.CheckX86Image(options.api_level):
+      logging.critical('ERROR: System image for x86 AVD not installed. Run '
+                       'install_emulator_deps.py')
+      return 1
+
+  if not install_emulator_deps.CheckSDK():
+    logging.critical('ERROR: Emulator SDK not installed. Run '
+                     'install_emulator_deps.py.')
+    return 1
+
+  # If an AVD is specified, check that the SDK has the required target. If
+  # not, check that the SDK has the desired target for the temporary AVDs.
+  api_level = options.api_level
+  if options.name:
+    android = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk', 'tools',
+                           'android')
+    avds_output = cmd_helper.GetCmdOutput([android, 'list', 'avd'])
+    names = re.findall(r'Name: (\w+)', avds_output)
+    api_levels = re.findall(r'API level (\d+)', avds_output)
+    try:
+      avd_index = names.index(options.name)
+    except ValueError:
+      logging.critical('ERROR: Specified AVD %s does not exist.' % options.name)
+      return 1
+    api_level = int(api_levels[avd_index])
+
+  if not install_emulator_deps.CheckSDKPlatform(api_level):
+    logging.critical('ERROR: Emulator SDK missing required target for API %d. '
+                     'Run install_emulator_deps.py.', api_level)
+    return 1
+
+  if options.name:
+    emulator.LaunchEmulator(options.name, options.abi)
+  else:
+    emulator.LaunchTempEmulators(options.emulator_count, options.abi,
+                                 options.api_level, True)
+
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/bb_run_sharded_steps.py b/build/android/bb_run_sharded_steps.py
new file mode 100755
index 0000000..6aeba5b
--- /dev/null
+++ b/build/android/bb_run_sharded_steps.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""DEPRECATED!
+TODO(bulach): remove me once all other repositories reference
+'test_runner.py perf' directly.
+"""
+
+import optparse
+import sys
+
+from pylib import cmd_helper
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--steps',
+                    help='A JSON file containing all the steps to be '
+                         'sharded.')
+  parser.add_option('--flaky_steps',
+                    help='A JSON file containing steps that are flaky and '
+                         'whose exit codes will be ignored.')
+  parser.add_option('-p', '--print_results',
+                    help='Only print the results of the previously '
+                         'executed step; do not run it again.')
+  options, _ = parser.parse_args(argv)
+  if options.print_results:
+    return cmd_helper.RunCmd(['build/android/test_runner.py', 'perf',
+                              '--print-step', options.print_results])
+  flaky_options = []
+  if options.flaky_steps:
+    flaky_options = ['--flaky-steps', options.flaky_steps]
+  return cmd_helper.RunCmd(['build/android/test_runner.py', 'perf', '-v',
+                            '--steps', options.steps] + flaky_options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/buildbot/OWNERS b/build/android/buildbot/OWNERS
new file mode 100644
index 0000000..f289720
--- /dev/null
+++ b/build/android/buildbot/OWNERS
@@ -0,0 +1,6 @@
+set noparent
+
+cmp@chromium.org
+jbudorick@chromium.org
+navabi@chromium.org
+
diff --git a/build/android/buildbot/bb_annotations.py b/build/android/buildbot/bb_annotations.py
new file mode 100644
index 0000000..059d673
--- /dev/null
+++ b/build/android/buildbot/bb_annotations.py
@@ -0,0 +1,46 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to print buildbot messages."""
+
+def PrintLink(label, url):
+  """Adds a link with name |label| linking to |url| to current buildbot step.
+
+  Args:
+    label: A string with the name of the label.
+    url: A string of the URL.
+  """
+  print '@@@STEP_LINK@%s@%s@@@' % (label, url)
+
+
+def PrintMsg(msg):
+  """Appends |msg| to the current buildbot step text.
+
+  Args:
+    msg: String to be appended.
+  """
+  print '@@@STEP_TEXT@%s@@@' % msg
+
+
+def PrintSummaryText(msg):
+  """Appends |msg| to main build summary. Visible from waterfall.
+
+  Args:
+    msg: String to be appended.
+  """
+  print '@@@STEP_SUMMARY_TEXT@%s@@@' % msg
+
+
+def PrintError():
+  """Marks the current step as failed."""
+  print '@@@STEP_FAILURE@@@'
+
+
+def PrintWarning():
+  """Marks the current step with a warning."""
+  print '@@@STEP_WARNINGS@@@'
+
+
+def PrintNamedStep(step):
+  print '@@@BUILD_STEP %s@@@' % step
diff --git a/build/android/buildbot/bb_device_status_check.py b/build/android/buildbot/bb_device_status_check.py
new file mode 100755
index 0000000..917c51e
--- /dev/null
+++ b/build/android/buildbot/bb_device_status_check.py
@@ -0,0 +1,404 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A class to keep track of devices across builds and report state."""
+import json
+import logging
+import optparse
+import os
+import psutil
+import re
+import signal
+import smtplib
+import subprocess
+import sys
+import time
+import urllib
+
+import bb_annotations
+import bb_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__),
+                             os.pardir, os.pardir, 'util', 'lib',
+                             'common'))
+import perf_tests_results_helper  # pylint: disable=F0401
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import constants
+from pylib.cmd_helper import GetCmdOutput
+from pylib.device import adb_wrapper
+from pylib.device import battery_utils
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+from pylib.device import device_list
+from pylib.device import device_utils
+from pylib.utils import run_tests_helper
+
+_RE_DEVICE_ID = re.compile(r'Device ID = (\d+)')
+
+def DeviceInfo(device, options):
+  """Gathers info on a device via various adb calls.
+
+  Args:
+    device: A DeviceUtils instance for the device to construct info about.
+    options: Command-line options; used to decide whether to skip the
+      provisioning check.
+
+  Returns:
+    Tuple of build product, build id, battery level, error messages, a
+    boolean indicating whether the device can be used for testing, and a
+    dict of collected device data.
+  """
+  battery = battery_utils.BatteryUtils(device)
+
+  build_product = ''
+  build_id = ''
+  battery_level = 100
+  errors = []
+  dev_good = True
+  json_data = {}
+
+  try:
+    build_product = device.build_product
+    build_id = device.build_id
+
+    json_data = {
+      'serial': device.adb.GetDeviceSerial(),
+      'type': build_product,
+      'build': build_id,
+      'build_detail': device.GetProp('ro.build.fingerprint'),
+      'battery': {},
+      'imei_slice': 'Unknown',
+      'wifi_ip': device.GetProp('dhcp.wlan0.ipaddress'),
+    }
+
+    battery_info = {}
+    try:
+      battery_info = battery.GetBatteryInfo(timeout=5)
+      battery_level = int(battery_info.get('level', battery_level))
+      json_data['battery'] = battery_info
+    except device_errors.CommandFailedError:
+      logging.exception('Failed to get battery information for %s', str(device))
+
+    try:
+      for l in device.RunShellCommand(['dumpsys', 'iphonesubinfo'],
+                                      check_return=True, timeout=5):
+        m = _RE_DEVICE_ID.match(l)
+        if m:
+          json_data['imei_slice'] = m.group(1)[-6:]
+    except device_errors.CommandFailedError:
+      logging.exception('Failed to get IMEI slice for %s', str(device))
+
+    if battery_level < 15:
+      errors += ['Device critically low in battery.']
+      dev_good = False
+      if not battery.GetCharging():
+        battery.SetCharging(True)
+    if not options.no_provisioning_check:
+      setup_wizard_disabled = (
+          device.GetProp('ro.setupwizard.mode') == 'DISABLED')
+      if not setup_wizard_disabled and device.build_type != 'user':
+        errors += ['Setup wizard not disabled. Was it provisioned correctly?']
+    if (device.product_name == 'mantaray' and
+        battery_info.get('AC powered', None) != 'true'):
+      errors += ['Mantaray device not connected to AC power.']
+  except device_errors.CommandFailedError:
+    logging.exception('Failure while getting device status.')
+    dev_good = False
+  except device_errors.CommandTimeoutError:
+    logging.exception('Timeout while getting device status.')
+    dev_good = False
+
+  return (build_product, build_id, battery_level, errors, dev_good, json_data)
+
+
+def CheckForMissingDevices(options, devices):
+  """Uses file of previous online devices to detect broken phones.
+
+  Args:
+    options: out_dir parameter of options argument is used as the base
+      directory to load and update the cache file.
+    devices: A list of DeviceUtils instances for the currently visible and
+      online attached devices.
+
+  Returns:
+    A list of error messages if known devices are missing or none are
+    attached; None otherwise.
+  """
+  out_dir = os.path.abspath(options.out_dir)
+  device_serials = set(d.adb.GetDeviceSerial() for d in devices)
+
+  # last_devices denotes all known devices prior to this run
+  last_devices_path = os.path.join(out_dir, device_list.LAST_DEVICES_FILENAME)
+  last_missing_devices_path = os.path.join(out_dir,
+      device_list.LAST_MISSING_DEVICES_FILENAME)
+  try:
+    last_devices = device_list.GetPersistentDeviceList(last_devices_path)
+  except IOError:
+    # Ignore error, file might not exist
+    last_devices = []
+
+  try:
+    last_missing_devices = device_list.GetPersistentDeviceList(
+        last_missing_devices_path)
+  except IOError:
+    last_missing_devices = []
+
+  missing_devs = list(set(last_devices) - device_serials)
+  new_missing_devs = list(set(missing_devs) - set(last_missing_devices))
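+  # E.g. (hypothetical serials): if the last run saw {'A', 'B', 'C'} and this
+  # run sees only {'A'}, then missing_devs == ['B', 'C']; if 'B' was already
+  # in last_missing_devices, only 'C' is newly missing and triggers the alert
+  # email below.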
+
+  if new_missing_devs and os.environ.get('BUILDBOT_SLAVENAME'):
+    logging.info('new_missing_devs: %s', new_missing_devs)
+    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
+    bb_annotations.PrintSummaryText(devices_missing_msg)
+
+    from_address = 'chrome-bot@chromium.org'
+    to_addresses = ['chrome-labs-tech-ticket@google.com',
+                    'chrome-android-device-alert@google.com']
+    cc_addresses = ['chrome-android-device-alert@google.com']
+    subject = 'Devices offline on %s, %s, %s' % (
+      os.environ.get('BUILDBOT_SLAVENAME'),
+      os.environ.get('BUILDBOT_BUILDERNAME'),
+      os.environ.get('BUILDBOT_BUILDNUMBER'))
+    msg = ('Please reboot the following devices:\n%s' %
+           '\n'.join(map(str, new_missing_devs)))
+    SendEmail(from_address, to_addresses, cc_addresses, subject, msg)
+
+  all_known_devices = list(device_serials | set(last_devices))
+  device_list.WritePersistentDeviceList(last_devices_path, all_known_devices)
+  device_list.WritePersistentDeviceList(last_missing_devices_path, missing_devs)
+
+  if not all_known_devices:
+    # This can happen if for some reason the .last_devices file is not
+    # present or if it was empty.
+    return ['No online devices. Have any devices been plugged in?']
+  if missing_devs:
+    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
+    bb_annotations.PrintSummaryText(devices_missing_msg)
+    return ['Current online devices: %s' % ', '.join(device_serials),
+            '%s are no longer visible. Were they removed?' % missing_devs]
+  else:
+    new_devs = device_serials - set(last_devices)
+    if new_devs and os.path.exists(last_devices_path):
+      bb_annotations.PrintWarning()
+      bb_annotations.PrintSummaryText(
+          '%d new devices detected' % len(new_devs))
+      logging.info('New devices detected:')
+      for d in new_devs:
+        logging.info('  %s', d)
+
+
+def SendEmail(from_address, to_addresses, cc_addresses, subject, msg):
+  msg_body = '\r\n'.join(['From: %s' % from_address,
+                          'To: %s' % ', '.join(to_addresses),
+                          'CC: %s' % ', '.join(cc_addresses),
+                          'Subject: %s' % subject, '', msg])
+  try:
+    server = smtplib.SMTP('localhost')
+    # Include CC recipients in the envelope, not just the header, so they
+    # actually receive the mail.
+    server.sendmail(from_address, to_addresses + cc_addresses, msg_body)
+    server.quit()
+  except Exception:
+    logging.exception('Failed to send alert email.')
+
+
+def RestartUsb():
+  """Restarts USB connections, leaf devices first. Returns True on success."""
+  if not os.path.isfile('/usr/bin/restart_usb'):
+    logging.error('Could not restart usb: /usr/bin/restart_usb not '
+                  'installed on host (see BUG=305769).')
+    return False
+
+  lsusb_proc = bb_utils.SpawnCmd(['lsusb'], stdout=subprocess.PIPE)
+  lsusb_output, _ = lsusb_proc.communicate()
+  if lsusb_proc.returncode:
+    logging.error('Could not get list of USB ports (i.e. lsusb).')
+    # Return False rather than the non-zero return code, which is truthy and
+    # would be mistaken for success by callers.
+    return False
+
+  usb_devices = [re.findall(r'Bus (\d\d\d) Device (\d\d\d)', lsusb_line)[0]
+                 for lsusb_line in lsusb_output.strip().split('\n')]
+
+  all_restarted = True
+  # Walk USB devices from leaves up (i.e. reverse sorted), restarting each
+  # connection. If a parent node (e.g. a USB hub) were restarted before the
+  # devices connected to it, the (bus, dev) numbers for those devices could
+  # change, invalidating the list we just captured. This way we restart the
+  # devices before the hub.
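+  # E.g. with (bus, dev) pairs [('001', '001'), ('001', '004'), ('002', '003')]
+  # (hypothetical), the loop visits ('002', '003') then ('001', '004') and
+  # skips the root hub ('001', '001').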
+  for (bus, dev) in reversed(sorted(usb_devices)):
+    # Cannot restart root USB connections.
+    if dev != '001':
+      return_code = bb_utils.RunCmd(['/usr/bin/restart_usb', bus, dev])
+      if return_code:
+        logging.error('Error restarting USB device /dev/bus/usb/%s/%s',
+                      bus, dev)
+        all_restarted = False
+      else:
+        logging.info('Restarted USB device /dev/bus/usb/%s/%s', bus, dev)
+
+  return all_restarted
+
+
+def KillAllAdb():
+  """Kills all adb processes, escalating SIGTERM, SIGQUIT, then SIGKILL."""
+  def GetAllAdb():
+    for p in psutil.process_iter():
+      try:
+        if 'adb' in p.name:
+          yield p
+      except (psutil.NoSuchProcess, psutil.AccessDenied):
+        pass
+
+  for sig in [signal.SIGTERM, signal.SIGQUIT, signal.SIGKILL]:
+    for p in GetAllAdb():
+      try:
+        logging.info('kill %d %d (%s [%s])', sig, p.pid, p.name,
+                     ' '.join(p.cmdline))
+        p.send_signal(sig)
+      except (psutil.NoSuchProcess, psutil.AccessDenied):
+        pass
+  for p in GetAllAdb():
+    try:
+      logging.error('Unable to kill %d (%s [%s])', p.pid, p.name,
+                    ' '.join(p.cmdline))
+    except (psutil.NoSuchProcess, psutil.AccessDenied):
+      pass
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--out-dir',
+                    help='Directory where the device path is stored',
+                    default=os.path.join(constants.DIR_SOURCE_ROOT, 'out'))
+  parser.add_option('--no-provisioning-check', action='store_true',
+                    help='Will not check if devices are provisioned properly.')
+  parser.add_option('--device-status-dashboard', action='store_true',
+                    help='Output device status data for dashboard.')
+  parser.add_option('--restart-usb', action='store_true',
+                    help='Restart USB ports before running device check.')
+  parser.add_option('--json-output',
+                    help='Output JSON information into a specified file.')
+  parser.add_option('-v', '--verbose', action='count', default=1,
+                    help='Log more information.')
+
+  options, args = parser.parse_args()
+  if args:
+    parser.error('Unknown options %s' % args)
+
+  run_tests_helper.SetLogLevel(options.verbose)
+
+  # Remove the last build's "bad devices" before checking device statuses.
+  device_blacklist.ResetBlacklist()
+
+  try:
+    expected_devices = device_list.GetPersistentDeviceList(
+        os.path.join(options.out_dir, device_list.LAST_DEVICES_FILENAME))
+  except IOError:
+    expected_devices = []
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  device_serials = [d.adb.GetDeviceSerial() for d in devices]
+  # Only restart usb if devices are missing.
+  if set(expected_devices) != set(device_serials):
+    logging.warning('expected_devices: %s', expected_devices)
+    logging.warning('devices: %s', device_serials)
+    KillAllAdb()
+    retries = 5
+    usb_restarted = True
+    if options.restart_usb:
+      if not RestartUsb():
+        usb_restarted = False
+        bb_annotations.PrintWarning()
+        logging.error('USB reset stage failed, '
+                      'wait for any device to come back.')
+    while retries:
+      logging.info('retry adb devices...')
+      time.sleep(1)
+      devices = device_utils.DeviceUtils.HealthyDevices()
+      device_serials = [d.adb.GetDeviceSerial() for d in devices]
+      if set(expected_devices) == set(device_serials):
+        # All devices are online, keep going.
+        break
+      if not usb_restarted and devices:
+        # The USB wasn't restarted, but there's at least one device online.
+        # No point in trying to wait for all devices.
+        break
+      retries -= 1
+
+  types, builds, batteries, errors, devices_ok, json_data = (
+      [], [], [], [], [], [])
+  if devices:
+    types, builds, batteries, errors, devices_ok, json_data = (
+        zip(*[DeviceInfo(dev, options) for dev in devices]))
+
+  # Write device info to file for buildbot info display.
+  if os.path.exists('/home/chrome-bot'):
+    with open('/home/chrome-bot/.adb_device_info', 'w') as f:
+      for device in json_data:
+        try:
+          f.write('%s %s %s %.1fC %s%%\n' % (device['serial'], device['type'],
+              device['build'], float(device['battery']['temperature']) / 10,
+              device['battery']['level']))
+        except Exception:
+          # Skip devices with missing or malformed battery data.
+          pass
+
+  err_msg = CheckForMissingDevices(options, devices) or []
+
+  unique_types = list(set(types))
+  unique_builds = list(set(builds))
+
+  bb_annotations.PrintMsg('Online devices: %d. Device types %s, builds %s'
+                           % (len(devices), unique_types, unique_builds))
+
+  for j in json_data:
+    logging.info('Device %s (%s)', j.get('serial'), j.get('type'))
+    logging.info('  Build: %s (%s)', j.get('build'), j.get('build_detail'))
+    logging.info('  Current Battery Service state:')
+    for k, v in j.get('battery', {}).iteritems():
+      logging.info('    %s: %s', k, v)
+    logging.info('  IMEI slice: %s', j.get('imei_slice'))
+    logging.info('  WiFi IP: %s', j.get('wifi_ip'))
+
+  for dev, dev_errors in zip(devices, errors):
+    if dev_errors:
+      err_msg += ['%s errors:' % str(dev)]
+      err_msg += ['    %s' % error for error in dev_errors]
+
+  if err_msg:
+    bb_annotations.PrintWarning()
+    for e in err_msg:
+      logging.error(e)
+    from_address = 'buildbot@chromium.org'
+    to_addresses = ['chromium-android-device-alerts@google.com']
+    bot_name = os.environ.get('BUILDBOT_BUILDERNAME')
+    slave_name = os.environ.get('BUILDBOT_SLAVENAME')
+    subject = 'Device status check errors on %s, %s.' % (slave_name, bot_name)
+    SendEmail(from_address, to_addresses, [], subject, '\n'.join(err_msg))
+
+  if options.device_status_dashboard:
+    offline_devices = [
+        device_utils.DeviceUtils(a)
+        for a in adb_wrapper.AdbWrapper.Devices(is_ready=False)
+        if a.GetState() == 'offline']
+
+    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OnlineDevices',
+                                              [len(devices)], 'devices')
+    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OfflineDevices',
+                                              [len(offline_devices)], 'devices',
+                                              'unimportant')
+    for dev, battery in zip(devices, batteries):
+      perf_tests_results_helper.PrintPerfResult('DeviceBattery', str(dev),
+                                                [battery], '%',
+                                                'unimportant')
+
+  if options.json_output:
+    with open(options.json_output, 'wb') as f:
+      f.write(json.dumps(json_data, indent=4))
+
+  num_failed_devs = 0
+  for device_ok, device in zip(devices_ok, devices):
+    if not device_ok:
+      logging.warning('Blacklisting %s', str(device))
+      device_blacklist.ExtendBlacklist([str(device)])
+      num_failed_devs += 1
+
+  # Check the no-devices case first; otherwise 0 == len(devices) would match
+  # the all-devices-failed branch below and exit code 1 would be unreachable.
+  if not devices:
+    return 1
+
+  if num_failed_devs == len(devices):
+    return 2
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/buildbot/bb_device_steps.py b/build/android/buildbot/bb_device_steps.py
new file mode 100755
index 0000000..8ad42b9
--- /dev/null
+++ b/build/android/buildbot/bb_device_steps.py
@@ -0,0 +1,796 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import glob
+import hashlib
+import json
+import os
+import random
+import re
+import shutil
+import sys
+
+import bb_utils
+import bb_annotations
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+import provision_devices
+from pylib import constants
+from pylib.device import device_utils
+from pylib.gtest import gtest_config
+
+CHROME_SRC_DIR = bb_utils.CHROME_SRC
+DIR_BUILD_ROOT = os.path.dirname(CHROME_SRC_DIR)
+CHROME_OUT_DIR = bb_utils.CHROME_OUT_DIR
+BLINK_SCRIPTS_DIR = 'third_party/WebKit/Tools/Scripts'
+
+SLAVE_SCRIPTS_DIR = os.path.join(bb_utils.BB_BUILD_DIR, 'scripts', 'slave')
+LOGCAT_DIR = os.path.join(bb_utils.CHROME_OUT_DIR, 'logcat')
+GS_URL = 'https://storage.googleapis.com'
+GS_AUTH_URL = 'https://storage.cloud.google.com'
+
+# Describes an instrumentation test suite:
+#   name: Name of the test we're running.
+#   apk: apk to be installed.
+#   apk_package: package for the apk to be installed.
+#   test_apk: apk to run tests on.
+#   test_data: data folder in format destination:source.
+#   isolate_file_path: Path to the suite's .isolate file, if any.
+#   host_driven_root: The host-driven test root directory.
+#   annotation: Annotation of the tests to include.
+#   exclude_annotation: The annotation of the tests to exclude.
+#   extra_flags: Extra flags to pass to the test runner.
+I_TEST = collections.namedtuple('InstrumentationTest', [
+    'name', 'apk', 'apk_package', 'test_apk', 'test_data', 'isolate_file_path',
+    'host_driven_root', 'annotation', 'exclude_annotation', 'extra_flags'])
+
+
+def SrcPath(*path):
+  return os.path.join(CHROME_SRC_DIR, *path)
+
+
+def I(name, apk, apk_package, test_apk, test_data, isolate_file_path=None,
+      host_driven_root=None, annotation=None, exclude_annotation=None,
+      extra_flags=None):
+  return I_TEST(name, apk, apk_package, test_apk, test_data, isolate_file_path,
+                host_driven_root, annotation, exclude_annotation, extra_flags)
+
+INSTRUMENTATION_TESTS = dict((suite.name, suite) for suite in [
+    I('ContentShell',
+      'ContentShell.apk',
+      'org.chromium.content_shell_apk',
+      'ContentShellTest',
+      'content:content/test/data/android/device_files',
+      isolate_file_path='content/content_shell_test_apk.isolate'),
+    I('ChromeShell',
+      'ChromeShell.apk',
+      'org.chromium.chrome.shell',
+      'ChromeShellTest',
+      'chrome:chrome/test/data/android/device_files',
+      isolate_file_path='chrome/chrome_shell_test_apk.isolate',
+      host_driven_root=constants.CHROME_SHELL_HOST_DRIVEN_DIR),
+    I('AndroidWebView',
+      'AndroidWebView.apk',
+      'org.chromium.android_webview.shell',
+      'AndroidWebViewTest',
+      'webview:android_webview/test/data/device_files',
+      isolate_file_path='android_webview/android_webview_test_apk.isolate'),
+    I('ChromeSyncShell',
+      'ChromeSyncShell.apk',
+      'org.chromium.chrome.browser.sync',
+      'ChromeSyncShellTest',
+      None),
+    ])
+
+InstallablePackage = collections.namedtuple('InstallablePackage', [
+    'name', 'apk', 'apk_package'])
+
+INSTALLABLE_PACKAGES = dict((package.name, package) for package in (
+    [InstallablePackage(i.name, i.apk, i.apk_package)
+     for i in INSTRUMENTATION_TESTS.itervalues()] +
+    [InstallablePackage('ChromeDriverWebViewShell',
+                        'ChromeDriverWebViewShell.apk',
+                        'org.chromium.chromedriver_webview_shell')]))
+
+VALID_TESTS = set([
+    'base_junit_tests',
+    'chromedriver',
+    'chrome_proxy',
+    'components_browsertests',
+    'gfx_unittests',
+    'gl_unittests',
+    'gpu',
+    'python_unittests',
+    'telemetry_unittests',
+    'telemetry_perf_unittests',
+    'ui',
+    'unit',
+    'webkit',
+    'webkit_layout'
+])
+
+RunCmd = bb_utils.RunCmd
+
+
+def _GetRevision(options):
+  """Get the SVN revision number.
+
+  Args:
+    options: options object.
+
+  Returns:
+    The revision number.
+  """
+  revision = options.build_properties.get('got_revision')
+  if not revision:
+    revision = options.build_properties.get('revision', 'testing')
+  return revision
+
+
+def _RunTest(options, cmd, suite):
+  """Run test command with runtest.py.
+
+  Args:
+    options: options object.
+    cmd: the command to run.
+    suite: test name.
+  """
+  property_args = bb_utils.EncodeProperties(options)
+  args = [os.path.join(SLAVE_SCRIPTS_DIR, 'runtest.py')] + property_args
+  args += ['--test-platform', 'android']
+  if options.factory_properties.get('generate_gtest_json'):
+    args.append('--generate-json-file')
+    args += ['-o', 'gtest-results/%s' % suite,
+             '--annotate', 'gtest',
+             '--build-number', str(options.build_properties.get('buildnumber',
+                                                                '')),
+             '--builder-name', options.build_properties.get('buildername', '')]
+  if options.target == 'Release':
+    args += ['--target', 'Release']
+  else:
+    args += ['--target', 'Debug']
+  if options.flakiness_server:
+    args += ['--flakiness-dashboard-server=%s' %
+                options.flakiness_server]
+  args += cmd
+  RunCmd(args, cwd=DIR_BUILD_ROOT)
+
+
+def RunTestSuites(options, suites, suites_options=None):
+  """Manages an invocation of test_runner.py for gtests.
+
+  Args:
+    options: options object.
+    suites: List of suite names to run.
+    suites_options: Command line options dictionary for particular suites.
+                    For example,
+                    {'content_browsertests': ['--num_retries=1', '--release']}
+                    will add the options only to content_browsertests.
+  """
+
+  if not suites_options:
+    suites_options = {}
+
+  args = ['--verbose']
+  if options.target == 'Release':
+    args.append('--release')
+  if options.asan:
+    args.append('--tool=asan')
+  if options.gtest_filter:
+    args.append('--gtest-filter=%s' % options.gtest_filter)
+
+  for suite in suites:
+    bb_annotations.PrintNamedStep(suite)
+    cmd = [suite] + args
+    cmd += suites_options.get(suite, [])
+    if suite == 'content_browsertests' or suite == 'components_browsertests':
+      cmd.append('--num_retries=1')
+    _RunTest(options, cmd, suite)
+
+
+def RunJunitSuite(suite):
+  bb_annotations.PrintNamedStep(suite)
+  RunCmd(['build/android/test_runner.py', 'junit', '-s', suite])
+
+
+def RunChromeDriverTests(options):
+  """Run all the steps for running chromedriver tests."""
+  bb_annotations.PrintNamedStep('chromedriver_annotation')
+  RunCmd(['chrome/test/chromedriver/run_buildbot_steps.py',
+          '--android-packages=%s,%s,%s,%s' %
+          ('chrome_shell',
+           'chrome_stable',
+           'chrome_beta',
+           'chromedriver_webview_shell'),
+          '--revision=%s' % _GetRevision(options),
+          '--update-log'])
+
+
+def RunChromeProxyTests(options):
+  """Run the chrome_proxy tests.
+
+  Args:
+    options: options object.
+  """
+  InstallApk(options, INSTRUMENTATION_TESTS['ChromeShell'], False)
+  args = ['--browser', 'android-chrome-shell']
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  if devices:
+    args = args + ['--device', devices[0].adb.GetDeviceSerial()]
+  bb_annotations.PrintNamedStep('chrome_proxy')
+  RunCmd(['tools/chrome_proxy/run_tests'] + args)
+
+
+def RunTelemetryTests(options, step_name, run_tests_path):
+  """Runs either telemetry_perf_unittests or telemetry_unittests.
+
+  Args:
+    options: options object.
+    step_name: either 'telemetry_unittests' or 'telemetry_perf_unittests'
+    run_tests_path: path to run_tests script (tools/perf/run_tests for
+                    perf_unittests and tools/telemetry/run_tests for
+                    telemetry_unittests)
+  """
+  InstallApk(options, INSTRUMENTATION_TESTS['ChromeShell'], False)
+  args = ['--browser', 'android-chrome-shell']
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  if devices:
+    args = args + ['--device', 'android']
+  bb_annotations.PrintNamedStep(step_name)
+  RunCmd([run_tests_path] + args)
+
+
+def InstallApk(options, test, print_step=False):
+  """Install an apk to all phones.
+
+  Args:
+    options: options object
+    test: An I_TEST namedtuple
+    print_step: Print a buildbot step
+  """
+  if print_step:
+    bb_annotations.PrintNamedStep('install_%s' % test.name.lower())
+
+  args = ['--apk_package', test.apk_package]
+  if options.target == 'Release':
+    args.append('--release')
+  args.append(test.apk)
+
+  RunCmd(['build/android/adb_install_apk.py'] + args, halt_on_failure=True)
+
+
+def RunInstrumentationSuite(options, test, flunk_on_failure=True,
+                            python_only=False, official_build=False):
+  """Manages an invocation of test_runner.py for instrumentation tests.
+
+  Args:
+    options: options object
+    test: An I_TEST namedtuple
+    flunk_on_failure: Flunk the step if tests fail.
+    python_only: Run only host-driven Python tests.
+    official_build: Run official-build tests.
+  """
+  bb_annotations.PrintNamedStep('%s_instrumentation_tests' % test.name.lower())
+
+  if test.apk:
+    InstallApk(options, test)
+  args = ['--test-apk', test.test_apk, '--verbose']
+  if test.test_data:
+    args.extend(['--test_data', test.test_data])
+  if options.target == 'Release':
+    args.append('--release')
+  if options.asan:
+    args.append('--tool=asan')
+  if options.flakiness_server:
+    args.append('--flakiness-dashboard-server=%s' %
+                options.flakiness_server)
+  if options.coverage_bucket:
+    args.append('--coverage-dir=%s' % options.coverage_dir)
+  if test.isolate_file_path:
+    args.append('--isolate-file-path=%s' % test.isolate_file_path)
+  if test.host_driven_root:
+    args.append('--host-driven-root=%s' % test.host_driven_root)
+  if test.annotation:
+    args.extend(['-A', test.annotation])
+  if test.exclude_annotation:
+    args.extend(['-E', test.exclude_annotation])
+  if test.extra_flags:
+    args.extend(test.extra_flags)
+  if python_only:
+    args.append('-p')
+  if official_build:
+    # The option needs to be assigned 'True' as it does not have an action
+    # associated with it.
+    args.append('--official-build')
+
+  RunCmd(['build/android/test_runner.py', 'instrumentation'] + args,
+         flunk_on_failure=flunk_on_failure)
+
+
+def RunWebkitLint():
+  """Lint WebKit's TestExpectation files."""
+  bb_annotations.PrintNamedStep('webkit_lint')
+  RunCmd([SrcPath(os.path.join(BLINK_SCRIPTS_DIR, 'lint-test-expectations'))])
+
+
+def RunWebkitLayoutTests(options):
+  """Run layout tests on an actual device."""
+  bb_annotations.PrintNamedStep('webkit_tests')
+  cmd_args = [
+      '--no-show-results',
+      '--no-new-test-results',
+      '--full-results-html',
+      '--clobber-old-results',
+      '--exit-after-n-failures', '5000',
+      '--exit-after-n-crashes-or-timeouts', '100',
+      '--debug-rwt-logging',
+      '--results-directory', '../layout-test-results',
+      '--target', options.target,
+      '--builder-name', options.build_properties.get('buildername', ''),
+      '--build-number', str(options.build_properties.get('buildnumber', '')),
+      '--master-name', 'ChromiumWebkit',  # TODO: Get this from the cfg.
+      '--build-name', options.build_properties.get('buildername', ''),
+      '--platform=android']
+
+  for flag in 'test_results_server', 'driver_name', 'additional_driver_flag':
+    if flag in options.factory_properties:
+      cmd_args.extend(['--%s' % flag.replace('_', '-'),
+                       options.factory_properties.get(flag)])
+
+  for f in options.factory_properties.get('additional_expectations', []):
+    cmd_args.extend(
+        ['--additional-expectations=%s' % os.path.join(CHROME_SRC_DIR, *f)])
+
+  # TODO(dpranke): Remove this block after
+  # https://codereview.chromium.org/12927002/ lands.
+  for f in options.factory_properties.get('additional_expectations_files', []):
+    cmd_args.extend(
+        ['--additional-expectations=%s' % os.path.join(CHROME_SRC_DIR, *f)])
+
+  exit_code = RunCmd(
+      [SrcPath(os.path.join(BLINK_SCRIPTS_DIR, 'run-webkit-tests'))] + cmd_args)
+  if exit_code == 255: # test_run_results.UNEXPECTED_ERROR_EXIT_STATUS
+    bb_annotations.PrintMsg('?? (crashed or hung)')
+  elif exit_code == 254: # test_run_results.NO_DEVICES_EXIT_STATUS
+    bb_annotations.PrintMsg('?? (no devices found)')
+  elif exit_code == 253: # test_run_results.NO_TESTS_EXIT_STATUS
+    bb_annotations.PrintMsg('?? (no tests found)')
+  else:
+    full_results_path = os.path.join('..', 'layout-test-results',
+                                     'full_results.json')
+    if os.path.exists(full_results_path):
+      full_results = json.load(open(full_results_path))
+      unexpected_passes, unexpected_failures, unexpected_flakes = (
+          _ParseLayoutTestResults(full_results))
+      if unexpected_failures:
+        _PrintDashboardLink('failed', unexpected_failures.keys(),
+                            max_tests=25)
+      elif unexpected_passes:
+        _PrintDashboardLink('unexpected passes', unexpected_passes.keys(),
+                            max_tests=10)
+      if unexpected_flakes:
+        _PrintDashboardLink('unexpected flakes', unexpected_flakes.keys(),
+                            max_tests=10)
+
+      if exit_code == 0 and (unexpected_passes or unexpected_flakes):
+        # If exit_code != 0, RunCmd() will have already printed an error.
+        bb_annotations.PrintWarning()
+    else:
+      bb_annotations.PrintError()
+      bb_annotations.PrintMsg('?? (results missing)')
+
+  if options.factory_properties.get('archive_webkit_results', False):
+    bb_annotations.PrintNamedStep('archive_webkit_results')
+    base = 'https://storage.googleapis.com/chromium-layout-test-archives'
+    builder_name = options.build_properties.get('buildername', '')
+    build_number = str(options.build_properties.get('buildnumber', ''))
+    results_link = '%s/%s/%s/layout-test-results/results.html' % (
+        base, EscapeBuilderName(builder_name), build_number)
+    bb_annotations.PrintLink('results', results_link)
+    bb_annotations.PrintLink('(zip)', '%s/%s/%s/layout-test-results.zip' % (
+        base, EscapeBuilderName(builder_name), build_number))
+    gs_bucket = 'gs://chromium-layout-test-archives'
+    RunCmd([os.path.join(SLAVE_SCRIPTS_DIR, 'chromium',
+                         'archive_layout_test_results.py'),
+            '--results-dir', '../../layout-test-results',
+            '--build-number', build_number,
+            '--builder-name', builder_name,
+            '--gs-bucket', gs_bucket],
+            cwd=DIR_BUILD_ROOT)
+
+
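+# Classification sketch (hypothetical data): actual='FAIL PASS' with
+# expected='PASS' is counted as a flake (the retry matched an expectation);
+# actual='FAIL TIMEOUT' with expected='PASS' is a failure; a lone unexpected
+# 'PASS' is recorded as an unexpected pass.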
+def _ParseLayoutTestResults(results):
+  """Extract the failures from the test run."""
+  # Cloned from third_party/WebKit/Tools/Scripts/print-json-test-results
+  tests = _ConvertTrieToFlatPaths(results['tests'])
+  failures = {}
+  flakes = {}
+  passes = {}
+  for (test, result) in tests.iteritems():
+    if result.get('is_unexpected'):
+      actual_results = result['actual'].split()
+      expected_results = result['expected'].split()
+      if len(actual_results) > 1:
+        # We report the first failure type back, even if the second
+        # was more severe.
+        if actual_results[1] in expected_results:
+          flakes[test] = actual_results[0]
+        else:
+          failures[test] = actual_results[0]
+      elif actual_results[0] == 'PASS':
+        passes[test] = result
+      else:
+        failures[test] = actual_results[0]
+
+  return (passes, failures, flakes)
+
+
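+# Flattening sketch (hypothetical trie):
+#   _ConvertTrieToFlatPaths(
+#       {'fast': {'css': {'a.html': {'actual': 'PASS', 'expected': 'PASS'}}}})
+#   returns {'fast/css/a.html': {'actual': 'PASS', 'expected': 'PASS'}}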
+def _ConvertTrieToFlatPaths(trie, prefix=None):
+  """Flatten the trie of failures into a list."""
+  # Cloned from third_party/WebKit/Tools/Scripts/print-json-test-results
+  result = {}
+  for name, data in trie.iteritems():
+    if prefix:
+      name = prefix + '/' + name
+
+    if data and 'actual' not in data and 'expected' not in data:
+      result.update(_ConvertTrieToFlatPaths(data, name))
+    else:
+      result[name] = data
+
+  return result
+
+
+def _PrintDashboardLink(link_text, tests, max_tests):
+  """Add a link to the flakiness dashboard in the step annotations."""
+  if len(tests) > max_tests:
+    test_list_text = ' '.join(tests[:max_tests]) + ' and more'
+  else:
+    test_list_text = ' '.join(tests)
+
+  dashboard_base = ('http://test-results.appspot.com'
+                    '/dashboards/flakiness_dashboard.html#'
+                    'master=ChromiumWebkit&tests=')
+
+  bb_annotations.PrintLink('%d %s: %s' %
+                           (len(tests), link_text, test_list_text),
+                           dashboard_base + ','.join(tests))
+
+
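+# E.g. EscapeBuilderName('Android Tests (dbg)') returns 'Android_Tests__dbg_'
+# (hypothetical builder name).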
+def EscapeBuilderName(builder_name):
+  return re.sub('[ ()]', '_', builder_name)
+
+
+def SpawnLogcatMonitor():
+  shutil.rmtree(LOGCAT_DIR, ignore_errors=True)
+  bb_utils.SpawnCmd([
+      os.path.join(CHROME_SRC_DIR, 'build', 'android', 'adb_logcat_monitor.py'),
+      LOGCAT_DIR])
+
+  # Wait for logcat_monitor to pull existing logcat
+  RunCmd(['sleep', '5'])
+
+
+def ProvisionDevices(options):
+  bb_annotations.PrintNamedStep('provision_devices')
+
+  if not bb_utils.TESTING:
+    # Restart adb to work around bugs, sleep to wait for usb discovery.
+    device_utils.RestartServer()
+    RunCmd(['sleep', '1'])
+  provision_cmd = ['build/android/provision_devices.py', '-t', options.target]
+  if options.auto_reconnect:
+    provision_cmd.append('--auto-reconnect')
+  if options.skip_wipe:
+    provision_cmd.append('--skip-wipe')
+  if options.disable_location:
+    provision_cmd.append('--disable-location')
+  RunCmd(provision_cmd, halt_on_failure=True)
+
+
+def DeviceStatusCheck(options):
+  bb_annotations.PrintNamedStep('device_status_check')
+  cmd = ['build/android/buildbot/bb_device_status_check.py']
+  if options.restart_usb:
+    cmd.append('--restart-usb')
+  RunCmd(cmd, halt_on_failure=True)
+
+
+def GetDeviceSetupStepCmds():
+  return [
+      ('device_status_check', DeviceStatusCheck),
+      ('provision_devices', ProvisionDevices),
+  ]
+
+
+def RunUnitTests(options):
+  suites = gtest_config.STABLE_TEST_SUITES
+  if options.asan:
+    suites = [s for s in suites
+              if s not in gtest_config.ASAN_EXCLUDED_TEST_SUITES]
+  RunTestSuites(options, suites)
+
+
+def RunTelemetryUnitTests(options):
+  RunTelemetryTests(options, 'telemetry_unittests', 'tools/telemetry/run_tests')
+
+
+def RunTelemetryPerfUnitTests(options):
+  RunTelemetryTests(options, 'telemetry_perf_unittests', 'tools/perf/run_tests')
+
+
+def RunInstrumentationTests(options):
+  for test in INSTRUMENTATION_TESTS.itervalues():
+    RunInstrumentationSuite(options, test)
+
+
+def RunWebkitTests(options):
+  RunTestSuites(options, ['webkit_unit_tests', 'blink_heap_unittests'])
+  RunWebkitLint()
+
+
+def RunGPUTests(options):
+  revision = _GetRevision(options)
+  builder_name = options.build_properties.get('buildername', 'noname')
+
+  bb_annotations.PrintNamedStep('pixel_tests')
+  RunCmd(['content/test/gpu/run_gpu_test.py',
+          'pixel', '-v',
+          '--browser',
+          'android-content-shell',
+          '--build-revision',
+          str(revision),
+          '--upload-refimg-to-cloud-storage',
+          '--refimg-cloud-storage-bucket',
+          'chromium-gpu-archive/reference-images',
+          '--os-type',
+          'android',
+          '--test-machine-name',
+          EscapeBuilderName(builder_name)])
+
+  bb_annotations.PrintNamedStep('webgl_conformance_tests')
+  RunCmd(['content/test/gpu/run_gpu_test.py', '-v',
+          '--browser=android-content-shell', 'webgl_conformance',
+          '--webgl-conformance-version=1.0.1'])
+
+  bb_annotations.PrintNamedStep('android_webview_webgl_conformance_tests')
+  RunCmd(['content/test/gpu/run_gpu_test.py', '-v',
+          '--browser=android-webview-shell', 'webgl_conformance',
+          '--webgl-conformance-version=1.0.1'])
+
+  bb_annotations.PrintNamedStep('gpu_rasterization_tests')
+  RunCmd(['content/test/gpu/run_gpu_test.py',
+          'gpu_rasterization', '-v',
+          '--browser',
+          'android-content-shell',
+          '--build-revision',
+          str(revision),
+          '--test-machine-name',
+          EscapeBuilderName(builder_name)])
+
+
+def RunPythonUnitTests(_options):
+  for suite in constants.PYTHON_UNIT_TEST_SUITES:
+    bb_annotations.PrintNamedStep(suite)
+    RunCmd(['build/android/test_runner.py', 'python', '-s', suite])
+
+
+def GetTestStepCmds():
+  return [
+      ('base_junit_tests',
+          lambda _options: RunJunitSuite('base_junit_tests')),
+      ('chromedriver', RunChromeDriverTests),
+      ('chrome_proxy', RunChromeProxyTests),
+      ('components_browsertests',
+          lambda options: RunTestSuites(options, ['components_browsertests'])),
+      ('gfx_unittests',
+          lambda options: RunTestSuites(options, ['gfx_unittests'])),
+      ('gl_unittests',
+          lambda options: RunTestSuites(options, ['gl_unittests'])),
+      ('gpu', RunGPUTests),
+      ('python_unittests', RunPythonUnitTests),
+      ('telemetry_unittests', RunTelemetryUnitTests),
+      ('telemetry_perf_unittests', RunTelemetryPerfUnitTests),
+      ('ui', RunInstrumentationTests),
+      ('unit', RunUnitTests),
+      ('webkit', RunWebkitTests),
+      ('webkit_layout', RunWebkitLayoutTests),
+  ]
+
+
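+# Path sketch (hypothetical values): with gs_base_dir
+# 'chromium-code-coverage/java', builder 'fyi-tests' and revision 12345, the
+# result is roughly 'chromium-code-coverage/java/fyi-tests/12345/<sha1 hex>'.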
+def MakeGSPath(options, gs_base_dir):
+  revision = _GetRevision(options)
+  bot_id = options.build_properties.get('buildername', 'testing')
+  randhash = hashlib.sha1(str(random.random())).hexdigest()
+  gs_path = '%s/%s/%s/%s' % (gs_base_dir, bot_id, revision, randhash)
+  # Remove double slashes; they occur with blank revisions and confuse gsutil.
+  gs_path = re.sub('/+', '/', gs_path)
+  return gs_path
+
+def UploadHTML(options, gs_base_dir, dir_to_upload, link_text,
+               link_rel_path='index.html', gs_url=GS_URL):
+  """Uploads directory at |dir_to_upload| to Google Storage and output a link.
+
+  Args:
+    options: Command line options.
+    gs_base_dir: The Google Storage base directory (e.g.
+      'chromium-code-coverage/java')
+    dir_to_upload: Absolute path to the directory to be uploaded.
+    link_text: Link text to be displayed on the step.
+    link_rel_path: Link path relative to |dir_to_upload|.
+    gs_url: Google storage URL.
+  """
+  gs_path = MakeGSPath(options, gs_base_dir)
+  RunCmd([bb_utils.GSUTIL_PATH, 'cp', '-R', dir_to_upload, 'gs://%s' % gs_path])
+  bb_annotations.PrintLink(link_text,
+                           '%s/%s/%s' % (gs_url, gs_path, link_rel_path))
+
+
+def GenerateJavaCoverageReport(options):
+  """Generates an HTML coverage report using EMMA and uploads it."""
+  bb_annotations.PrintNamedStep('java_coverage_report')
+
+  coverage_html = os.path.join(options.coverage_dir, 'coverage_html')
+  RunCmd(['build/android/generate_emma_html.py',
+          '--coverage-dir', options.coverage_dir,
+          '--metadata-dir', os.path.join(CHROME_OUT_DIR, options.target),
+          '--cleanup',
+          '--output', os.path.join(coverage_html, 'index.html')])
+  return coverage_html
+
+
+def LogcatDump(options):
+  # Print logcat, kill logcat monitor
+  bb_annotations.PrintNamedStep('logcat_dump')
+  logcat_file = os.path.join(CHROME_OUT_DIR, options.target, 'full_log.txt')
+  RunCmd([SrcPath('build', 'android', 'adb_logcat_printer.py'),
+          '--output-path', logcat_file, LOGCAT_DIR])
+  gs_path = MakeGSPath(options, 'chromium-android/logcat_dumps')
+  RunCmd([bb_utils.GSUTIL_PATH, 'cp', '-z', 'txt', logcat_file,
+          'gs://%s' % gs_path])
+  bb_annotations.PrintLink('logcat dump', '%s/%s' % (GS_AUTH_URL, gs_path))
+
+
+def RunStackToolSteps(options):
+  """Run stack tool steps.
+
+  Stack tool is run for logcat dump, optionally for ASAN.
+  """
+  bb_annotations.PrintNamedStep('Run stack tool with logcat dump')
+  logcat_file = os.path.join(CHROME_OUT_DIR, options.target, 'full_log.txt')
+  RunCmd([os.path.join(CHROME_SRC_DIR, 'third_party', 'android_platform',
+          'development', 'scripts', 'stack'),
+          '--more-info', logcat_file])
+  if options.asan_symbolize:
+    bb_annotations.PrintNamedStep('Run stack tool for ASAN')
+    RunCmd([
+        os.path.join(CHROME_SRC_DIR, 'build', 'android', 'asan_symbolize.py'),
+        '-l', logcat_file])
+
+
+def GenerateTestReport(options):
+  bb_annotations.PrintNamedStep('test_report')
+  for report in glob.glob(
+      os.path.join(CHROME_OUT_DIR, options.target, 'test_logs', '*.log')):
+    RunCmd(['cat', report])
+    os.remove(report)
+
+
+def MainTestWrapper(options):
+  try:
+    # Spawn logcat monitor
+    SpawnLogcatMonitor()
+
+    # Run all device setup steps
+    for _, cmd in GetDeviceSetupStepCmds():
+      cmd(options)
+
+    if options.install:
+      for i in options.install:
+        install_obj = INSTALLABLE_PACKAGES[i]
+        InstallApk(options, install_obj, print_step=True)
+
+    if options.test_filter:
+      bb_utils.RunSteps(options.test_filter, GetTestStepCmds(), options)
+
+    if options.coverage_bucket:
+      coverage_html = GenerateJavaCoverageReport(options)
+      UploadHTML(options, '%s/java' % options.coverage_bucket, coverage_html,
+                 'Coverage Report')
+      shutil.rmtree(coverage_html, ignore_errors=True)
+
+    if options.experimental:
+      RunTestSuites(options, gtest_config.EXPERIMENTAL_TEST_SUITES)
+
+  finally:
+    # Run all post test steps
+    LogcatDump(options)
+    if not options.disable_stack_tool:
+      RunStackToolSteps(options)
+    GenerateTestReport(options)
+    # KillHostHeartbeat() has logic to check if heartbeat process is running,
+    # and kills only if it finds the process is running on the host.
+    provision_devices.KillHostHeartbeat()
+    if options.cleanup:
+      shutil.rmtree(os.path.join(CHROME_OUT_DIR, options.target),
+          ignore_errors=True)
+
+
+def GetDeviceStepsOptParser():
+  parser = bb_utils.GetParser()
+  parser.add_option('--experimental', action='store_true',
+                    help='Run experimental tests.')
+  parser.add_option('-f', '--test-filter', metavar='<filter>', default=[],
+                    action='append',
+                    help=('Run a test suite. Test suites: "%s"' %
+                          '", "'.join(VALID_TESTS)))
+  parser.add_option('--gtest-filter',
+                    help='Filter for running a subset of tests of a gtest test')
+  parser.add_option('--asan', action='store_true', help='Run tests with asan.')
+  parser.add_option('--install', metavar='<apk name>', action='append',
+                    help='Install an apk by name')
+  parser.add_option('--no-reboot', action='store_true',
+                    help='Do not reboot devices during provisioning.')
+  parser.add_option('--coverage-bucket',
+                    help=('Bucket name to store coverage results. Coverage is '
+                          'only run if this is set.'))
+  parser.add_option('--restart-usb', action='store_true',
+                    help='Restart usb ports before device status check.')
+  parser.add_option(
+      '--flakiness-server',
+      help=('The flakiness dashboard server to which the results should be '
+            'uploaded.'))
+  parser.add_option(
+      '--auto-reconnect', action='store_true',
+      help='Push script to device which restarts adbd on disconnections.')
+  parser.add_option('--skip-wipe', action='store_true',
+                    help='Do not wipe devices during provisioning.')
+  parser.add_option('--disable-location', action='store_true',
+                    help='Disable location settings.')
+  parser.add_option(
+      '--logcat-dump-output',
+      help='The logcat dump output will be "tee"-ed into this file')
+  # During perf bisect processing, a separate working directory is created
+  # under which builds are produced, so we should look for the relevant output
+  # file under that directory (/b/build/slave/<slave_name>/build/bisect/src/out).
+  parser.add_option(
+      '--chrome-output-dir',
+      help='Chrome output directory to be used while bisecting.')
+
+  parser.add_option('--disable-stack-tool', action='store_true',
+      help='Do not run stack tool.')
+  parser.add_option('--asan-symbolize', action='store_true',
+      help='Run stack tool for ASAN')
+  parser.add_option('--cleanup', action='store_true',
+      help='Delete out/<target> directory at the end of the run.')
+  return parser
+
+
+def main(argv):
+  parser = GetDeviceStepsOptParser()
+  options, args = parser.parse_args(argv[1:])
+
+  if args:
+    sys.exit('Unused args %s' % args)
+
+  unknown_tests = set(options.test_filter) - VALID_TESTS
+  if unknown_tests:
+    sys.exit('Unknown tests %s' % list(unknown_tests))
+
+  setattr(options, 'target', options.factory_properties.get('target', 'Debug'))
+
+  if options.chrome_output_dir:
+    global CHROME_OUT_DIR
+    global LOGCAT_DIR
+    CHROME_OUT_DIR = options.chrome_output_dir
+    LOGCAT_DIR = os.path.join(CHROME_OUT_DIR, 'logcat')
+
+  if options.coverage_bucket:
+    setattr(options, 'coverage_dir',
+            os.path.join(CHROME_OUT_DIR, options.target, 'coverage'))
+
+  MainTestWrapper(options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/buildbot/bb_host_steps.py b/build/android/buildbot/bb_host_steps.py
new file mode 100755
index 0000000..1e927fb
--- /dev/null
+++ b/build/android/buildbot/bb_host_steps.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import json
+import sys
+
+import bb_utils
+import bb_annotations
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import constants
+
+
+SLAVE_SCRIPTS_DIR = os.path.join(bb_utils.BB_BUILD_DIR, 'scripts', 'slave')
+VALID_HOST_TESTS = set(['check_webview_licenses'])
+
+DIR_BUILD_ROOT = os.path.dirname(constants.DIR_SOURCE_ROOT)
+
+# Shorthand for RunCmd, which is used extensively in this file.
+RunCmd = bb_utils.RunCmd
+
+
+def SrcPath(*path):
+  return os.path.join(constants.DIR_SOURCE_ROOT, *path)
+
+
+def CheckWebViewLicenses(_):
+  bb_annotations.PrintNamedStep('check_licenses')
+  RunCmd([SrcPath('android_webview', 'tools', 'webview_licenses.py'), 'scan'],
+         warning_code=1)
+
+
+def RunHooks(build_type):
+  RunCmd([SrcPath('build', 'landmines.py')])
+  build_path = SrcPath('out', build_type)
+  landmine_path = os.path.join(build_path, '.landmines_triggered')
+  clobber_env = os.environ.get('BUILDBOT_CLOBBER')
+  if clobber_env or os.path.isfile(landmine_path):
+    bb_annotations.PrintNamedStep('Clobber')
+    if not clobber_env:
+      print 'Clobbering due to triggered landmines:'
+      with open(landmine_path) as f:
+        print f.read()
+    RunCmd(['rm', '-rf', build_path])
+
+  bb_annotations.PrintNamedStep('runhooks')
+  RunCmd(['gclient', 'runhooks'], halt_on_failure=True)
+
+
+def Compile(options):
+  RunHooks(options.target)
+  cmd = [os.path.join(SLAVE_SCRIPTS_DIR, 'compile.py'),
+         '--build-tool=ninja',
+         '--compiler=goma',
+         '--target=%s' % options.target,
+         '--goma-dir=%s' % bb_utils.GOMA_DIR]
+  bb_annotations.PrintNamedStep('compile')
+  if options.build_targets:
+    build_targets = options.build_targets.split(',')
+    cmd += ['--build-args', ' '.join(build_targets)]
+  RunCmd(cmd, halt_on_failure=True, cwd=DIR_BUILD_ROOT)
+
+
+def ZipBuild(options):
+  bb_annotations.PrintNamedStep('zip_build')
+  RunCmd([
+      os.path.join(SLAVE_SCRIPTS_DIR, 'zip_build.py'),
+      '--src-dir', constants.DIR_SOURCE_ROOT,
+      '--exclude-files', 'lib.target,gen,android_webview,jingle_unittests']
+      + bb_utils.EncodeProperties(options), cwd=DIR_BUILD_ROOT)
+
+
+def ExtractBuild(options):
+  bb_annotations.PrintNamedStep('extract_build')
+  RunCmd([os.path.join(SLAVE_SCRIPTS_DIR, 'extract_build.py')]
+         + bb_utils.EncodeProperties(options), cwd=DIR_BUILD_ROOT)
+
+
+def BisectPerfRegression(options):
+  args = []
+  if options.extra_src:
+    args = ['--extra_src', options.extra_src]
+  RunCmd([SrcPath('tools', 'prepare-bisect-perf-regression.py'),
+          '-w', os.path.join(constants.DIR_SOURCE_ROOT, os.pardir)])
+  RunCmd([SrcPath('tools', 'run-bisect-perf-regression.py'),
+          '-w', os.path.join(constants.DIR_SOURCE_ROOT, os.pardir),
+          '--build-properties=%s' % json.dumps(options.build_properties)] +
+          args)
+
+
+def GetHostStepCmds():
+  return [
+      ('compile', Compile),
+      ('extract_build', ExtractBuild),
+      ('check_webview_licenses', CheckWebViewLicenses),
+      ('bisect_perf_regression', BisectPerfRegression),
+      ('zip_build', ZipBuild)
+  ]
+
+
+def GetHostStepsOptParser():
+  parser = bb_utils.GetParser()
+  parser.add_option('--steps', help='Comma separated list of host tests.')
+  parser.add_option('--build-targets', default='',
+                    help='Comma separated list of build targets.')
+  parser.add_option('--experimental', action='store_true',
+                    help='Indicate whether to compile experimental targets.')
+  parser.add_option('--extra_src', default='',
+                    help='Path to extra source file. If this is supplied, '
+                    'bisect script will use it to override default behavior.')
+
+  return parser
+
+
+def main(argv):
+  parser = GetHostStepsOptParser()
+  options, args = parser.parse_args(argv[1:])
+  if args:
+    sys.exit('Unused args %s' % args)
+
+  setattr(options, 'target', options.factory_properties.get('target', 'Debug'))
+  setattr(options, 'extra_src',
+          options.factory_properties.get('extra_src', ''))
+
+  if options.steps:
+    bb_utils.RunSteps(options.steps.split(','), GetHostStepCmds(), options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/buildbot/bb_run_bot.py b/build/android/buildbot/bb_run_bot.py
new file mode 100755
index 0000000..0c8a977
--- /dev/null
+++ b/build/android/buildbot/bb_run_bot.py
@@ -0,0 +1,320 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import copy
+import json
+import os
+import pipes
+import re
+import subprocess
+import sys
+
+import bb_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import constants
+
+
+CHROMIUM_COVERAGE_BUCKET = 'chromium-code-coverage'
+
+_BotConfig = collections.namedtuple(
+    'BotConfig', ['bot_id', 'host_obj', 'test_obj'])
+
+HostConfig = collections.namedtuple(
+    'HostConfig',
+    ['script', 'host_steps', 'extra_args', 'extra_gyp_defines', 'target_arch'])
+
+TestConfig = collections.namedtuple(
+    'TestConfig', ['script', 'tests', 'extra_args'])
+
+
+def BotConfig(bot_id, host_object, test_object=None):
+  return _BotConfig(bot_id, host_object, test_object)
+
+
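+# Diff sketch (hypothetical dicts): DictDiff({'A': '1'}, {'A': '2', 'B': '3'})
+# returns the lines:
+#   - A=1
+#   + A=2
+#   + B=3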
+def DictDiff(d1, d2):
+  diff = []
+  for key in sorted(set(d1.keys() + d2.keys())):
+    if key in d1 and d1[key] != d2.get(key):
+      diff.append('- %s=%s' % (key, pipes.quote(d1[key])))
+    if key in d2 and d2[key] != d1.get(key):
+      diff.append('+ %s=%s' % (key, pipes.quote(d2[key])))
+  return '\n'.join(diff)
+
+
+def GetEnvironment(host_obj, testing, extra_env_vars=None):
+  init_env = dict(os.environ)
+  init_env['GYP_GENERATORS'] = 'ninja'
+  if extra_env_vars:
+    init_env.update(extra_env_vars)
+  envsetup_cmd = '. build/android/envsetup.sh'
+  if testing:
+    # Skip envsetup to avoid presubmit dependence on android deps.
+    print 'Testing mode - skipping "%s"' % envsetup_cmd
+    envsetup_cmd = ':'
+  else:
+    print 'Running %s' % envsetup_cmd
+  proc = subprocess.Popen(['bash', '-exc',
+    envsetup_cmd + ' >&2; python build/android/buildbot/env_to_json.py'],
+    stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+    cwd=bb_utils.CHROME_SRC, env=init_env)
+  json_env, envsetup_output = proc.communicate()
+  if proc.returncode != 0:
+    print >> sys.stderr, 'FATAL Failure in envsetup.'
+    print >> sys.stderr, envsetup_output
+    sys.exit(1)
+  env = json.loads(json_env)
+  env['GYP_DEFINES'] = env.get('GYP_DEFINES', '') + \
+      ' OS=android fastbuild=1 use_goma=1 gomadir=%s' % bb_utils.GOMA_DIR
+  if host_obj.target_arch:
+    env['GYP_DEFINES'] += ' target_arch=%s' % host_obj.target_arch
+  extra_gyp = host_obj.extra_gyp_defines
+  if extra_gyp:
+    env['GYP_DEFINES'] += ' %s' % extra_gyp
+    if re.search('(asan|clang)=1', extra_gyp):
+      env.pop('CXX_target', None)
+
+  # Bots check out chrome in /b/build/slave/<name>/build/src.
+  build_internal_android = os.path.abspath(os.path.join(
+      bb_utils.CHROME_SRC, '..', '..', '..', '..', '..', 'build_internal',
+      'scripts', 'slave', 'android'))
+  if os.path.exists(build_internal_android):
+    env['PATH'] = os.pathsep.join([build_internal_android, env['PATH']])
+  return env
+
+
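+# Shape sketch (hypothetical config): a host_obj with host_steps=['compile']
+# and a test_obj with tests=['unit'] yield roughly
+#   [[host_script, '--steps=compile'] + property_args,
+#    [test_script] + property_args + ['-f', 'unit']]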
+def GetCommands(options, bot_config):
+  """Get a formatted list of commands.
+
+  Args:
+    options: Options object.
+    bot_config: A BotConfig named tuple.
+
+  Returns:
+    A list of command lines, each a list of argv strings.
+  """
+  property_args = bb_utils.EncodeProperties(options)
+  commands = [[bot_config.host_obj.script,
+               '--steps=%s' % ','.join(bot_config.host_obj.host_steps)] +
+              property_args + (bot_config.host_obj.extra_args or [])]
+
+  test_obj = bot_config.test_obj
+  if test_obj:
+    run_test_cmd = [test_obj.script] + property_args
+    for test in test_obj.tests:
+      run_test_cmd.extend(['-f', test])
+    if test_obj.extra_args:
+      run_test_cmd.extend(test_obj.extra_args)
+    commands.append(run_test_cmd)
+  return commands
+
+
+def GetBotStepMap():
+  compile_step = ['compile']
+  chrome_proxy_tests = ['chrome_proxy']
+  python_unittests = ['python_unittests']
+  std_host_tests = ['check_webview_licenses']
+  std_build_steps = ['compile', 'zip_build']
+  std_test_steps = ['extract_build']
+  std_tests = ['ui', 'unit']
+  telemetry_tests = ['telemetry_perf_unittests']
+  telemetry_tests_user_build = ['telemetry_unittests',
+                                'telemetry_perf_unittests']
+  trial_tests = [
+      'base_junit_tests',
+      'components_browsertests',
+      'gfx_unittests',
+      'gl_unittests',
+  ]
+  flakiness_server = (
+      '--flakiness-server=%s' % constants.UPSTREAM_FLAKINESS_SERVER)
+  experimental = ['--experimental']
+  bisect_chrome_output_dir = os.path.abspath(
+      os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+                   os.pardir, 'bisect', 'src', 'out'))
+  B = BotConfig
+  H = (lambda steps, extra_args=None, extra_gyp=None, target_arch=None:
+       HostConfig('build/android/buildbot/bb_host_steps.py', steps, extra_args,
+                  extra_gyp, target_arch))
+  T = (lambda tests, extra_args=None:
+       TestConfig('build/android/buildbot/bb_device_steps.py', tests,
+                  extra_args))
+
+  bot_configs = [
+      # Main builders
+      B('main-builder-dbg', H(std_build_steps + std_host_tests)),
+      B('main-builder-rel', H(std_build_steps)),
+      B('main-clang-builder',
+        H(compile_step, extra_gyp='clang=1 component=shared_library')),
+      B('main-clobber', H(compile_step)),
+      B('main-tests-rel', H(std_test_steps),
+        T(std_tests + telemetry_tests + chrome_proxy_tests,
+          ['--cleanup', flakiness_server])),
+      B('main-tests', H(std_test_steps),
+        T(std_tests, ['--cleanup', flakiness_server])),
+
+      # Other waterfalls
+      B('asan-builder-tests', H(compile_step,
+                                extra_gyp='asan=1 component=shared_library'),
+        T(std_tests, ['--asan', '--asan-symbolize'])),
+      B('blink-try-builder', H(compile_step)),
+      B('chromedriver-fyi-tests-dbg', H(std_test_steps),
+        T(['chromedriver'],
+          ['--install=ChromeShell', '--install=ChromeDriverWebViewShell',
+           '--skip-wipe', '--disable-location', '--cleanup'])),
+      B('fyi-x86-builder-dbg',
+        H(compile_step + std_host_tests, experimental, target_arch='ia32')),
+      B('fyi-builder-dbg',
+        H(std_build_steps + std_host_tests, experimental,
+          extra_gyp='emma_coverage=1')),
+      B('x86-builder-dbg',
+        H(compile_step + std_host_tests, target_arch='ia32')),
+      B('fyi-builder-rel', H(std_build_steps, experimental)),
+      B('fyi-tests', H(std_test_steps),
+        T(std_tests + python_unittests,
+                      ['--experimental', flakiness_server,
+                      '--coverage-bucket', CHROMIUM_COVERAGE_BUCKET,
+                      '--cleanup'])),
+      B('user-build-fyi-tests-dbg', H(std_test_steps),
+        T(sorted(telemetry_tests_user_build + trial_tests))),
+      B('fyi-component-builder-tests-dbg',
+        H(compile_step, extra_gyp='component=shared_library'),
+        T(std_tests, ['--experimental', flakiness_server])),
+      B('gpu-builder-tests-dbg',
+        H(compile_step),
+        T(['gpu'], ['--install=ContentShell'])),
+      # Pass empty T([]) so that logcat monitor and device status check are run.
+      B('perf-bisect-builder-tests-dbg',
+        H(['bisect_perf_regression']),
+        T([], ['--chrome-output-dir', bisect_chrome_output_dir])),
+      B('perf-tests-rel', H(std_test_steps),
+        T([], ['--install=ChromeShell', '--cleanup'])),
+      B('webkit-latest-webkit-tests', H(std_test_steps),
+        T(['webkit_layout', 'webkit'], ['--cleanup', '--auto-reconnect'])),
+      B('webkit-latest-contentshell', H(compile_step),
+        T(['webkit_layout'], ['--auto-reconnect'])),
+      B('builder-unit-tests', H(compile_step), T(['unit'])),
+
+      # Generic builder config (for substring match).
+      B('builder', H(std_build_steps)),
+  ]
+
+  bot_map = dict((config.bot_id, config) for config in bot_configs)
+
+  # These bots have identical configuration to ones defined earlier.
+  copy_map = [
+      ('lkgr-clobber', 'main-clobber'),
+      ('try-builder-dbg', 'main-builder-dbg'),
+      ('try-builder-rel', 'main-builder-rel'),
+      ('try-clang-builder', 'main-clang-builder'),
+      ('try-fyi-builder-dbg', 'fyi-builder-dbg'),
+      ('try-x86-builder-dbg', 'x86-builder-dbg'),
+      ('try-tests-rel', 'main-tests-rel'),
+      ('try-tests', 'main-tests'),
+      ('try-fyi-tests', 'fyi-tests'),
+      ('webkit-latest-tests', 'main-tests'),
+  ]
+  for to_id, from_id in copy_map:
+    assert to_id not in bot_map
+    # pylint: disable=W0212
+    bot_map[to_id] = copy.deepcopy(bot_map[from_id])._replace(bot_id=to_id)
+
+    # Trybots do not upload to flakiness dashboard. They should be otherwise
+    # identical in configuration to their trunk building counterparts.
+    test_obj = bot_map[to_id].test_obj
+    if to_id.startswith('try') and test_obj:
+      extra_args = test_obj.extra_args
+      if extra_args and flakiness_server in extra_args:
+        extra_args.remove(flakiness_server)
+  return bot_map
+
+
+# Return an object from the map, looking first for an exact id match.
+# If this fails, look for an id which is a substring of the specified id.
+# Choose the longest of all substring matches.
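+# For example, an id of 'main-tests-rel-tryserver' matches both 'main-tests'
+# and 'main-tests-rel'; the longer 'main-tests-rel' config would be chosen.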
+# pylint: disable=W0622
+def GetBestMatch(id_map, id):
+  config = id_map.get(id)
+  if not config:
+    substring_matches = [x for x in id_map.iterkeys() if x in id]
+    if substring_matches:
+      max_id = max(substring_matches, key=len)
+      print 'Using config from id="%s" (substring match).' % max_id
+      config = id_map[max_id]
+  return config
+
+
+def GetRunBotOptParser():
+  parser = bb_utils.GetParser()
+  parser.add_option('--bot-id', help='Specify bot id directly.')
+  parser.add_option('--testing', action='store_true',
+                    help='For testing: print, but do not run commands')
+
+  return parser
+
+
+def GetBotConfig(options, bot_step_map):
+  bot_id = options.bot_id or options.factory_properties.get('android_bot_id')
+  if not bot_id:
+    print >> sys.stderr, (
+        'A bot id must be specified through option or factory_props.')
+    return
+
+  bot_config = GetBestMatch(bot_step_map, bot_id)
+  if not bot_config:
+    print 'Error: config for id="%s" cannot be inferred.' % bot_id
+  return bot_config
+
+
+def RunBotCommands(options, commands, env):
+  print 'Environment changes:'
+  print DictDiff(dict(os.environ), env)
+
+  for command in commands:
+    print bb_utils.CommandToString(command)
+    sys.stdout.flush()
+    if options.testing:
+      env['BUILDBOT_TESTING'] = '1'
+    return_code = subprocess.call(command, cwd=bb_utils.CHROME_SRC, env=env)
+    if return_code != 0:
+      return return_code
+
+
+def main(argv):
+  proc = subprocess.Popen(
+      ['/bin/hostname', '-f'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  hostname_stdout, hostname_stderr = proc.communicate()
+  if proc.returncode == 0:
+    print 'Running on: ' + hostname_stdout
+  else:
+    print >> sys.stderr, 'ERROR: failed to run hostname'
+    print >> sys.stderr, hostname_stdout
+    print >> sys.stderr, hostname_stderr
+    sys.exit(1)
+
+  parser = GetRunBotOptParser()
+  options, args = parser.parse_args(argv[1:])
+  if args:
+    parser.error('Unused args: %s' % args)
+
+  bot_config = GetBotConfig(options, GetBotStepMap())
+  if not bot_config:
+    sys.exit(1)
+
+  print 'Using config:', bot_config
+
+  commands = GetCommands(options, bot_config)
+  for command in commands:
+    print 'Will run: ', bb_utils.CommandToString(command)
+  print
+
+  env = GetEnvironment(bot_config.host_obj, options.testing)
+  return RunBotCommands(options, commands, env)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/buildbot/bb_utils.py b/build/android/buildbot/bb_utils.py
new file mode 100644
index 0000000..3c16cc2
--- /dev/null
+++ b/build/android/buildbot/bb_utils.py
@@ -0,0 +1,100 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import optparse
+import os
+import pipes
+import subprocess
+import sys
+
+import bb_annotations
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import constants
+
+
+TESTING = 'BUILDBOT_TESTING' in os.environ
+
+BB_BUILD_DIR = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+                 os.pardir, os.pardir, os.pardir, os.pardir))
+
+CHROME_SRC = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..', '..', '..'))
+
+# TODO: Figure out how to merge this with pylib.cmd_helper.OutDirectory().
+CHROME_OUT_DIR = os.path.join(CHROME_SRC, 'out')
+
+GOMA_DIR = os.environ.get('GOMA_DIR', os.path.join(BB_BUILD_DIR, 'goma'))
+
+GSUTIL_PATH = os.path.join(BB_BUILD_DIR, 'third_party', 'gsutil', 'gsutil')
+
+def CommandToString(command):
+  """Returns quoted command that can be run in bash shell."""
+  return ' '.join(map(pipes.quote, command))
+
+
+def SpawnCmd(command, stdout=None, cwd=CHROME_SRC):
+  """Spawn a process without waiting for termination."""
+  print '>', CommandToString(command)
+  sys.stdout.flush()
+  if TESTING:
+    class MockPopen(object):
+      @staticmethod
+      def wait():
+        return 0
+      @staticmethod
+      def communicate():
+        return '', ''
+    return MockPopen()
+  return subprocess.Popen(command, cwd=cwd, stdout=stdout)
+
+
+def RunCmd(command, flunk_on_failure=True, halt_on_failure=False,
+           warning_code=constants.WARNING_EXIT_CODE, stdout=None,
+           cwd=CHROME_SRC):
+  """Run a command relative to the chrome source root."""
+  code = SpawnCmd(command, stdout, cwd).wait()
+  print '<', CommandToString(command)
+  if code != 0:
+    print 'ERROR: process exited with code %d' % code
+    if code != warning_code and flunk_on_failure:
+      bb_annotations.PrintError()
+    else:
+      bb_annotations.PrintWarning()
+    # Allow steps to have both halting (i.e. 1) and non-halting exit codes.
+    if code != warning_code and halt_on_failure:
+      print 'FATAL %d != %d' % (code, warning_code)
+      sys.exit(1)
+  return code
+
+
+def GetParser():
+  def ConvertJson(option, _, value, parser):
+    setattr(parser.values, option.dest, json.loads(value))
+  parser = optparse.OptionParser()
+  parser.add_option('--build-properties', action='callback',
+                    callback=ConvertJson, type='string', default={},
+                    help='build properties in JSON format')
+  parser.add_option('--factory-properties', action='callback',
+                    callback=ConvertJson, type='string', default={},
+                    help='factory properties in JSON format')
+  return parser
+
+
+def EncodeProperties(options):
+  return ['--factory-properties=%s' % json.dumps(options.factory_properties),
+          '--build-properties=%s' % json.dumps(options.build_properties)]
+
+
+def RunSteps(steps, step_cmds, options):
+  unknown_steps = set(steps) - set(step for step, _ in step_cmds)
+  if unknown_steps:
+    print >> sys.stderr, 'FATAL: Unknown steps %s' % list(unknown_steps)
+    sys.exit(1)
+
+  for step, cmd in step_cmds:
+    if step in steps:
+      cmd(options)
diff --git a/build/android/buildbot/env_to_json.py b/build/android/buildbot/env_to_json.py
new file mode 100755
index 0000000..f9a7a44
--- /dev/null
+++ b/build/android/buildbot/env_to_json.py
@@ -0,0 +1,11 @@
+#!/usr/bin/python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Encode current environment into json.
+
+import json
+import os
+
+print json.dumps(dict(os.environ))
diff --git a/build/android/buildbot/tests/bb_run_bot_test.py b/build/android/buildbot/tests/bb_run_bot_test.py
new file mode 100755
index 0000000..810c60d
--- /dev/null
+++ b/build/android/buildbot/tests/bb_run_bot_test.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import subprocess
+import sys
+
+BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(BUILDBOT_DIR)
+import bb_run_bot
+
+def RunBotProcesses(bot_process_map):
+  code = 0
+  for bot, proc in bot_process_map:
+    _, err = proc.communicate()
+    code |= proc.returncode
+    if proc.returncode != 0:
+      print 'Error running the bot script with id="%s"' % bot, err
+
+  return code
+
+
+def main():
+  procs = [
+      (bot, subprocess.Popen(
+          [os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
+          '--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
+      for bot in bb_run_bot.GetBotStepMap()]
+  return RunBotProcesses(procs)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/chrome_with_libs.gyp b/build/android/chrome_with_libs.gyp
new file mode 100644
index 0000000..690be88
--- /dev/null
+++ b/build/android/chrome_with_libs.gyp
@@ -0,0 +1,82 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to add more loadable libs into Chrome_apk.
+#
+# This is useful when building Chrome_apk with loadable modules that are not
+# included in Chrome_apk itself.
+# As an example, when building Chrome_apk with
+# libpeer_target_type=loadable_module,
+# libpeerconnection.so is not included in Chrome_apk. To add the missing
+# lib, follow the steps below:
+# - Run gyp:
+#     GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" CHROMIUM_GYP_FILE="build/android/chrome_with_libs.gyp" build/gyp_chromium
+# - Build chrome_with_libs:
+#     ninja (or make) chrome_with_libs
+#
+# This tool also allows replacing the loadable module with a new one via the
+# following steps:
+# - Build Chrome_apk with the gyp define:
+#     GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" build/gyp_chromium
+#     ninja (or make) Chrome_apk
+# - Replace libpeerconnection.so with a new one:
+#     cp the_new_one path/to/libpeerconnection.so
+# - Run gyp:
+#     GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" CHROMIUM_GYP_FILE="build/android/chrome_with_libs.gyp" build/gyp_chromium
+# - Build chrome_with_libs:
+#     ninja (or make) chrome_with_libs
+{
+  'targets': [
+    {
+      # An "All" target is required for a top-level gyp-file.
+      'target_name': 'All',
+      'type': 'none',
+      'dependencies': [
+        'chrome_with_libs',
+      ],
+    },
+    {
+      'target_name': 'chrome_with_libs',
+      'type': 'none',
+      'variables': {
+        'intermediate_dir': '<(PRODUCT_DIR)/prebuilt_libs/',
+        'chrome_unsigned_path': '<(PRODUCT_DIR)/chrome_apk/Chrome-unsigned.apk',
+        'chrome_with_libs_unsigned': '<(intermediate_dir)/Chrome-with-libs-unsigned.apk',
+        'chrome_with_libs_final': '<(PRODUCT_DIR)/apks/Chrome-with-libs.apk',
+      },
+      'dependencies': [
+        '<(DEPTH)/clank/native/framework/clank.gyp:chrome_apk'
+      ],
+      'copies': [
+        {
+          'destination': '<(intermediate_dir)/lib/<(android_app_abi)',
+          'files': [
+            '<(PRODUCT_DIR)/libpeerconnection.so',
+          ],
+        },
+      ],
+      'actions': [
+        {
+          'action_name': 'put_libs_in_chrome',
+          'variables': {
+            'inputs': [
+              '<(intermediate_dir)/lib/<(android_app_abi)/libpeerconnection.so',
+            ],
+            'input_apk_path': '<(chrome_unsigned_path)',
+            'output_apk_path': '<(chrome_with_libs_unsigned)',
+            'libraries_top_dir%': '<(intermediate_dir)',
+          },
+          'includes': [ 'create_standalone_apk_action.gypi' ],
+        },
+        {
+          'action_name': 'finalize_chrome_with_libs',
+          'variables': {
+            'input_apk_path': '<(chrome_with_libs_unsigned)',
+            'output_apk_path': '<(chrome_with_libs_final)',
+          },
+          'includes': [ 'finalize_apk_action.gypi' ],
+        },
+      ],
+    }],
+}
diff --git a/build/android/create_standalone_apk_action.gypi b/build/android/create_standalone_apk_action.gypi
new file mode 100644
index 0000000..d17af7c
--- /dev/null
+++ b/build/android/create_standalone_apk_action.gypi
@@ -0,0 +1,41 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide an action that
+# combines a directory of shared libraries and an incomplete APK into a
+# standalone APK.
+#
+# To use this, create a gyp action with the following form:
+#  {
+#    'action_name': 'some descriptive action name',
+#    'variables': {
+#      'inputs': [ 'input_path1', 'input_path2' ],
+#      'input_apk_path': '<(unsigned_apk_path)',
+#      'output_apk_path': '<(unsigned_standalone_apk_path)',
+#      'libraries_top_dir': '<(libraries_top_dir)',
+#    },
+#    'includes': [ 'relative/path/to/create_standalone_apk_action.gypi' ],
+#  },
+
+{
+  'message': 'Creating standalone APK: <(output_apk_path)',
+  'variables': {
+    'inputs': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/create_standalone_apk.py',
+    '<(input_apk_path)',
+    '>@(inputs)',
+  ],
+  'outputs': [
+    '<(output_apk_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/create_standalone_apk.py',
+    '--libraries-top-dir=<(libraries_top_dir)',
+    '--input-apk-path=<(input_apk_path)',
+    '--output-apk-path=<(output_apk_path)',
+  ],
+}
diff --git a/build/android/developer_recommended_flags.gypi b/build/android/developer_recommended_flags.gypi
new file mode 100644
index 0000000..79c201de
--- /dev/null
+++ b/build/android/developer_recommended_flags.gypi
@@ -0,0 +1,61 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is the set of recommended gyp variable settings for Chrome for Android development.
+#
+# These can be used by copying this file to $CHROME_SRC/chrome/supplement.gypi.
+#
+# Even better, create chrome/supplement.gypi containing the following:
+#   {
+#     'includes': [ '../build/android/developer_recommended_flags.gypi' ]
+#   }
+# and you'll get new settings automatically.
+# When using this method, you can override individual settings by setting them unconditionally (with
+# no %) in chrome/supplement.gypi.
+# E.g. to disable gyp_managed_install but use everything else:
+#   {
+#     'variables': {
+#       'gyp_managed_install': 0,
+#     },
+#     'includes': [ '../build/android/developer_recommended_flags.gypi' ]
+#   }
+
+{
+  'variables': {
+    'variables': {
+      # Set component to 'shared_library' to enable the component build. This builds native code as
+      # many small shared libraries instead of one monolithic library. This slightly reduces the time
+      # required for incremental builds.
+      'component%': 'shared_library',
+    },
+    'component%': '<(component)',
+
+    # When gyp_managed_install is set to 1, building an APK will install that APK on the connected
+    # device(/emulator). To install on multiple devices (or onto a new device), build the APK once
+    # with each device attached. This greatly reduces the time required for incremental builds.
+    #
+    # This comes with some caveats:
+    #   Only works with a single device connected (it will print a warning if
+    #     zero or multiple devices are attached).
+    #   Device must be flashed with a user-debug unsigned Android build.
+    #   Some actions are always run (i.e. ninja will never say "no work to do").
+    'gyp_managed_install%': 1,
+
+    # With gyp_managed_install, we do not necessarily need a standalone APK.
+    # When create_standalone_apk is set to 1, we will build a standalone APK
+    # anyway. For even faster builds, you can set create_standalone_apk to 0.
+    'create_standalone_apk%': 1,
+
+    # Set clang to 1 to use the clang compiler. Clang has much (much, much) better warning/error
+    # messages than gcc.
+    # TODO(cjhopman): Enable this when http://crbug.com/156420 is addressed. Until then, users can
+    # set clang to 1, but Android stack traces will sometimes be incomplete.
+    #'clang%': 1,
+
+    # Set fastbuild to 1 to build with less debugging information. This can greatly decrease linking
+    # time. The downside is that stack traces will be missing useful information (like line
+    # numbers).
+    #'fastbuild%': 1,
+  },
+}
diff --git a/build/android/dex_action.gypi b/build/android/dex_action.gypi
new file mode 100644
index 0000000..56d386f
--- /dev/null
+++ b/build/android/dex_action.gypi
@@ -0,0 +1,60 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that dexes
+# compiled java files. If proguard_enabled == "true" and CONFIGURATION_NAME ==
+# "Release", then it will dex the proguard_enabled_input_path instead of the
+# normal dex_input_paths/dex_generated_input_dirs.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'actions': [
+#      {
+#        'action_name': 'some name for the action',
+#        'variables': {
+#          'dex_input_paths': [ 'files to dex (when proguard is not used) and add to input paths' ],
+#          'dex_generated_input_dirs': [ 'dirs that contain generated files to dex' ],
+#
+#          # For targets that use proguard:
+#          'proguard_enabled': 'true',
+#          'proguard_enabled_input_path': 'path to dex when using proguard',
+#        },
+#        'includes': [ 'relative/path/to/dex_action.gypi' ],
+#      },
+#    ],
+#  },
+#
+
+{
+  'message': 'Creating dex file: <(output_path)',
+  'variables': {
+    'dex_input_paths': [],
+    'dex_generated_input_dirs': [],
+    'proguard_enabled%': 'false',
+    'proguard_enabled_input_path%': '',
+    'dex_no_locals%': 0,
+    'dex_additional_options': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/util/md5_check.py',
+    '<(DEPTH)/build/android/gyp/dex.py',
+    '>@(dex_input_paths)',
+  ],
+  'outputs': [
+    '<(output_path)',
+    '<(output_path).inputs',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/dex.py',
+    '--dex-path=<(output_path)',
+    '--android-sdk-tools=<(android_sdk_tools)',
+    '--output-directory=<(PRODUCT_DIR)',
+    '--configuration-name=<(CONFIGURATION_NAME)',
+    '--proguard-enabled=>(proguard_enabled)',
+    '--proguard-enabled-input-path=<(proguard_enabled_input_path)',
+    '--no-locals=>(dex_no_locals)',
+    '>@(dex_additional_options)',
+    '>@(dex_input_paths)',
+    '>@(dex_generated_input_dirs)',
+  ]
+}
diff --git a/build/android/disable_lto.gypi b/build/android/disable_lto.gypi
new file mode 100644
index 0000000..e379cfd
--- /dev/null
+++ b/build/android/disable_lto.gypi
@@ -0,0 +1,20 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included to disable LTO on a target.
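+#
+# Illustrative usage from a target's dict (the relative path depends on the
+# including file's location):
+#   'includes': [ '../../build/android/disable_lto.gypi' ],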
+
+{
+  'target_conditions': [
+    ['_toolset=="target"', {
+      'conditions': [
+        ['OS=="android" and (use_lto==1 or use_lto_o2==1)', {
+          'cflags!': [
+            '-flto',
+            '-ffat-lto-objects',
+          ],
+        }],
+      ],
+    }],
+  ],
+}
diff --git a/build/android/empty/src/.keep b/build/android/empty/src/.keep
new file mode 100644
index 0000000..0f710b6
--- /dev/null
+++ b/build/android/empty/src/.keep
@@ -0,0 +1,6 @@
+This is a file that needs to live here until http://crbug.com/158155 has
+been fixed.
+
+The ant build system requires that a src folder is always present, but for
+some of our targets that is not the case. Giving it an empty src folder works
+nicely though.
diff --git a/build/android/empty_proguard.flags b/build/android/empty_proguard.flags
new file mode 100644
index 0000000..53484fe
--- /dev/null
+++ b/build/android/empty_proguard.flags
@@ -0,0 +1 @@
+# Used for apk targets that do not need proguard. See build/java_apk.gypi.
diff --git a/build/android/enable_asserts.py b/build/android/enable_asserts.py
new file mode 100755
index 0000000..8fb7dca
--- /dev/null
+++ b/build/android/enable_asserts.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Enables dalvik vm asserts in the android device."""
+
+import argparse
+import sys
+
+from pylib.device import device_utils
+
+
+def main():
+  parser = argparse.ArgumentParser()
+
+  set_asserts_group = parser.add_mutually_exclusive_group(required=True)
+  set_asserts_group.add_argument(
+      '--enable_asserts', dest='set_asserts', action='store_true',
+      help='Sets the dalvik.vm.enableassertions property to "all"')
+  set_asserts_group.add_argument(
+      '--disable_asserts', dest='set_asserts', action='store_false',
+      help='Removes the dalvik.vm.enableassertions property')
+
+  args = parser.parse_args()
+
+  # TODO(jbudorick): Accept optional serial number and run only for the
+  # specified device when present.
+  devices = device_utils.DeviceUtils.parallel()
+
+  def set_java_asserts_and_restart(device):
+    if device.SetJavaAsserts(args.set_asserts):
+      device.RunShellCommand('stop')
+      device.RunShellCommand('start')
+
+  devices.pMap(set_java_asserts_and_restart)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/envsetup.sh b/build/android/envsetup.sh
new file mode 100755
index 0000000..0545330
--- /dev/null
+++ b/build/android/envsetup.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Sets up environment for building Chromium on Android.
+
+# Make sure we're being sourced (possibly by another script). Check for bash
+# since zsh sets $0 when sourcing.
+if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
+  echo "ERROR: envsetup must be sourced."
+  exit 1
+fi
+
+# This only exists to set local variables. Don't call this manually.
+android_envsetup_main() {
+  local SCRIPT_PATH="$1"
+  local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"
+
+  local CURRENT_DIR="$(readlink -f "${SCRIPT_DIR}/../../")"
+  if [[ -z "${CHROME_SRC}" ]]; then
+    # If $CHROME_SRC was not set, assume current directory is CHROME_SRC.
+    local CHROME_SRC="${CURRENT_DIR}"
+  fi
+
+  if [[ "${CURRENT_DIR/"${CHROME_SRC}"/}" == "${CURRENT_DIR}" ]]; then
+    # If the current directory is not in $CHROME_SRC, it might be set for
+    # another source tree. If $CHROME_SRC was set correctly and we are in the
+    # correct directory, "${CURRENT_DIR/"${CHROME_SRC}"/}" will be "".
+    # Otherwise, it will be equal to "${CURRENT_DIR}".
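+    # (Illustrative: CHROME_SRC=/work/chromium with CURRENT_DIR=/work/other
+    # leaves the expansion unchanged, so the warning below is printed.)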
+    echo "Warning: Current directory is out of CHROME_SRC, it may not be \
+  the one you want."
+    echo "${CHROME_SRC}"
+  fi
+
+  # Allow the caller to override a few environment variables. If any of them is
+  # unset, we default to a sane value that's known to work. This allows for
+  # experimentation with a custom SDK.
+  if [[ -z "${ANDROID_SDK_ROOT}" || ! -d "${ANDROID_SDK_ROOT}" ]]; then
+    local ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_tools/sdk/"
+  fi
+
+  # Add Android SDK tools to system path.
+  export PATH=$PATH:${ANDROID_SDK_ROOT}/platform-tools
+
+  # Add Android utility tools to the system path.
+  export PATH=$PATH:${ANDROID_SDK_ROOT}/tools/
+
+  # Add Chromium Android development scripts to system path.
+  # Must be after CHROME_SRC is set.
+  export PATH=$PATH:${CHROME_SRC}/build/android
+
+  export ENVSETUP_GYP_CHROME_SRC=${CHROME_SRC}  # TODO(thakis): Remove.
+}
+# In zsh, $0 is the name of the file being sourced.
+android_envsetup_main "${BASH_SOURCE:-$0}"
+unset -f android_envsetup_main
+
+android_gyp() {
+  echo "Please call build/gyp_chromium instead. android_gyp is going away."
+  "${ENVSETUP_GYP_CHROME_SRC}/build/gyp_chromium" --depth="${ENVSETUP_GYP_CHROME_SRC}" --check "$@"
+}
diff --git a/build/android/finalize_apk_action.gypi b/build/android/finalize_apk_action.gypi
new file mode 100644
index 0000000..644f9e8
--- /dev/null
+++ b/build/android/finalize_apk_action.gypi
@@ -0,0 +1,49 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide an action that
+# signs and zipaligns an APK.
+#
+# To use this, create a gyp action with the following form:
+#  {
+#    'action_name': 'some descriptive action name',
+#    'variables': {
+#      'input_apk_path': 'relative/path/to/input.apk',
+#      'output_apk_path': 'relative/path/to/output.apk',
+#    },
+#    'includes': [ '../../build/android/finalize_apk_action.gypi' ],
+#  },
+#
+
+{
+  'message': 'Signing/aligning <(_target_name) APK: <(input_apk_path)',
+  'variables': {
+    'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore',
+    'keystore_name%': 'chromiumdebugkey',
+    'keystore_password%': 'chromium',
+    'zipalign_path%': '<(android_sdk_tools)/zipalign',
+    'rezip_apk_jar_path%': '<(PRODUCT_DIR)/lib.java/rezip_apk.jar',
+    'load_library_from_zip%': 0,
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/finalize_apk.py',
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(keystore_path)',
+    '<(input_apk_path)',
+  ],
+  'outputs': [
+    '<(output_apk_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/finalize_apk.py',
+    '--zipalign-path=<(zipalign_path)',
+    '--unsigned-apk-path=<(input_apk_path)',
+    '--final-apk-path=<(output_apk_path)',
+    '--key-path=<(keystore_path)',
+    '--key-name=<(keystore_name)',
+    '--key-passwd=<(keystore_password)',
+    '--load-library-from-zip=<(load_library_from_zip)',
+    '--rezip-apk-jar-path=<(rezip_apk_jar_path)',
+  ],
+}
diff --git a/build/android/finalize_splits_action.gypi b/build/android/finalize_splits_action.gypi
new file mode 100644
index 0000000..daa7f83
--- /dev/null
+++ b/build/android/finalize_splits_action.gypi
@@ -0,0 +1,76 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide an action that
+# signs and zipaligns split APKs.
+#
+# Required variables:
+#  apk_name - Base name of the apk.
+# Optional variables:
+#  density_splits - Whether to process density splits.
+#  language_splits - List of languages to create language splits for.
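+#
+# Illustrative usage from a target's dict (names and paths are placeholders):
+#  {
+#    'variables': {
+#      'apk_name': 'SomeApk',
+#      'density_splits': 1,
+#    },
+#    'includes': [ '../../build/android/finalize_splits_action.gypi' ],
+#  },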
+
+{
+  'variables': {
+    'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore',
+    'keystore_name%': 'chromiumdebugkey',
+    'keystore_password%': 'chromium',
+    'zipalign_path%': '<(android_sdk_tools)/zipalign',
+    'density_splits%': 0,
+    'language_splits%': [],
+    'resource_packaged_apk_name': '<(apk_name)-resources.ap_',
+    'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)',
+    'base_output_path': '<(PRODUCT_DIR)/apks/<(apk_name)',
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/finalize_splits.py',
+    '<(DEPTH)/build/android/gyp/finalize_apk.py',
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(keystore_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/finalize_splits.py',
+    '--resource-packaged-apk-path=<(resource_packaged_apk_path)',
+    '--base-output-path=<(base_output_path)',
+    '--zipalign-path=<(zipalign_path)',
+    '--key-path=<(keystore_path)',
+    '--key-name=<(keystore_name)',
+    '--key-passwd=<(keystore_password)',
+  ],
+  'conditions': [
+    ['density_splits == 1', {
+      'message': 'Signing/aligning <(_target_name) density splits',
+      'inputs': [
+        '<(resource_packaged_apk_path)_hdpi',
+        '<(resource_packaged_apk_path)_xhdpi',
+        '<(resource_packaged_apk_path)_xxhdpi',
+        '<(resource_packaged_apk_path)_xxxhdpi',
+        '<(resource_packaged_apk_path)_tvdpi',
+      ],
+      'outputs': [
+        '<(base_output_path)-density-hdpi.apk',
+        '<(base_output_path)-density-xhdpi.apk',
+        '<(base_output_path)-density-xxhdpi.apk',
+        '<(base_output_path)-density-xxxhdpi.apk',
+        '<(base_output_path)-density-tvdpi.apk',
+      ],
+      'action': [
+        '--densities=hdpi,xhdpi,xxhdpi,xxxhdpi,tvdpi',
+      ],
+    }],
+    ['language_splits != []', {
+      'message': 'Signing/aligning <(_target_name) language splits',
+      'inputs': [
+        "<!@(python <(DEPTH)/build/apply_locales.py '<(resource_packaged_apk_path)_ZZLOCALE' <(language_splits))",
+      ],
+      'outputs': [
+        "<!@(python <(DEPTH)/build/apply_locales.py '<(base_output_path)-lang-ZZLOCALE.apk' <(language_splits))",
+      ],
+      'action': [
+        '--languages=<(language_splits)',
+      ],
+    }],
+  ],
+}
+
diff --git a/build/android/findbugs_action.gypi b/build/android/findbugs_action.gypi
new file mode 100644
index 0000000..e3b3d36
--- /dev/null
+++ b/build/android/findbugs_action.gypi
@@ -0,0 +1,22 @@
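+# This file is meant to be included into an action to run findbugs on a jar.
+#
+# Variables assumed to be set by the including target (inferred from the
+# action below):
+#  findbugs_target_jar_path - Path to the jar to analyze.
+#  auxclasspath - Gyp list with the aux classpath for analysis.
+#  stamp_path - Path to touch on success.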
+{
+  'action_name': 'findbugs_<(_target_name)',
+  'message': 'Running findbugs on <(_target_name)',
+  'variables': {
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/findbugs_diff.py',
+    '<(DEPTH)/build/android/findbugs_filter/findbugs_exclude.xml',
+    '<(DEPTH)/build/android/pylib/utils/findbugs.py',
+    '<(findbugs_target_jar_path)',
+  ],
+  'outputs': [
+    '<(stamp_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/findbugs_diff.py',
+    '--auxclasspath-gyp', '>(auxclasspath)',
+    '--stamp', '<(stamp_path)',
+    '<(findbugs_target_jar_path)',
+  ],
+}
diff --git a/build/android/findbugs_diff.py b/build/android/findbugs_diff.py
new file mode 100755
index 0000000..f55e462
--- /dev/null
+++ b/build/android/findbugs_diff.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs findbugs, and returns an error code if there are new warnings.
+
+Other options:
+  --only-analyze: only analyze the classes and packages you are interested in.
+  --release-build: analyze the classes in the out/Release directory.
+  --findbug-args: pass additional options through to findbugs.
+
+Run
+  $CHROMIUM_SRC/third_party/findbugs/bin/findbugs -textui for details.
+
+"""
+
+import argparse
+import os
+import sys
+
+from pylib import constants
+from pylib.utils import findbugs
+
+_DEFAULT_BASE_DIR = os.path.join(
+    constants.DIR_SOURCE_ROOT, 'build', 'android', 'findbugs_filter')
+
+sys.path.append(
+    os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android', 'gyp'))
+from util import build_utils
+
+
+def main():
+  parser = argparse.ArgumentParser()
+
+  parser.add_argument(
+      '-a', '--auxclasspath', default=None, dest='auxclasspath',
+      help='Set aux classpath for analysis.')
+  parser.add_argument(
+      '--auxclasspath-gyp', dest='auxclasspath_gyp',
+      help='A gyp list containing the aux classpath for analysis')
+  parser.add_argument(
+      '-o', '--only-analyze', default=None,
+      dest='only_analyze', help='Only analyze the given classes and packages.')
+  parser.add_argument(
+      '-e', '--exclude', default=None, dest='exclude',
+      help='Exclude bugs matching given filter.')
+  parser.add_argument(
+      '-l', '--release-build', action='store_true', dest='release_build',
+      help='Analyze release build instead of debug.')
+  parser.add_argument(
+      '-f', '--findbug-args', default=None, dest='findbug_args',
+      help='Additional findbug arguments.')
+  parser.add_argument(
+      '-b', '--base-dir', default=_DEFAULT_BASE_DIR,
+      dest='base_dir', help='Base directory for configuration file.')
+  parser.add_argument(
+      '--output-file', dest='output_file',
+      help='Path to save the output to.')
+  parser.add_argument(
+      '--stamp', help='Path to touch on success.')
+  parser.add_argument(
+      '--depfile', help='Path to the depfile. This must be specified as the '
+                        "action's first output.")
+
+  parser.add_argument(
+      'jar_paths', metavar='JAR_PATH', nargs='+',
+      help='JAR file to analyze')
+
+  args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
+  if args.auxclasspath:
+    args.auxclasspath = args.auxclasspath.split(':')
+  elif args.auxclasspath_gyp:
+    args.auxclasspath = build_utils.ParseGypList(args.auxclasspath_gyp)
+
+  if args.base_dir:
+    if not args.exclude:
+      args.exclude = os.path.join(args.base_dir, 'findbugs_exclude.xml')
+
+  findbugs_command, findbugs_warnings = findbugs.Run(
+      args.exclude, args.only_analyze, args.auxclasspath,
+      args.output_file, args.findbug_args, args.jar_paths)
+
+  if findbugs_warnings:
+    print
+    print '*' * 80
+    print 'FindBugs run via:'
+    print findbugs_command
+    print
+    print 'FindBugs reported the following issues:'
+    for warning in sorted(findbugs_warnings):
+      print str(warning)
+    print '*' * 80
+    print
+  else:
+    if args.depfile:
+      build_utils.WriteDepfile(
+          args.depfile,
+          build_utils.GetPythonDependencies() + (args.auxclasspath or [])
+          + args.jar_paths)
+    if args.stamp:
+      build_utils.Touch(args.stamp)
+
+  return len(findbugs_warnings)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/findbugs_filter/findbugs_exclude.xml b/build/android/findbugs_filter/findbugs_exclude.xml
new file mode 100644
index 0000000..dbff9d9
--- /dev/null
+++ b/build/android/findbugs_filter/findbugs_exclude.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+
+<!--
+Documentation: http://findbugs.sourceforge.net/manual/filter.html
+In particular, ~ at the start of a string means it's a regex.
+-->
+<FindBugsFilter>
+  <!-- Skip the generated resource classes (including nested classes). -->
+  <Match>
+    <Class name="~.*\.R(\$\w+)?" />
+  </Match>
+  <Match>
+    <Class name="~org\.chromium\..*\.Manifest(\$\w+)?" />
+  </Match>
+  <Bug pattern="DM_STRING_CTOR" />
+  <!-- Ignore "reliance on default String encoding" warnings, as we're not multi-platform -->
+  <Bug pattern="DM_DEFAULT_ENCODING" />
+</FindBugsFilter>
diff --git a/build/android/generate_emma_html.py b/build/android/generate_emma_html.py
new file mode 100755
index 0000000..93b0b0e
--- /dev/null
+++ b/build/android/generate_emma_html.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Aggregates EMMA coverage files to produce html output."""
+
+import fnmatch
+import json
+import optparse
+import os
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+
+
+def _GetFilesWithExt(root_dir, ext):
+  """Gets all files with a given extension.
+
+  Args:
+    root_dir: Directory in which to search for files.
+    ext: Extension to look for (without the leading dot).
+
+  Returns:
+    A list of absolute paths to files that match.
+  """
+  files = []
+  for root, _, filenames in os.walk(root_dir):
+    basenames = fnmatch.filter(filenames, '*.' + ext)
+    files.extend([os.path.join(root, basename)
+                  for basename in basenames])
+
+  return files
+
+
+def main():
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--output', help='HTML output filename.')
+  option_parser.add_option('--coverage-dir', default=None,
+                           help=('Root of the directory in which to search for '
+                                 'coverage data (.ec) files.'))
+  option_parser.add_option('--metadata-dir', default=None,
+                           help=('Root of the directory in which to search for '
+                                 'coverage metadata (.em) files.'))
+  option_parser.add_option('--cleanup', action='store_true',
+                           help=('If set, removes coverage files generated at '
+                                 'runtime.'))
+  options, _ = option_parser.parse_args()
+
+  if not (options.coverage_dir and options.metadata_dir and options.output):
+    option_parser.error('One or more mandatory options are missing.')
+
+  coverage_files = _GetFilesWithExt(options.coverage_dir, 'ec')
+  metadata_files = _GetFilesWithExt(options.metadata_dir, 'em')
+  print 'Found coverage files: %s' % str(coverage_files)
+  print 'Found metadata files: %s' % str(metadata_files)
+
+  sources = []
+  for f in metadata_files:
+    sources_file = os.path.splitext(f)[0] + '_sources.txt'
+    with open(sources_file, 'r') as sf:
+      sources.extend(json.load(sf))
+  sources = [os.path.join(constants.DIR_SOURCE_ROOT, s) for s in sources]
+  print 'Sources: %s' % sources
+
+  input_args = []
+  for f in coverage_files + metadata_files:
+    input_args.append('-in')
+    input_args.append(f)
+
+  output_args = ['-Dreport.html.out.file', options.output]
+  source_args = ['-sp', ','.join(sources)]
+
+  exit_code = cmd_helper.RunCmd(
+      ['java', '-cp',
+       os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'lib', 'emma.jar'),
+       'emma', 'report', '-r', 'html']
+      + input_args + output_args + source_args)
+
+  if options.cleanup:
+    for f in coverage_files:
+      os.remove(f)
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gn/zip.py b/build/android/gn/zip.py
new file mode 100755
index 0000000..5050ea0
--- /dev/null
+++ b/build/android/gn/zip.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Archives a set of files.
+"""
+
+import ast
+import optparse
+import os
+import sys
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+from util import build_utils
+
+def DoZip(inputs, output, base_dir):
+  with zipfile.ZipFile(output, 'w') as outfile:
+    for f in inputs:
+      outfile.write(f, os.path.relpath(f, base_dir))
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--inputs', help='List of files to archive.')
+  parser.add_option('--output', help='Path to output archive.')
+  parser.add_option('--base-dir',
+                    help='If provided, the paths in the archive will be '
+                    'relative to this directory', default='.')
+
+  options, _ = parser.parse_args()
+
+  inputs = ast.literal_eval(options.inputs)
+  output = options.output
+  base_dir = options.base_dir
+
+  DoZip(inputs, output, base_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/aidl.py b/build/android/gyp/aidl.py
new file mode 100755
index 0000000..d5aa546
--- /dev/null
+++ b/build/android/gyp/aidl.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Invokes Android's aidl
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(option_parser)
+  option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
+  option_parser.add_option('--imports', help='Files to import.')
+  option_parser.add_option('--includes',
+                           help='Directories to add as import search paths.')
+  option_parser.add_option('--srcjar', help='Path for srcjar output.')
+  options, args = option_parser.parse_args(argv[1:])
+
+  with build_utils.TempDir() as temp_dir:
+    for f in args:
+      classname = os.path.splitext(os.path.basename(f))[0]
+      output = os.path.join(temp_dir, classname + '.java')
+      aidl_cmd = [options.aidl_path]
+      aidl_cmd += [
+        '-p' + s for s in build_utils.ParseGypList(options.imports)
+      ]
+      if options.includes is not None:
+        aidl_cmd += [
+          '-I' + s for s in build_utils.ParseGypList(options.includes)
+        ]
+      aidl_cmd += [
+        f,
+        output
+      ]
+      build_utils.CheckOutput(aidl_cmd)
+
+    build_utils.ZipDir(options.srcjar, temp_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/ant.py b/build/android/gyp/ant.py
new file mode 100755
index 0000000..5394b9e
--- /dev/null
+++ b/build/android/gyp/ant.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An Ant wrapper that suppresses useless Ant output.
+
+Ant build scripts output "BUILD SUCCESSFUL" and build timing at the end of
+every build. In the Android build, this just adds a lot of useless noise to the
+build output. This script forwards its arguments to ant, and prints Ant's
+output up until the BUILD SUCCESSFUL line.
+
+Also, when a command fails, this script will re-run that ant command with the
+'-verbose' argument so that the failure is easier to debug.
+"""
+
+import optparse
+import sys
+import traceback
+
+from util import build_utils
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(option_parser)
+  options, args = option_parser.parse_args(argv[1:])
+
+  try:
+    stdout = build_utils.CheckOutput(['ant'] + args)
+  except build_utils.CalledProcessError:
+    # It is very difficult to diagnose ant failures without the '-verbose'
+    # argument. So, when an ant command fails, re-run it with '-verbose' so that
+    # the cause of the failure is easier to identify.
+    verbose_args = ['-verbose'] + [a for a in args if a != '-quiet']
+    try:
+      stdout = build_utils.CheckOutput(['ant'] + verbose_args)
+    except build_utils.CalledProcessError:
+      traceback.print_exc()
+      sys.exit(1)
+
+    # If this did sys.exit(1), building again would succeed (which would be
+    # awkward). Instead, just print a big warning.
+    build_utils.PrintBigWarning(
+        'This is unexpected. `ant ' + ' '.join(args) + '` failed. ' +
+        'But running `ant ' + ' '.join(verbose_args) + '` passed.')
+
+  stdout = stdout.strip().split('\n')
+  for line in stdout:
+    if line.strip() == 'BUILD SUCCESSFUL':
+      break
+    print line
+
+  if options.depfile:
+    assert '-buildfile' in args
+    ant_buildfile = args[args.index('-buildfile') + 1]
+
+    build_utils.WriteDepfile(
+        options.depfile,
+        [ant_buildfile] + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/apk_install.py b/build/android/gyp/apk_install.py
new file mode 100755
index 0000000..a512e50
--- /dev/null
+++ b/build/android/gyp/apk_install.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Installs an APK.
+
+"""
+
+import optparse
+import os
+import re
+import sys
+
+from util import build_device
+from util import build_utils
+from util import md5_check
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import constants
+from pylib.utils import apk_helper
+
+
+def GetNewMetadata(device, apk_package):
+  """Gets the metadata on the device for the apk_package apk."""
+  output = device.RunShellCommand('ls -l /data/app/')
+  # Matches lines like:
+  # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+  # org.chromium.chrome.shell.apk
+  # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+  # org.chromium.chrome.shell-1.apk
+  apk_matcher = lambda s: re.match('.*%s(-[0-9]*)?(\.apk)?$' % apk_package, s)
+  matches = filter(apk_matcher, output)
+  return matches[0] if matches else None
+
+def HasInstallMetadataChanged(device, apk_package, metadata_path):
+  """Checks if the metadata on the device for apk_package has changed."""
+  if not os.path.exists(metadata_path):
+    return True
+
+  with open(metadata_path, 'r') as expected_file:
+    return expected_file.read() != device.GetInstallMetadata(apk_package)
+
+
+def RecordInstallMetadata(device, apk_package, metadata_path):
+  """Records the metadata from the device for apk_package."""
+  metadata = GetNewMetadata(device, apk_package)
+  if not metadata:
+    raise Exception('APK install failed unexpectedly.')
+
+  with open(metadata_path, 'w') as outfile:
+    outfile.write(metadata)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--apk-path',
+      help='Path to .apk to install.')
+  parser.add_option('--split-apk-path',
+      help='Path to .apk splits (can be specified multiple times; causes '
+      '--install-multiple to be used).',
+      action='append')
+  parser.add_option('--android-sdk-tools',
+      help='Path to the Android SDK build tools folder. ' +
+           'Required when using --split-apk-path.')
+  parser.add_option('--install-record',
+      help='Path to install record (touched only when APK is installed).')
+  parser.add_option('--build-device-configuration',
+      help='Path to build device configuration.')
+  parser.add_option('--stamp',
+      help='Path to touch on success.')
+  parser.add_option('--configuration-name',
+      help='The build CONFIGURATION_NAME')
+  options, _ = parser.parse_args()
+
+  device = build_device.GetBuildDeviceFromPath(
+      options.build_device_configuration)
+  if not device:
+    return
+
+  constants.SetBuildType(options.configuration_name)
+
+  serial_number = device.GetSerialNumber()
+  apk_package = apk_helper.GetPackageName(options.apk_path)
+
+  metadata_path = '%s.%s.device.time.stamp' % (options.apk_path, serial_number)
+
+  # If the APK on the device does not match the one that was last installed by
+  # the build, then the APK has to be installed (regardless of the md5 record).
+  force_install = HasInstallMetadataChanged(device, apk_package, metadata_path)
+
+
+  def Install():
+    if options.split_apk_path:
+      device.InstallSplitApk(options.apk_path, options.split_apk_path)
+    else:
+      device.Install(options.apk_path, reinstall=True)
+
+    RecordInstallMetadata(device, apk_package, metadata_path)
+    build_utils.Touch(options.install_record)
+
+
+  record_path = '%s.%s.md5.stamp' % (options.apk_path, serial_number)
+  md5_check.CallAndRecordIfStale(
+      Install,
+      record_path=record_path,
+      input_paths=[options.apk_path],
+      force=force_install)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/apk_obfuscate.py b/build/android/gyp/apk_obfuscate.py
new file mode 100755
index 0000000..b075758
--- /dev/null
+++ b/build/android/gyp/apk_obfuscate.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates the obfuscated jar and test jar for an apk.
+
+If proguard is not enabled or 'Release' is not in the configuration name,
+obfuscation will be a no-op.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+from util import proguard_util
+
+
+def ParseArgs(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--android-sdk', help='path to the Android SDK folder')
+  parser.add_option('--android-sdk-tools',
+                    help='path to the Android SDK build tools folder')
+  parser.add_option('--android-sdk-jar',
+                    help='path to Android SDK\'s android.jar')
+  parser.add_option('--proguard-jar-path',
+                    help='Path to proguard.jar in the sdk')
+  parser.add_option('--input-jars-paths',
+                    help='Path to jars to include in obfuscated jar')
+
+  parser.add_option('--proguard-configs',
+                    help='Paths to proguard config files')
+
+  parser.add_option('--configuration-name',
+                    help='Gyp configuration name (i.e. Debug, Release)')
+  parser.add_option('--proguard-enabled', action='store_true',
+                    help='Set if proguard is enabled for this target.')
+
+  parser.add_option('--obfuscated-jar-path',
+                    help='Output path for obfuscated jar.')
+
+  parser.add_option('--testapp', action='store_true',
+                    help='Set this if building an instrumentation test apk')
+  parser.add_option('--tested-apk-obfuscated-jar-path',
+                    help='Path to the obfuscated jar of the tested apk')
+  parser.add_option('--test-jar-path',
+                    help='Output path for jar containing all the test apk\'s '
+                    'code.')
+
+  parser.add_option('--stamp', help='File to touch on success')
+
+  (options, args) = parser.parse_args(argv)
+
+  if args:
+    parser.error('No positional arguments should be given. ' + str(args))
+
+  # Check that required options have been provided.
+  required_options = (
+      'android_sdk',
+      'android_sdk_tools',
+      'android_sdk_jar',
+      'proguard_jar_path',
+      'input_jars_paths',
+      'configuration_name',
+      'obfuscated_jar_path',
+      )
+
+  if options.testapp:
+    required_options += (
+        'test_jar_path',
+        )
+
+  build_utils.CheckOptions(options, parser, required=required_options)
+  return options, args
+
+
+def DoProguard(options):
+  proguard = proguard_util.ProguardCmdBuilder(options.proguard_jar_path)
+  proguard.outjar(options.obfuscated_jar_path)
+
+  library_classpath = [options.android_sdk_jar]
+  input_jars = build_utils.ParseGypList(options.input_jars_paths)
+
+  exclude_paths = []
+  configs = build_utils.ParseGypList(options.proguard_configs)
+  if options.tested_apk_obfuscated_jar_path:
+    # configs should only contain the process_resources.py generated config.
+    assert len(configs) == 1, (
+        'test apks should not have custom proguard configs: ' + str(configs))
+    tested_jar_info = build_utils.ReadJson(
+        options.tested_apk_obfuscated_jar_path + '.info')
+    exclude_paths = tested_jar_info['inputs']
+    configs = tested_jar_info['configs']
+
+    proguard.is_test(True)
+    proguard.mapping(options.tested_apk_obfuscated_jar_path + '.mapping')
+    library_classpath.append(options.tested_apk_obfuscated_jar_path)
+
+  proguard.libraryjars(library_classpath)
+  proguard_injars = [p for p in input_jars if p not in exclude_paths]
+  proguard.injars(proguard_injars)
+  proguard.configs(configs)
+
+  proguard.CheckOutput()
+
+  this_info = {
+    'inputs': proguard_injars,
+    'configs': configs
+  }
+
+  build_utils.WriteJson(
+      this_info, options.obfuscated_jar_path + '.info')
+
+
+def main(argv):
+  options, _ = ParseArgs(argv)
+
+  input_jars = build_utils.ParseGypList(options.input_jars_paths)
+
+  if options.testapp:
+    dependency_class_filters = [
+        '*R.class', '*R$*.class', '*Manifest.class', '*BuildConfig.class']
+    build_utils.MergeZips(
+        options.test_jar_path, input_jars, dependency_class_filters)
+
+  if options.configuration_name == 'Release' and options.proguard_enabled:
+    DoProguard(options)
+  else:
+    output_files = [
+        options.obfuscated_jar_path,
+        options.obfuscated_jar_path + '.info',
+        options.obfuscated_jar_path + '.dump',
+        options.obfuscated_jar_path + '.seeds',
+        options.obfuscated_jar_path + '.usage',
+        options.obfuscated_jar_path + '.mapping']
+    for f in output_files:
+      if os.path.exists(f):
+        os.remove(f)
+      build_utils.Touch(f)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/copy_ex.py b/build/android/gyp/copy_ex.py
new file mode 100755
index 0000000..a474e77
--- /dev/null
+++ b/build/android/gyp/copy_ex.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies files to a directory."""
+
+import optparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def _get_all_files(base):
+  """Returns a list of all the files in |base|. Each entry is relative to the
+  last path entry of |base|."""
+  result = []
+  dirname = os.path.dirname(base)
+  for root, _, files in os.walk(base):
+    result.extend([os.path.join(root[len(dirname):], f) for f in files])
+  return result
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--dest', help='Directory to copy files to.')
+  parser.add_option('--files', action='append',
+                    help='List of files to copy.')
+  parser.add_option('--clear', action='store_true',
+                    help='If set, the destination directory will be deleted '
+                    'before copying files to it. This is highly recommended to '
+                    'ensure that no stale files are left in the directory.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args(args)
+
+  if options.clear:
+    build_utils.DeleteDirectory(options.dest)
+    build_utils.MakeDirectory(options.dest)
+
+  files = []
+  for file_arg in options.files:
+    files += build_utils.ParseGypList(file_arg)
+
+  deps = []
+
+  for f in files:
+    if os.path.isdir(f):
+      if not options.clear:
+        print ('To avoid stale files you must use --clear when copying '
+               'directories')
+        sys.exit(-1)
+      shutil.copytree(f, os.path.join(options.dest, os.path.basename(f)))
+      deps.extend(_get_all_files(f))
+    else:
+      shutil.copy(f, options.dest)
+      deps.append(f)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        deps + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
+
diff --git a/build/android/gyp/create_device_library_links.py b/build/android/gyp/create_device_library_links.py
new file mode 100755
index 0000000..3e630b6
--- /dev/null
+++ b/build/android/gyp/create_device_library_links.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates symlinks to native libraries for an APK.
+
+The native libraries should have previously been pushed to the device (in
+options.target_dir). This script then creates links in an apk's lib/ folder to
+those native libraries.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_device
+from util import build_utils
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import constants
+from pylib.utils import apk_helper
+
+def RunShellCommand(device, cmd):
+  output = device.RunShellCommand(cmd)
+
+  if output:
+    raise Exception(
+        'Unexpected output running command: ' + cmd + '\n' +
+        '\n'.join(output))
+
+
+def CreateSymlinkScript(options):
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  link_cmd = (
+      'rm $APK_LIBRARIES_DIR/%(lib_basename)s > /dev/null 2>&1 \n'
+      'ln -s $STRIPPED_LIBRARIES_DIR/%(lib_basename)s '
+        '$APK_LIBRARIES_DIR/%(lib_basename)s \n'
+      )
+
+  script = '#!/bin/sh \n'
+
+  for lib in libraries:
+    script += link_cmd % { 'lib_basename': lib }
+
+  with open(options.script_host_path, 'w') as scriptfile:
+    scriptfile.write(script)
+
+
+def TriggerSymlinkScript(options):
+  device = build_device.GetBuildDeviceFromPath(
+      options.build_device_configuration)
+  if not device:
+    return
+
+  apk_package = apk_helper.GetPackageName(options.apk)
+  apk_libraries_dir = '/data/data/%s/lib' % apk_package
+
+  device_dir = os.path.dirname(options.script_device_path)
+  mkdir_cmd = ('if [ ! -e %(dir)s ]; then mkdir -p %(dir)s; fi ' %
+      { 'dir': device_dir })
+  RunShellCommand(device, mkdir_cmd)
+  device.PushChangedFiles([(options.script_host_path,
+                            options.script_device_path)])
+
+  trigger_cmd = (
+      'APK_LIBRARIES_DIR=%(apk_libraries_dir)s; '
+      'STRIPPED_LIBRARIES_DIR=%(target_dir)s; '
+      '. %(script_device_path)s'
+      ) % {
+          'apk_libraries_dir': apk_libraries_dir,
+          'target_dir': options.target_dir,
+          'script_device_path': options.script_device_path
+          }
+  RunShellCommand(device, trigger_cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  parser.add_option('--apk', help='Path to the apk.')
+  parser.add_option('--script-host-path',
+      help='Path on the host for the symlink script.')
+  parser.add_option('--script-device-path',
+      help='Path on the device to push the created symlink script.')
+  parser.add_option('--libraries',
+      help='List of native libraries.')
+  parser.add_option('--target-dir',
+      help='Device directory that contains the target libraries for symlinks.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--build-device-configuration',
+      help='Path to build device configuration.')
+  parser.add_option('--configuration-name',
+      help='The build CONFIGURATION_NAME')
+  options, _ = parser.parse_args(args)
+
+  required_options = ['apk', 'libraries', 'script_host_path',
+      'script_device_path', 'target_dir', 'configuration_name']
+  build_utils.CheckOptions(options, parser, required=required_options)
+  constants.SetBuildType(options.configuration_name)
+
+  CreateSymlinkScript(options)
+  TriggerSymlinkScript(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_dist_jar.py b/build/android/gyp/create_dist_jar.py
new file mode 100755
index 0000000..0d31c5d
--- /dev/null
+++ b/build/android/gyp/create_dist_jar.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merges a list of jars into a single jar."""
+
+import optparse
+import sys
+
+from util import build_utils
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--output', help='Path to output jar.')
+  parser.add_option('--inputs', action='append', help='List of jar inputs.')
+  options, _ = parser.parse_args(args)
+  build_utils.CheckOptions(options, parser, ['output', 'inputs'])
+
+  input_jars = []
+  for inputs_arg in options.inputs:
+    input_jars.extend(build_utils.ParseGypList(inputs_arg))
+
+  build_utils.MergeZips(options.output, input_jars)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        input_jars + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_flutter_jar.py b/build/android/gyp/create_flutter_jar.py
new file mode 100644
index 0000000..c30bae2
--- /dev/null
+++ b/build/android/gyp/create_flutter_jar.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create a JAR incorporating all the components required to build a Flutter application"""
+
+import optparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--output', help='Path to output jar.')
+  parser.add_option('--dist_jar', help='Flutter shell Java code jar.')
+  parser.add_option('--native_lib', action='append', help='Native code library.')
+  parser.add_option('--android_abi', help='Native code ABI.')
+  parser.add_option('--asset_dir', help='Path to assets.')
+  options, _ = parser.parse_args(args)
+  build_utils.CheckOptions(options, parser, [
+    'output', 'dist_jar', 'native_lib', 'android_abi', 'asset_dir'
+  ])
+
+  input_deps = []
+
+  with zipfile.ZipFile(options.output, 'w', zipfile.ZIP_DEFLATED) as out_zip:
+    input_deps.append(options.dist_jar)
+    with zipfile.ZipFile(options.dist_jar, 'r') as dist_zip:
+      for dist_file in dist_zip.infolist():
+        if dist_file.filename.endswith('.class'):
+          out_zip.writestr(dist_file.filename, dist_zip.read(dist_file.filename))
+
+    for native_lib in options.native_lib:
+      input_deps.append(native_lib)
+      out_zip.write(native_lib,
+                    'lib/%s/%s' % (options.android_abi, os.path.basename(native_lib)))
+
+    for asset_file in os.listdir(options.asset_dir):
+      # Record the full path (not the bare filename) so the depfile entry
+      # resolves correctly.
+      asset_path = os.path.join(options.asset_dir, asset_file)
+      input_deps.append(asset_path)
+      out_zip.write(asset_path, 'assets/%s' % asset_file)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        input_deps + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_java_binary_script.py b/build/android/gyp/create_java_binary_script.py
new file mode 100755
index 0000000..5de43f2
--- /dev/null
+++ b/build/android/gyp/create_java_binary_script.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple script to run a java "binary".
+
+This creates a script that sets up the java command line for running a java
+jar. This includes correctly setting the classpath and the main class.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+# The java command must be executed in the current directory because there may
+# be user-supplied paths in the args. The script receives the classpath relative
+# to the directory that the script is written in and then, when run, must
+# recalculate the paths relative to the current directory.
+script_template = """\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_java_binary_script.py
+
+import os
+import sys
+
+self_dir = os.path.dirname(__file__)
+classpath = [{classpath}]
+if os.getcwd() != self_dir:
+  offset = os.path.relpath(self_dir, os.getcwd())
+  classpath = [os.path.join(offset, p) for p in classpath]
+java_args = [
+  "java",
+  "-classpath", ":".join(classpath),
+  "-enableassertions",
+  \"{main_class}\"] + sys.argv[1:]
+os.execvp("java", java_args)
+"""
+
+def main(argv):
+  argv = build_utils.ExpandFileArgs(argv)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--output', help='Output path for executable script.')
+  parser.add_option('--jar-path', help='Path to the main jar.')
+  parser.add_option('--main-class',
+      help='Name of the java class with the "main" entry point.')
+  parser.add_option('--classpath', action='append',
+      help='Classpath for running the jar.')
+  options, _ = parser.parse_args(argv)
+
+  classpath = [options.jar_path]
+  for cp_arg in options.classpath:
+    classpath += build_utils.ParseGypList(cp_arg)
+
+  run_dir = os.path.dirname(options.output)
+  classpath = [os.path.relpath(p, run_dir) for p in classpath]
+
+  with open(options.output, 'w') as script:
+    script.write(script_template.format(
+      classpath=('"%s"' % '", "'.join(classpath)),
+      main_class=options.main_class))
+
+  os.chmod(options.output, 0750)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_placeholder_files.py b/build/android/gyp/create_placeholder_files.py
new file mode 100755
index 0000000..103e1df
--- /dev/null
+++ b/build/android/gyp/create_placeholder_files.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create placeholder files.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '--dest-lib-dir',
+      help='Destination directory to have placeholder files.')
+  parser.add_option(
+      '--stamp',
+      help='Path to touch on success')
+
+  options, args = parser.parse_args()
+
+  for name in args:
+    target_path = os.path.join(options.dest_lib_dir, name)
+    build_utils.Touch(target_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/create_standalone_apk.py b/build/android/gyp/create_standalone_apk.py
new file mode 100755
index 0000000..c560599
--- /dev/null
+++ b/build/android/gyp/create_standalone_apk.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Combines stripped libraries and incomplete APK into single standalone APK.
+
+"""
+
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+from util import md5_check
+
+def CreateStandaloneApk(options):
+  def DoZip():
+    with tempfile.NamedTemporaryFile(suffix='.zip') as intermediate_file:
+      intermediate_path = intermediate_file.name
+      shutil.copy(options.input_apk_path, intermediate_path)
+      apk_path_abs = os.path.abspath(intermediate_path)
+      build_utils.CheckOutput(
+          ['zip', '-r', '-1', apk_path_abs, 'lib'],
+          cwd=options.libraries_top_dir)
+      shutil.copy(intermediate_path, options.output_apk_path)
+
+  input_paths = [options.input_apk_path, options.libraries_top_dir]
+  record_path = '%s.standalone.stamp' % options.input_apk_path
+  md5_check.CallAndRecordIfStale(
+      DoZip,
+      record_path=record_path,
+      input_paths=input_paths)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--libraries-top-dir',
+      help='Top directory that contains libraries '
+      '(i.e. library paths are like '
+      'libraries_top_dir/lib/android_app_abi/foo.so).')
+  parser.add_option('--input-apk-path', help='Path to incomplete APK.')
+  parser.add_option('--output-apk-path', help='Path for standalone APK.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  options, _ = parser.parse_args()
+
+  required_options = ['libraries_top_dir', 'input_apk_path', 'output_apk_path']
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  CreateStandaloneApk(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/create_test_runner_script.py b/build/android/gyp/create_test_runner_script.py
new file mode 100755
index 0000000..247bf20
--- /dev/null
+++ b/build/android/gyp/create_test_runner_script.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a script to run an android test using build/android/test_runner.py.
+"""
+
+import argparse
+import os
+import sys
+
+from util import build_utils
+
+SCRIPT_TEMPLATE = """\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_test_runner_script.py
+
+import logging
+import os
+import sys
+
+def main():
+  script_directory = os.path.dirname(__file__)
+
+  def ResolvePath(path):
+    \"\"\"Returns an absolute filepath given a path relative to this script.
+    \"\"\"
+    return os.path.abspath(os.path.join(script_directory, path))
+
+  test_runner_path = ResolvePath('{test_runner_path}')
+  test_runner_args = {test_runner_args}
+  test_runner_path_args = {test_runner_path_args}
+  for arg, path in test_runner_path_args.iteritems():
+    test_runner_args.extend([arg, ResolvePath(path)])
+
+  test_runner_cmd = ' '.join(
+      [test_runner_path] + test_runner_args + sys.argv[1:])
+  logging.critical(test_runner_cmd)
+  os.system(test_runner_cmd)
+
+if __name__ == '__main__':
+  sys.exit(main())
+"""
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--script-output-path',
+                      help='Output path for executable script.')
+  parser.add_argument('--depfile',
+                      help='Path to the depfile. This must be specified as '
+                           "the action's first output.")
+  # We need to intercept any test runner path arguments and make all
+  # of the paths relative to the output script directory.
+  group = parser.add_argument_group('Test runner path arguments.')
+  group.add_argument('--output-directory')
+  group.add_argument('--isolate-file-path')
+  group.add_argument('--support-apk')
+  args, test_runner_args = parser.parse_known_args()
+
+  def RelativizePathToScript(path):
+    """Returns the path relative to the output script directory."""
+    return os.path.relpath(path, os.path.dirname(args.script_output_path))
+
+  test_runner_path = os.path.join(
+      os.path.dirname(__file__), os.path.pardir, 'test_runner.py')
+  test_runner_path = RelativizePathToScript(test_runner_path)
+
+  test_runner_path_args = {}
+  if args.output_directory:
+    test_runner_path_args['--output-directory'] = RelativizePathToScript(
+        args.output_directory)
+  if args.isolate_file_path:
+    test_runner_path_args['--isolate-file-path'] = RelativizePathToScript(
+        args.isolate_file_path)
+  if args.support_apk:
+    test_runner_path_args['--support-apk'] = RelativizePathToScript(
+        args.support_apk)
+
+  with open(args.script_output_path, 'w') as script:
+    script.write(SCRIPT_TEMPLATE.format(
+        test_runner_path=str(test_runner_path),
+        test_runner_args=str(test_runner_args),
+        test_runner_path_args=str(test_runner_path_args)))
+
+  os.chmod(args.script_output_path, 0750)
+
+  if args.depfile:
+    build_utils.WriteDepfile(
+        args.depfile,
+        build_utils.GetPythonDependencies())
+
+if __name__ == '__main__':
+  sys.exit(main())
\ No newline at end of file
diff --git a/build/android/gyp/dex.py b/build/android/gyp/dex.py
new file mode 100755
index 0000000..c26d23a
--- /dev/null
+++ b/build/android/gyp/dex.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+from util import md5_check
+
+
+def DoDex(options, paths):
+  dx_binary = os.path.join(options.android_sdk_tools, 'dx')
+  # See http://crbug.com/272064 for context on --force-jumbo.
+  dex_cmd = [dx_binary, '--dex', '--force-jumbo', '--output', options.dex_path]
+  if options.no_locals != '0':
+    dex_cmd.append('--no-locals')
+
+  dex_cmd += paths
+
+  record_path = '%s.md5.stamp' % options.dex_path
+  md5_check.CallAndRecordIfStale(
+      lambda: build_utils.CheckOutput(dex_cmd, print_stderr=False),
+      record_path=record_path,
+      input_paths=paths,
+      input_strings=dex_cmd,
+      force=not os.path.exists(options.dex_path))
+  build_utils.WriteJson(
+      [os.path.relpath(p, options.output_directory) for p in paths],
+      options.dex_path + '.inputs')
+
+
+def main():
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--android-sdk-tools',
+                    help='Android sdk build tools directory.')
+  parser.add_option('--output-directory',
+                    default=os.getcwd(),
+                    help='Path to the output build directory.')
+  parser.add_option('--dex-path', help='Dex output path.')
+  parser.add_option('--configuration-name',
+                    help='The build CONFIGURATION_NAME.')
+  parser.add_option('--proguard-enabled',
+                    help='"true" if proguard is enabled.')
+  parser.add_option('--proguard-enabled-input-path',
+                    help=('Path to dex in Release mode when proguard '
+                          'is enabled.'))
+  parser.add_option('--no-locals',
+                    help='Exclude locals list from the dex file.')
+  parser.add_option('--inputs', help='A list of additional input paths.')
+  parser.add_option('--excluded-paths',
+                    help='A list of paths to exclude from the dex file.')
+
+  options, paths = parser.parse_args(args)
+
+  required_options = ('android_sdk_tools',)
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  if (options.proguard_enabled == 'true'
+      and options.configuration_name == 'Release'):
+    paths = [options.proguard_enabled_input_path]
+
+  if options.inputs:
+    paths += build_utils.ParseGypList(options.inputs)
+
+  if options.excluded_paths:
+    # Excluded paths are relative to the output directory.
+    exclude_paths = build_utils.ParseGypList(options.excluded_paths)
+    paths = [p for p in paths if not
+             os.path.relpath(p, options.output_directory) in exclude_paths]
+
+  DoDex(options, paths)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        paths + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/emma_instr.py b/build/android/gyp/emma_instr.py
new file mode 100755
index 0000000..6f3555a
--- /dev/null
+++ b/build/android/gyp/emma_instr.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Instruments classes and jar files.
+
+This script corresponds to the 'emma_instr' action in the java build process.
+Depending on whether emma_instrument is set, the 'emma_instr' action will either
+call one of the instrument commands, or the copy command.
+
+Possible commands are:
+- instrument_jar: Accepts a jar and instruments it using emma.jar.
+- instrument_classes: Accepts a directory containing java classes and
+      instruments it using emma.jar.
+- copy: Called when EMMA coverage is not enabled. This allows us to make
+      this a required step without necessarily instrumenting on every build.
+      Also removes any stale coverage files.
+"""
+
+import collections
+import json
+import os
+import shutil
+import sys
+import tempfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.utils import command_option_parser
+
+from util import build_utils
+
+
+def _AddCommonOptions(option_parser):
+  """Adds common options to |option_parser|."""
+  option_parser.add_option('--input-path',
+                           help=('Path to input file(s). Either the classes '
+                                 'directory, or the path to a jar.'))
+  option_parser.add_option('--output-path',
+                           help=('Path to output final file(s) to. Either the '
+                                 'final classes directory, or the directory in '
+                                 'which to place the instrumented/copied jar.'))
+  option_parser.add_option('--stamp', help='Path to touch when done.')
+  option_parser.add_option('--coverage-file',
+                           help='File to create with coverage metadata.')
+  option_parser.add_option('--sources-file',
+                           help='File to create with the list of sources.')
+
+
+def _AddInstrumentOptions(option_parser):
+  """Adds options related to instrumentation to |option_parser|."""
+  _AddCommonOptions(option_parser)
+  option_parser.add_option('--sources',
+                           help='Space separated list of sources.')
+  option_parser.add_option('--src-root',
+                           help='Root of the src repository.')
+  option_parser.add_option('--emma-jar',
+                           help='Path to emma.jar.')
+  option_parser.add_option(
+      '--filter-string', default='',
+      help=('Filter string consisting of a list of inclusion/exclusion '
+            'patterns separated with whitespace and/or comma.'))
+
+
+def _RunCopyCommand(_command, options, _, option_parser):
+  """Copies the jar from input to output locations.
+
+  Also removes any old coverage/sources file.
+
+  Args:
+    command: String indicating the command that was received to trigger
+        this function.
+    options: optparse options dictionary.
+    args: List of extra args from optparse.
+    option_parser: optparse.OptionParser object.
+
+  Returns:
+    An exit code.
+  """
+  if not (options.input_path and options.output_path and
+          options.coverage_file and options.sources_file):
+    option_parser.error('All arguments are required.')
+
+  coverage_file = os.path.join(os.path.dirname(options.output_path),
+                               options.coverage_file)
+  sources_file = os.path.join(os.path.dirname(options.output_path),
+                              options.sources_file)
+  if os.path.exists(coverage_file):
+    os.remove(coverage_file)
+  if os.path.exists(sources_file):
+    os.remove(sources_file)
+
+  if os.path.isdir(options.input_path):
+    shutil.rmtree(options.output_path, ignore_errors=True)
+    shutil.copytree(options.input_path, options.output_path)
+  else:
+    shutil.copy(options.input_path, options.output_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+def _CreateSourcesFile(sources_string, sources_file, src_root):
+  """Adds all normalized source directories to |sources_file|.
+
+  Args:
+    sources_string: String generated from gyp containing the list of sources.
+    sources_file: File into which to write the JSON list of sources.
+    src_root: Root which sources added to the file should be relative to.
+
+  Returns:
+    An exit code.
+  """
+  src_root = os.path.abspath(src_root)
+  sources = build_utils.ParseGypList(sources_string)
+  relative_sources = []
+  for s in sources:
+    abs_source = os.path.abspath(s)
+    if abs_source[:len(src_root)] != src_root:
+      print ('Error: found source directory not under repository root: %s %s'
+             % (abs_source, src_root))
+      return 1
+    rel_source = os.path.relpath(abs_source, src_root)
+
+    relative_sources.append(rel_source)
+
+  with open(sources_file, 'w') as f:
+    json.dump(relative_sources, f)
+
+  return 0
+
+
+def _RunInstrumentCommand(command, options, _, option_parser):
+  """Instruments the classes/jar files using EMMA.
+
+  Args:
+    command: 'instrument_jar' or 'instrument_classes'. This distinguishes
+        whether we copy the output from the created lib/ directory, or classes/
+        directory.
+    options: optparse options dictionary.
+    args: List of extra args from optparse.
+    option_parser: optparse.OptionParser object.
+
+  Returns:
+    An exit code.
+  """
+  if not (options.input_path and options.output_path and
+          options.coverage_file and options.sources_file and options.sources and
+          options.src_root and options.emma_jar):
+    option_parser.error('All arguments are required.')
+
+  coverage_file = os.path.join(os.path.dirname(options.output_path),
+                               options.coverage_file)
+  sources_file = os.path.join(os.path.dirname(options.output_path),
+                              options.sources_file)
+  if os.path.exists(coverage_file):
+    os.remove(coverage_file)
+  temp_dir = tempfile.mkdtemp()
+  try:
+    cmd = ['java', '-cp', options.emma_jar,
+           'emma', 'instr',
+           '-ip', options.input_path,
+           '-ix', options.filter_string,
+           '-d', temp_dir,
+           '-out', coverage_file,
+           '-m', 'fullcopy']
+    build_utils.CheckOutput(cmd)
+
+    if command == 'instrument_jar':
+      for jar in os.listdir(os.path.join(temp_dir, 'lib')):
+        shutil.copy(os.path.join(temp_dir, 'lib', jar),
+                    options.output_path)
+    else:  # 'instrument_classes'
+      if os.path.isdir(options.output_path):
+        shutil.rmtree(options.output_path, ignore_errors=True)
+      shutil.copytree(os.path.join(temp_dir, 'classes'),
+                      options.output_path)
+  finally:
+    shutil.rmtree(temp_dir)
+
+  _CreateSourcesFile(options.sources, sources_file, options.src_root)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+CommandFunctionTuple = collections.namedtuple(
+    'CommandFunctionTuple', ['add_options_func', 'run_command_func'])
+VALID_COMMANDS = {
+    'copy': CommandFunctionTuple(_AddCommonOptions,
+                                 _RunCopyCommand),
+    'instrument_jar': CommandFunctionTuple(_AddInstrumentOptions,
+                                           _RunInstrumentCommand),
+    'instrument_classes': CommandFunctionTuple(_AddInstrumentOptions,
+                                               _RunInstrumentCommand),
+}
+
+
+def main():
+  option_parser = command_option_parser.CommandOptionParser(
+      commands_dict=VALID_COMMANDS)
+  command_option_parser.ParseAndExecute(option_parser)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/finalize_apk.py b/build/android/gyp/finalize_apk.py
new file mode 100755
index 0000000..0a80035
--- /dev/null
+++ b/build/android/gyp/finalize_apk.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and zipaligns APK.
+
+"""
+
+import optparse
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+
+def RenameInflateAndAddPageAlignment(
+    rezip_apk_jar_path, in_zip_file, out_zip_file):
+  rezip_apk_cmd = [
+      'java',
+      '-classpath',
+      rezip_apk_jar_path,
+      'RezipApk',
+      'renamealign',
+      in_zip_file,
+      out_zip_file,
+    ]
+  build_utils.CheckOutput(rezip_apk_cmd)
+
+
+def ReorderAndAlignApk(rezip_apk_jar_path, in_zip_file, out_zip_file):
+  rezip_apk_cmd = [
+      'java',
+      '-classpath',
+      rezip_apk_jar_path,
+      'RezipApk',
+      'reorder',
+      in_zip_file,
+      out_zip_file,
+    ]
+  build_utils.CheckOutput(rezip_apk_cmd)
+
+
+def JarSigner(key_path, key_name, key_passwd, unsigned_path, signed_path):
+  shutil.copy(unsigned_path, signed_path)
+  sign_cmd = [
+      'jarsigner',
+      '-sigalg', 'MD5withRSA',
+      '-digestalg', 'SHA1',
+      '-keystore', key_path,
+      '-storepass', key_passwd,
+      signed_path,
+      key_name,
+    ]
+  build_utils.CheckOutput(sign_cmd)
+
+
+def AlignApk(zipalign_path, unaligned_path, final_path):
+  align_cmd = [
+      zipalign_path,
+      '-f', '4',  # 4 bytes
+      unaligned_path,
+      final_path,
+      ]
+  build_utils.CheckOutput(align_cmd)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--rezip-apk-jar-path',
+                    help='Path to the RezipApk jar file.')
+  parser.add_option('--zipalign-path', help='Path to the zipalign tool.')
+  parser.add_option('--unsigned-apk-path', help='Path to input unsigned APK.')
+  parser.add_option('--final-apk-path',
+      help='Path to output signed and aligned APK.')
+  parser.add_option('--key-path', help='Path to keystore for signing.')
+  parser.add_option('--key-passwd', help='Keystore password')
+  parser.add_option('--key-name', help='Keystore name')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--load-library-from-zip', type='int',
+      help='If non-zero, build the APK such that the library can be loaded ' +
+           'directly from the zip file using the crazy linker. The library ' +
+           'will be renamed, uncompressed and page aligned.')
+
+  options, _ = parser.parse_args()
+
+  FinalizeApk(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile, build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+def FinalizeApk(options):
+  with tempfile.NamedTemporaryFile() as signed_apk_path_tmp, \
+      tempfile.NamedTemporaryFile() as apk_to_sign_tmp:
+
+    if options.load_library_from_zip:
+      # We alter the name of the library so that the Android Package Manager
+      # does not extract it into a separate file. This must be done before
+      # signing, as the filename is part of the signed manifest. At the same
+      # time we uncompress the library, which is necessary so that it can be
+      # loaded directly from the APK.
+      # Move the library to a page boundary by adding a page alignment file.
+      apk_to_sign = apk_to_sign_tmp.name
+      RenameInflateAndAddPageAlignment(
+          options.rezip_apk_jar_path, options.unsigned_apk_path, apk_to_sign)
+    else:
+      apk_to_sign = options.unsigned_apk_path
+
+    signed_apk_path = signed_apk_path_tmp.name
+    JarSigner(options.key_path, options.key_name, options.key_passwd,
+              apk_to_sign, signed_apk_path)
+
+    if options.load_library_from_zip:
+      # Reorder the contents of the APK. This re-establishes the canonical
+      # order which means the library will be back at its page aligned location.
+      # This step also aligns uncompressed items to 4 bytes.
+      ReorderAndAlignApk(
+          options.rezip_apk_jar_path, signed_apk_path, options.final_apk_path)
+    else:
+      # Align uncompressed items to 4 bytes
+      AlignApk(options.zipalign_path, signed_apk_path, options.final_apk_path)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/finalize_splits.py b/build/android/gyp/finalize_splits.py
new file mode 100755
index 0000000..a6796bb
--- /dev/null
+++ b/build/android/gyp/finalize_splits.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and zipaligns split APKs.
+
+This script is required only by GYP (not GN).
+"""
+
+import optparse
+import sys
+
+import finalize_apk
+from util import build_utils
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--zipalign-path', help='Path to the zipalign tool.')
+  parser.add_option('--resource-packaged-apk-path',
+      help='Base path to input .ap_s.')
+  parser.add_option('--base-output-path',
+      help='Path to output .apk, minus extension.')
+  parser.add_option('--key-path', help='Path to keystore for signing.')
+  parser.add_option('--key-passwd', help='Keystore password')
+  parser.add_option('--key-name', help='Keystore name')
+  parser.add_option('--densities',
+      help='Comma-separated list of densities to finalize.')
+  parser.add_option('--languages',
+      help='GYP list of language splits to finalize.')
+
+  options, _ = parser.parse_args()
+  options.load_library_from_zip = 0
+
+  if options.densities:
+    for density in options.densities.split(','):
+      options.unsigned_apk_path = ("%s_%s" %
+          (options.resource_packaged_apk_path, density))
+      options.final_apk_path = ("%s-density-%s.apk" %
+          (options.base_output_path, density))
+      finalize_apk.FinalizeApk(options)
+
+  if options.languages:
+    for lang in build_utils.ParseGypList(options.languages):
+      options.unsigned_apk_path = ("%s_%s" %
+          (options.resource_packaged_apk_path, lang))
+      options.final_apk_path = ("%s-lang-%s.apk" %
+          (options.base_output_path, lang))
+      finalize_apk.FinalizeApk(options)
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/find.py b/build/android/gyp/find.py
new file mode 100755
index 0000000..a9f1d49
--- /dev/null
+++ b/build/android/gyp/find.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Finds files in directories.
+"""
+
+import fnmatch
+import optparse
+import os
+import sys
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--pattern', default='*', help='File pattern to match.')
+  options, directories = parser.parse_args(argv)
+
+  for d in directories:
+    if not os.path.exists(d):
+      print >> sys.stderr, '%s does not exist' % d
+      return 1
+    for root, _, filenames in os.walk(d):
+      for f in fnmatch.filter(filenames, options.pattern):
+        print os.path.join(root, f)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/find_sun_tools_jar.py b/build/android/gyp/find_sun_tools_jar.py
new file mode 100755
index 0000000..2f15a15
--- /dev/null
+++ b/build/android/gyp/find_sun_tools_jar.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This finds the java distribution's tools.jar and copies it somewhere.
+"""
+
+import argparse
+import os
+import re
+import shutil
+import sys
+
+from util import build_utils
+
+RT_JAR_FINDER = re.compile(r'\[Opened (.*)/jre/lib/rt.jar\]')
+
+def main():
+  parser = argparse.ArgumentParser(description='Find Sun Tools Jar')
+  parser.add_argument('--depfile',
+                      help='Path to depfile. This must be specified as the '
+                           'action\'s first output.')
+  parser.add_argument('--output', required=True)
+  args = parser.parse_args()
+
+  sun_tools_jar_path = FindSunToolsJarPath()
+
+  if sun_tools_jar_path is None:
+    raise Exception("Couldn\'t find tools.jar")
+
+  # Using copyfile instead of copy() because copy() calls copymode()
+  # We don't want the locked mode because we may copy over this file again
+  shutil.copyfile(sun_tools_jar_path, args.output)
+
+  if args.depfile:
+    build_utils.WriteDepfile(
+        args.depfile,
+        [sun_tools_jar_path] + build_utils.GetPythonDependencies())
+
+
+def FindSunToolsJarPath():
+  # This works with at least openjdk 1.6, 1.7 and sun java 1.6, 1.7
+  stdout = build_utils.CheckOutput(
+      ["java", "-verbose", "-version"], print_stderr=False)
+  for ln in stdout.splitlines():
+    match = RT_JAR_FINDER.match(ln)
+    if match:
+      return os.path.join(match.group(1), 'lib', 'tools.jar')
+
+  return None
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/gcc_preprocess.py b/build/android/gyp/gcc_preprocess.py
new file mode 100755
index 0000000..03becf9
--- /dev/null
+++ b/build/android/gyp/gcc_preprocess.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+def DoGcc(options):
+  build_utils.MakeDirectory(os.path.dirname(options.output))
+
+  gcc_cmd = [ 'gcc' ]  # invoke host gcc.
+  if options.defines:
+    gcc_cmd.extend(sum(map(lambda w: ['-D', w], options.defines), []))
+  gcc_cmd.extend([
+      '-E',                  # stop after preprocessing.
+      '-D', 'ANDROID',       # Specify ANDROID define for pre-processor.
+      '-x', 'c-header',      # treat sources as C header files
+      '-P',                  # disable line markers, i.e. '#line 309'
+      '-I', options.include_path,
+      '-o', options.output,
+      options.template
+      ])
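+
+  # With, e.g., --defines FOO --include-path inc --template t.h
+  # --output out.h (illustrative values), the command assembled above is:
+  #   gcc -D FOO -E -D ANDROID -x c-header -P -I inc -o out.h t.h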
+
+  build_utils.CheckOutput(gcc_cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--include-path', help='Include path for gcc.')
+  parser.add_option('--template', help='Path to template.')
+  parser.add_option('--output', help='Path for generated file.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--defines', help='Pre-defines macros', action='append')
+
+  options, _ = parser.parse_args(args)
+
+  DoGcc(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/generate_split_manifest.py b/build/android/gyp/generate_split_manifest.py
new file mode 100755
index 0000000..9cb3bca
--- /dev/null
+++ b/build/android/gyp/generate_split_manifest.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates an AndroidManifest.xml for an APK split.
+
+Given the manifest file for the main APK, generates an AndroidManifest.xml with
+the values required for a Split APK (package, versionCode, etc.).
+"""
+
+import optparse
+import xml.etree.ElementTree
+
+from util import build_utils
+
+MANIFEST_TEMPLATE = """<?xml version="1.0" encoding="utf-8"?>
+<manifest
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    package="%(package)s"
+    split="%(split)s">
+  <uses-sdk android:minSdkVersion="21" />
+  <application android:hasCode="%(has_code)s">
+  </application>
+</manifest>
+"""
+
+def ParseArgs():
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--main-manifest', help='The main manifest of the app')
+  parser.add_option('--out-manifest', help='The output manifest')
+  parser.add_option('--split', help='The name of the split')
+  parser.add_option(
+      '--has-code',
+      action='store_true',
+      default=False,
+      help='Whether the split will contain a .dex file')
+
+  (options, args) = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('main_manifest', 'out_manifest', 'split')
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  return options
+
+
+def Build(main_manifest, split, has_code):
+  """Builds a split manifest based on the manifest of the main APK.
+
+  Args:
+    main_manifest: the XML manifest of the main APK as a string
+    split: the name of the split as a string
+    has_code: whether this split APK will contain .dex files
+
+  Returns:
+    The XML split manifest as a string
+  """
+
+  doc = xml.etree.ElementTree.fromstring(main_manifest)
+  package = doc.get('package')
+
+  return MANIFEST_TEMPLATE % {
+      'package': package,
+      'split': split.replace('-', '_'),
+      'has_code': str(has_code).lower()
+  }
+
+
+def main():
+  options = ParseArgs()
+  main_manifest = file(options.main_manifest).read()
+  split_manifest = Build(
+      main_manifest,
+      options.split,
+      options.has_code)
+
+  with file(options.out_manifest, 'w') as f:
+    f.write(split_manifest)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        [options.main_manifest] + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/generate_v14_compatible_resources.py b/build/android/gyp/generate_v14_compatible_resources.py
new file mode 100755
index 0000000..9c8ff3b
--- /dev/null
+++ b/build/android/gyp/generate_v14_compatible_resources.py
@@ -0,0 +1,319 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Convert Android xml resources to API 14 compatible.
+
+There are two reasons that we cannot just use API 17 attributes,
+so we are generating another set of resources by this script.
+
+1. paddingStart attribute can cause a crash on Galaxy Tab 2.
+2. There is a bug that paddingStart does not override paddingLeft on
+   JB-MR1. This is fixed on JB-MR2. b/8654490
+
+Therefore, this resource generation script can be removed when
+we drop the support for JB-MR1.
+
+Please refer to http://crbug.com/235118 for the details.
+"""
+
+import optparse
+import os
+import re
+import shutil
+import sys
+import xml.dom.minidom as minidom
+
+from util import build_utils
+
+# Note that we are assuming 'android:' is an alias of
+# the namespace 'http://schemas.android.com/apk/res/android'.
+
+GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity')
+
+# Almost all the attributes that have "Start" or "End" in
+# their names should be mapped.
+ATTRIBUTES_TO_MAP = {'paddingStart' : 'paddingLeft',
+                     'drawableStart' : 'drawableLeft',
+                     'layout_alignStart' : 'layout_alignLeft',
+                     'layout_marginStart' : 'layout_marginLeft',
+                     'layout_alignParentStart' : 'layout_alignParentLeft',
+                     'layout_toStartOf' : 'layout_toLeftOf',
+                     'paddingEnd' : 'paddingRight',
+                     'drawableEnd' : 'drawableRight',
+                     'layout_alignEnd' : 'layout_alignRight',
+                     'layout_marginEnd' : 'layout_marginRight',
+                     'layout_alignParentEnd' : 'layout_alignParentRight',
+                     'layout_toEndOf' : 'layout_toRightOf'}
+
+ATTRIBUTES_TO_MAP = dict(['android:' + k, 'android:' + v] for k, v
+                         in ATTRIBUTES_TO_MAP.iteritems())
+
+ATTRIBUTES_TO_MAP_REVERSED = dict([v, k] for k, v
+                                  in ATTRIBUTES_TO_MAP.iteritems())
+
+
+def IterateXmlElements(node):
+  """minidom helper function that iterates all the element nodes.
+  Iteration order is pre-order depth-first."""
+  if node.nodeType == node.ELEMENT_NODE:
+    yield node
+  for child_node in node.childNodes:
+    for child_node_element in IterateXmlElements(child_node):
+      yield child_node_element
+
+
+def ParseAndReportErrors(filename):
+  try:
+    return minidom.parse(filename)
+  except Exception:
+    import traceback
+    traceback.print_exc()
+    sys.stderr.write('Failed to parse XML file: %s\n' % filename)
+    sys.exit(1)
+
+
+def AssertNotDeprecatedAttribute(name, value, filename):
+  """Raises an exception if the given attribute is deprecated."""
+  msg = None
+  if name in ATTRIBUTES_TO_MAP_REVERSED:
+    msg = '{0} should use {1} instead of {2}'.format(filename,
+        ATTRIBUTES_TO_MAP_REVERSED[name], name)
+  elif name in GRAVITY_ATTRIBUTES and ('left' in value or 'right' in value):
+    msg = '{0} should use start/end instead of left/right for {1}'.format(
+        filename, name)
+
+  if msg:
+    msg += ('\nFor background, see: http://android-developers.blogspot.com/'
+            '2013/03/native-rtl-support-in-android-42.html\n'
+            'If you have a legitimate need for this attribute, discuss with '
+            'kkimlabs@chromium.org or newt@chromium.org')
+    raise Exception(msg)
+
+
+def WriteDomToFile(dom, filename):
+  """Write the given dom to filename."""
+  build_utils.MakeDirectory(os.path.dirname(filename))
+  with open(filename, 'w') as f:
+    dom.writexml(f, '', '  ', '\n', encoding='utf-8')
+
+
+def HasStyleResource(dom):
+  """Return True if the dom is a style resource, False otherwise."""
+  root_node = IterateXmlElements(dom).next()
+  return bool(root_node.nodeName == 'resources' and
+              list(root_node.getElementsByTagName('style')))
+
+
+def ErrorIfStyleResourceExistsInDir(input_dir):
+  """If a style resource is in input_dir, raises an exception."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    dom = ParseAndReportErrors(input_filename)
+    if HasStyleResource(dom):
+      raise Exception('error: style file ' + input_filename +
+                      ' should be under ' + input_dir +
+                      '-v17 directory. Please refer to '
+                      'http://crbug.com/243952 for the details.')
+
+
+def GenerateV14LayoutResourceDom(dom, filename, assert_not_deprecated=True):
+  """Convert layout resource to API 14 compatible layout resource.
+
+  Args:
+    dom: Parsed minidom object to be modified.
+    filename: Filename that the DOM was parsed from.
+    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+                           cause an exception to be thrown.
+
+  Returns:
+    True if dom is modified, False otherwise.
+  """
+  is_modified = False
+
+  # Iterate all the elements' attributes to find attributes to convert.
+  for element in IterateXmlElements(dom):
+    for name, value in list(element.attributes.items()):
+      # Convert any API 17 Start/End attributes to Left/Right attributes.
+      # For example, from paddingStart="10dp" to paddingLeft="10dp"
+      # Note: gravity attributes are not necessary to convert because
+      # start/end values are backward-compatible. Explained at
+      # https://plus.sandbox.google.com/+RomanNurik/posts/huuJd8iVVXY?e=Showroom
+      if name in ATTRIBUTES_TO_MAP:
+        element.setAttribute(ATTRIBUTES_TO_MAP[name], value)
+        del element.attributes[name]
+        is_modified = True
+      elif assert_not_deprecated:
+        AssertNotDeprecatedAttribute(name, value, filename)
+
+  return is_modified
+
+
+def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True):
+  """Convert style resource to API 14 compatible style resource.
+
+  Args:
+    dom: Parsed minidom object to be modified.
+    filename: Filename that the DOM was parsed from.
+    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+                           cause an exception to be thrown.
+
+  Returns:
+    True if dom is modified, False otherwise.
+  """
+  is_modified = False
+
+  for style_element in dom.getElementsByTagName('style'):
+    for item_element in style_element.getElementsByTagName('item'):
+      name = item_element.attributes['name'].value
+      value = item_element.childNodes[0].nodeValue
+      if name in ATTRIBUTES_TO_MAP:
+        item_element.attributes['name'].value = ATTRIBUTES_TO_MAP[name]
+        is_modified = True
+      elif assert_not_deprecated:
+        AssertNotDeprecatedAttribute(name, value, filename)
+
+  return is_modified
+
+
+def GenerateV14LayoutResource(input_filename, output_v14_filename,
+                              output_v17_filename):
+  """Convert API 17 layout resource to API 14 compatible layout resource.
+
+  It's mostly a simple replacement, s/Start/Left s/End/Right,
+  on the attribute names.
+  If the generated resource is identical to the original resource,
+  don't do anything. If not, write the generated resource to
+  output_v14_filename, and copy the original resource to output_v17_filename.
+  """
+  dom = ParseAndReportErrors(input_filename)
+  is_modified = GenerateV14LayoutResourceDom(dom, input_filename)
+
+  if is_modified:
+    # Write the generated resource.
+    WriteDomToFile(dom, output_v14_filename)
+
+    # Copy the original resource.
+    build_utils.MakeDirectory(os.path.dirname(output_v17_filename))
+    shutil.copy2(input_filename, output_v17_filename)
+
+
+def GenerateV14StyleResource(input_filename, output_v14_filename):
+  """Convert API 17 style resources to API 14 compatible style resource.
+
+  Write the generated style resource to output_v14_filename.
+  It's mostly a simple replacement, s/Start/Left s/End/Right,
+  on the attribute names.
+  """
+  dom = ParseAndReportErrors(input_filename)
+  GenerateV14StyleResourceDom(dom, input_filename)
+
+  # Write the generated resource.
+  WriteDomToFile(dom, output_v14_filename)
+
+
+def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir):
+  """Convert layout resources to API 14 compatible resources in input_dir."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    rel_filename = os.path.relpath(input_filename, input_dir)
+    output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+    output_v17_filename = os.path.join(output_v17_dir, rel_filename)
+    GenerateV14LayoutResource(input_filename, output_v14_filename,
+                              output_v17_filename)
+
+
+def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir):
+  """Convert style resources to API 14 compatible resources in input_dir."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    rel_filename = os.path.relpath(input_filename, input_dir)
+    output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+    GenerateV14StyleResource(input_filename, output_v14_filename)
+
+
+def ParseArgs():
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  parser.add_option('--res-dir',
+                    help='directory containing resources '
+                         'used to generate v14 compatible resources')
+  parser.add_option('--res-v14-compatibility-dir',
+                    help='output directory into which '
+                         'v14 compatible resources will be generated')
+  parser.add_option('--stamp', help='File to touch on success')
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('res_dir', 'res_v14_compatibility_dir')
+  build_utils.CheckOptions(options, parser, required=required_options)
+  return options
+
+def GenerateV14Resources(res_dir, res_v14_dir):
+  for name in os.listdir(res_dir):
+    if not os.path.isdir(os.path.join(res_dir, name)):
+      continue
+
+    dir_pieces = name.split('-')
+    resource_type = dir_pieces[0]
+    qualifiers = dir_pieces[1:]
+
+    api_level_qualifier_index = -1
+    api_level_qualifier = ''
+    for index, qualifier in enumerate(qualifiers):
+      if re.match('v[0-9]+$', qualifier):
+        api_level_qualifier_index = index
+        api_level_qualifier = qualifier
+        break
+
+    # Android pre-v17 API doesn't support RTL. Skip.
+    if 'ldrtl' in qualifiers:
+      continue
+
+    input_dir = os.path.abspath(os.path.join(res_dir, name))
+
+    # We also need to copy the original v17 resource to *-v17 directory
+    # because the generated v14 resource will hide the original resource.
+    output_v14_dir = os.path.join(res_v14_dir, name)
+    output_v17_dir = os.path.join(res_v14_dir, name + '-v17')
+
+    # We only convert layout resources under layout*/, xml*/,
+    # and style resources under values*/.
+    if resource_type in ('layout', 'xml'):
+      if not api_level_qualifier:
+        GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir,
+                                        output_v17_dir)
+    elif resource_type == 'values':
+      if api_level_qualifier == 'v17':
+        output_qualifiers = qualifiers[:]
+        del output_qualifiers[api_level_qualifier_index]
+        output_v14_dir = os.path.join(res_v14_dir,
+                                      '-'.join([resource_type] +
+                                               output_qualifiers))
+        GenerateV14StyleResourcesInDir(input_dir, output_v14_dir)
+      elif not api_level_qualifier:
+        ErrorIfStyleResourceExistsInDir(input_dir)
+
+def main():
+  options = ParseArgs()
+
+  res_v14_dir = options.res_v14_compatibility_dir
+
+  build_utils.DeleteDirectory(res_v14_dir)
+  build_utils.MakeDirectory(res_v14_dir)
+
+  GenerateV14Resources(options.res_dir, res_v14_dir)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/get_device_configuration.py b/build/android/gyp/get_device_configuration.py
new file mode 100755
index 0000000..390eb2f
--- /dev/null
+++ b/build/android/gyp/get_device_configuration.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Gets and writes the configurations of the attached devices.
+
+This configuration is used by later build steps to determine which devices to
+install to and what needs to be installed to those devices.
+"""
+
+import optparse
+import sys
+
+from util import build_utils
+from util import build_device
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--stamp', action='store')
+  parser.add_option('--output', action='store')
+  options, _ = parser.parse_args(argv)
+
+  devices = build_device.GetAttachedDevices()
+
+  device_configurations = []
+  for d in devices:
+    configuration, is_online, has_root = (
+        build_device.GetConfigurationForDevice(d))
+
+    if not is_online:
+      build_utils.PrintBigWarning(
+          '%s is not online. Skipping managed install for this device. '
+          'Try rebooting the device to fix this warning.' % d)
+      continue
+
+    if not has_root:
+      build_utils.PrintBigWarning(
+          '"adb root" failed on device: %s\n'
+          'Skipping managed install for this device.'
+          % configuration['description'])
+      continue
+
+    device_configurations.append(configuration)
+
+  if len(device_configurations) == 0:
+    build_utils.PrintBigWarning(
+        'No valid devices attached. Skipping managed install steps.')
+  elif len(devices) > 1:
+    # Note that this checks len(devices) and not len(device_configurations).
+    # This way, any time there are multiple devices attached it is
+    # explicitly stated which device we will install things to even if all but
+    # one device were rejected for other reasons (e.g. two devices attached,
+    # one w/o root).
+    build_utils.PrintBigWarning(
+        'Multiple devices attached. '
+        'Installing to the preferred device: '
+        '%(id)s (%(description)s)' % (device_configurations[0]))
+
+  build_device.WriteConfigurations(device_configurations, options.output)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/insert_chromium_version.py b/build/android/gyp/insert_chromium_version.py
new file mode 100755
index 0000000..171f9d4
--- /dev/null
+++ b/build/android/gyp/insert_chromium_version.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Insert a version string into a library as a section '.chromium.version'.
+"""
+
+import optparse
+import os
+import sys
+import tempfile
+
+from util import build_utils
+
+def InsertChromiumVersion(android_objcopy,
+                          library_path,
+                          version_string):
+  # Remove existing .chromium.version section from .so
+  objcopy_command = [android_objcopy,
+                     '--remove-section=.chromium.version',
+                     library_path]
+  build_utils.CheckOutput(objcopy_command)
+
+  # Add a .chromium.version section.
+  with tempfile.NamedTemporaryFile() as stream:
+    stream.write(version_string)
+    stream.flush()
+    objcopy_command = [android_objcopy,
+                       '--add-section', '.chromium.version=%s' % stream.name,
+                       library_path]
+    build_utils.CheckOutput(objcopy_command)
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+
+  parser.add_option('--android-objcopy',
+      help='Path to the toolchain\'s objcopy binary')
+  parser.add_option('--stripped-libraries-dir',
+      help='Directory of native libraries')
+  parser.add_option('--libraries',
+      help='List of libraries')
+  parser.add_option('--version-string',
+      help='Version string to be inserted')
+  parser.add_option('--stamp', help='Path to touch on success')
+
+  options, _ = parser.parse_args(args)
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  for library in libraries:
+    library_path = os.path.join(options.stripped_libraries_dir, library)
+
+    InsertChromiumVersion(options.android_objcopy,
+                          library_path,
+                          options.version_string)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/jar.py b/build/android/gyp/jar.py
new file mode 100755
index 0000000..48abf5e
--- /dev/null
+++ b/build/android/gyp/jar.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import optparse
+import os
+import sys
+
+from util import build_utils
+from util import md5_check
+
+
+def Jar(class_files, classes_dir, jar_path, manifest_file=None):
+  jar_path = os.path.abspath(jar_path)
+
+  # The paths of the files in the jar will be the same as they are passed in to
+  # the command. Because of this, the command should be run in
+  # options.classes_dir so the .class file paths in the jar are correct.
+  jar_cwd = classes_dir
+  class_files_rel = [os.path.relpath(f, jar_cwd) for f in class_files]
+  jar_cmd = ['jar', 'cf0', jar_path]
+  if manifest_file:
+    jar_cmd[1] += 'm'
+    jar_cmd.append(os.path.abspath(manifest_file))
+  jar_cmd.extend(class_files_rel)
+
+  with build_utils.TempDir() as temp_dir:
+    empty_file = os.path.join(temp_dir, '.empty')
+    build_utils.Touch(empty_file)
+    jar_cmd.append(os.path.relpath(empty_file, jar_cwd))
+    record_path = '%s.md5.stamp' % jar_path
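+    # md5_check re-runs the jar command only when the input .class files or
+    # the command line differ from what the md5 stamp file recorded.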
+    md5_check.CallAndRecordIfStale(
+        lambda: build_utils.CheckOutput(jar_cmd, cwd=jar_cwd),
+        record_path=record_path,
+        input_paths=class_files,
+        input_strings=jar_cmd,
+        force=not os.path.exists(jar_path),
+        )
+
+    build_utils.Touch(jar_path, fail_if_missing=True)
+
+
+def JarDirectory(classes_dir, excluded_classes, jar_path, manifest_file=None):
+  class_files = build_utils.FindInDirectory(classes_dir, '*.class')
+  for exclude in excluded_classes:
+    class_files = filter(
+        lambda f: not fnmatch.fnmatch(f, exclude), class_files)
+
+  Jar(class_files, classes_dir, jar_path, manifest_file=manifest_file)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--classes-dir', help='Directory containing .class files.')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option('--excluded-classes',
+      help='List of .class file patterns to exclude from the jar.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  if options.excluded_classes:
+    excluded_classes = build_utils.ParseGypList(options.excluded_classes)
+  else:
+    excluded_classes = []
+  JarDirectory(options.classes_dir,
+               excluded_classes,
+               options.jar_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/jar_toc.py b/build/android/gyp/jar_toc.py
new file mode 100755
index 0000000..00d97d2
--- /dev/null
+++ b/build/android/gyp/jar_toc.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a TOC file from a Java jar.
+
+The TOC file contains the non-private API of the jar. This includes all
+public/protected/package classes/functions/members and the values of static
+final variables (members with package access are kept because in some cases we
+have multiple libraries with the same package, particularly test+non-test). Some
+other information (major/minor javac version) is also included.
+
+This TOC file can then be used to determine whether a dependent library should
+be rebuilt when this jar changes: any change to the jar that would require a
+rebuild will have a corresponding change in the TOC file.
+"""
+
+import optparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import md5_check
+
+
+def GetClassesInZipFile(zip_file):
+  classes = []
+  files = zip_file.namelist()
+  for f in files:
+    if f.endswith('.class'):
+      # f is of the form org/chromium/base/Class$Inner.class
+      classes.append(f.replace('/', '.')[:-6])
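+      # e.g. 'org/chromium/base/Class$Inner.class' becomes
+      # 'org.chromium.base.Class$Inner'.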
+  return classes
+
+
+def CallJavap(classpath, classes):
+  javap_cmd = [
+      'javap',
+      '-package',  # Show public/protected/package.
+      # -verbose is required to get constant values (which can be inlined in
+      # dependents).
+      '-verbose',
+      '-J-XX:NewSize=4m',
+      '-classpath', classpath
+      ] + classes
+  return build_utils.CheckOutput(javap_cmd)
+
+
+def ExtractToc(disassembled_classes):
+  # javap output is structured by indent (2-space) levels.
+  good_patterns = [
+      '^[^ ]', # This includes all class/function/member signatures.
+      '^  SourceFile:',
+      '^  minor version:',
+      '^  major version:',
+      '^  Constant value:',
+      ]
+  bad_patterns = [
+      '^const #', # Matches the constant pool (i.e. literals used in the class).
+    ]
+
+  def JavapFilter(line):
+    return (re.match('|'.join(good_patterns), line) and
+        not re.match('|'.join(bad_patterns), line))
+  toc = filter(JavapFilter, disassembled_classes.split('\n'))
+
+  return '\n'.join(toc)
+
+
+def UpdateToc(jar_path, toc_path):
+  classes = GetClassesInZipFile(zipfile.ZipFile(jar_path))
+  toc = []
+
+  limit = 1000 # Split into multiple calls to stay under command size limit
+  for i in xrange(0, len(classes), limit):
+    javap_output = CallJavap(classpath=jar_path, classes=classes[i:i+limit])
+    toc.append(ExtractToc(javap_output))
+
+  with open(toc_path, 'w') as tocfile:
+    tocfile.write(''.join(toc))
+
+
+def DoJarToc(options):
+  jar_path = options.jar_path
+  toc_path = options.toc_path
+  record_path = '%s.md5.stamp' % toc_path
+  md5_check.CallAndRecordIfStale(
+      lambda: UpdateToc(jar_path, toc_path),
+      record_path=record_path,
+      input_paths=[jar_path],
+      force=not os.path.exists(toc_path),
+      )
+  build_utils.Touch(toc_path, fail_if_missing=True)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--jar-path', help='Input .jar path.')
+  parser.add_option('--toc-path', help='Output .jar.TOC path.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  DoJarToc(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/java_cpp_enum.py b/build/android/gyp/java_cpp_enum.py
new file mode 100755
index 0000000..c2f1764
--- /dev/null
+++ b/build/android/gyp/java_cpp_enum.py
@@ -0,0 +1,340 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import re
+import optparse
+import os
+from string import Template
+import sys
+
+from util import build_utils
+
+# List of C++ types that are compatible with the Java code generated by this
+# script.
+#
+# This script can parse .idl files; however, at present it ignores special
+# rules such as [cpp_enum_prefix_override="ax_attr"].
+ENUM_FIXED_TYPE_WHITELIST = ['char', 'unsigned char',
+  'short', 'unsigned short',
+  'int', 'int8_t', 'int16_t', 'int32_t', 'uint8_t', 'uint16_t']
+
+class EnumDefinition(object):
+  def __init__(self, original_enum_name=None, class_name_override=None,
+               enum_package=None, entries=None, fixed_type=None):
+    self.original_enum_name = original_enum_name
+    self.class_name_override = class_name_override
+    self.enum_package = enum_package
+    self.entries = collections.OrderedDict(entries or [])
+    self.prefix_to_strip = None
+    self.fixed_type = fixed_type
+
+  def AppendEntry(self, key, value):
+    if key in self.entries:
+      raise Exception('Multiple definitions of key %s found.' % key)
+    self.entries[key] = value
+
+  @property
+  def class_name(self):
+    return self.class_name_override or self.original_enum_name
+
+  def Finalize(self):
+    self._Validate()
+    self._AssignEntryIndices()
+    self._StripPrefix()
+
+  def _Validate(self):
+    assert self.class_name
+    assert self.enum_package
+    assert self.entries
+    if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_WHITELIST:
+      raise Exception('Fixed type %s for enum %s not whitelisted.' %
+          (self.fixed_type, self.class_name))
+
+  def _AssignEntryIndices(self):
+    # Enums, if given no value, are given the value of the previous enum + 1.
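+    # e.g. entries {A: None, B: '2', C: None} resolve to {A: 0, B: 2, C: 3};
+    # a value naming an earlier key (e.g. B: 'A') copies that key's index.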
+    if not all(self.entries.values()):
+      prev_enum_value = -1
+      for key, value in self.entries.iteritems():
+        if not value:
+          self.entries[key] = prev_enum_value + 1
+        elif value in self.entries:
+          self.entries[key] = self.entries[value]
+        else:
+          try:
+            self.entries[key] = int(value)
+          except ValueError:
+            raise Exception('Could not interpret integer from enum value "%s" '
+                            'for key %s.' % (value, key))
+        prev_enum_value = self.entries[key]
+
+
+  def _StripPrefix(self):
+    prefix_to_strip = self.prefix_to_strip
+    if not prefix_to_strip:
+      prefix_to_strip = self.original_enum_name
+      prefix_to_strip = re.sub('(?!^)([A-Z]+)', r'_\1', prefix_to_strip).upper()
+      prefix_to_strip += '_'
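+      # e.g. an enum named 'ClassName' yields the implicit prefix
+      # 'CLASS_NAME_', so an entry 'CLASS_NAME_A' is emitted as 'A'.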
+      if not all([w.startswith(prefix_to_strip) for w in self.entries.keys()]):
+        prefix_to_strip = ''
+
+    entries = collections.OrderedDict()
+    for (k, v) in self.entries.iteritems():
+      stripped_key = k.replace(prefix_to_strip, '', 1)
+      if isinstance(v, basestring):
+        stripped_value = v.replace(prefix_to_strip, '', 1)
+      else:
+        stripped_value = v
+      entries[stripped_key] = stripped_value
+
+    self.entries = entries
+
+class DirectiveSet(object):
+  class_name_override_key = 'CLASS_NAME_OVERRIDE'
+  enum_package_key = 'ENUM_PACKAGE'
+  prefix_to_strip_key = 'PREFIX_TO_STRIP'
+
+  known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key]
+
+  def __init__(self):
+    self._directives = {}
+
+  def Update(self, key, value):
+    if key not in DirectiveSet.known_keys:
+      raise Exception("Unknown directive: " + key)
+    self._directives[key] = value
+
+  @property
+  def empty(self):
+    return len(self._directives) == 0
+
+  def UpdateDefinition(self, definition):
+    definition.class_name_override = self._directives.get(
+        DirectiveSet.class_name_override_key, '')
+    definition.enum_package = self._directives.get(
+        DirectiveSet.enum_package_key)
+    definition.prefix_to_strip = self._directives.get(
+        DirectiveSet.prefix_to_strip_key)
+
+
+class HeaderParser(object):
+  single_line_comment_re = re.compile(r'\s*//')
+  multi_line_comment_start_re = re.compile(r'\s*/\*')
+  enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?')
+  enum_end_re = re.compile(r'^\s*}\s*;\.*$')
+  generator_directive_re = re.compile(
+      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$')
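+  # e.g. '// GENERATED_JAVA_ENUM_PACKAGE: test.namespace' captures
+  # ('ENUM_PACKAGE', 'test.namespace').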
+  multi_line_generator_directive_start_re = re.compile(
+      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$')
+  multi_line_directive_continuation_re = re.compile(
+      r'^\s*//\s+([\.\w]+)$')
+  multi_line_directive_end_re = re.compile(
+      r'^\s*//\s+([\.\w]*)\)$')
+
+  optional_class_or_struct_re = r'(class|struct)?'
+  enum_name_re = r'(\w+)'
+  optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?'
+  enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' +
+      optional_class_or_struct_re + '\s*' + enum_name_re + '\s*' +
+      optional_fixed_type_re + '\s*{\s*$')
+
+  def __init__(self, lines, path=None):
+    self._lines = lines
+    self._path = path
+    self._enum_definitions = []
+    self._in_enum = False
+    self._current_definition = None
+    self._generator_directives = DirectiveSet()
+    self._multi_line_generator_directive = None
+
+  def _ApplyGeneratorDirectives(self):
+    self._generator_directives.UpdateDefinition(self._current_definition)
+    self._generator_directives = DirectiveSet()
+
+  def ParseDefinitions(self):
+    for line in self._lines:
+      self._ParseLine(line)
+    return self._enum_definitions
+
+  def _ParseLine(self, line):
+    if self._multi_line_generator_directive:
+      self._ParseMultiLineDirectiveLine(line)
+    elif not self._in_enum:
+      self._ParseRegularLine(line)
+    else:
+      self._ParseEnumLine(line)
+
+  def _ParseEnumLine(self, line):
+    if HeaderParser.single_line_comment_re.match(line):
+      return
+    if HeaderParser.multi_line_comment_start_re.match(line):
+      raise Exception('Multi-line comments in enums are not supported in ' +
+                      self._path)
+    enum_end = HeaderParser.enum_end_re.match(line)
+    enum_entry = HeaderParser.enum_line_re.match(line)
+    if enum_end:
+      self._ApplyGeneratorDirectives()
+      self._current_definition.Finalize()
+      self._enum_definitions.append(self._current_definition)
+      self._in_enum = False
+    elif enum_entry:
+      enum_key = enum_entry.groups()[0]
+      enum_value = enum_entry.groups()[2]
+      self._current_definition.AppendEntry(enum_key, enum_value)
+
+  def _ParseMultiLineDirectiveLine(self, line):
+    multi_line_directive_continuation = (
+        HeaderParser.multi_line_directive_continuation_re.match(line))
+    multi_line_directive_end = (
+        HeaderParser.multi_line_directive_end_re.match(line))
+
+    if multi_line_directive_continuation:
+      value_cont = multi_line_directive_continuation.groups()[0]
+      self._multi_line_generator_directive[1].append(value_cont)
+    elif multi_line_directive_end:
+      directive_name = self._multi_line_generator_directive[0]
+      directive_value = "".join(self._multi_line_generator_directive[1])
+      directive_value += multi_line_directive_end.groups()[0]
+      self._multi_line_generator_directive = None
+      self._generator_directives.Update(directive_name, directive_value)
+    else:
+      raise Exception('Malformed multi-line directive declaration in ' +
+                      self._path)
+
+  def _ParseRegularLine(self, line):
+    enum_start = HeaderParser.enum_start_re.match(line)
+    generator_directive = HeaderParser.generator_directive_re.match(line)
+    multi_line_generator_directive_start = (
+        HeaderParser.multi_line_generator_directive_start_re.match(line))
+
+    if generator_directive:
+      directive_name = generator_directive.groups()[0]
+      directive_value = generator_directive.groups()[1]
+      self._generator_directives.Update(directive_name, directive_value)
+    elif multi_line_generator_directive_start:
+      directive_name = multi_line_generator_directive_start.groups()[0]
+      directive_value = multi_line_generator_directive_start.groups()[1]
+      self._multi_line_generator_directive = (directive_name, [directive_value])
+    elif enum_start:
+      if self._generator_directives.empty:
+        return
+      self._current_definition = EnumDefinition(
+          original_enum_name=enum_start.groups()[1],
+          fixed_type=enum_start.groups()[3])
+      self._in_enum = True
+
+def GetScriptName():
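+  # Returns this script's path relative to the checkout root, e.g.
+  # 'build/android/gyp/java_cpp_enum.py'.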
+  script_components = os.path.abspath(sys.argv[0]).split(os.path.sep)
+  build_index = script_components.index('build')
+  return os.sep.join(script_components[build_index:])
+
+
+def DoGenerate(output_dir, source_paths, print_output_only=False):
+  output_paths = []
+  for source_path in source_paths:
+    enum_definitions = DoParseHeaderFile(source_path)
+    if not enum_definitions:
+      raise Exception('No enums found in %s\n'
+                      'Did you forget prefixing enums with '
+                      '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' %
+                      source_path)
+    for enum_definition in enum_definitions:
+      package_path = enum_definition.enum_package.replace('.', os.path.sep)
+      file_name = enum_definition.class_name + '.java'
+      output_path = os.path.join(output_dir, package_path, file_name)
+      output_paths.append(output_path)
+      if not print_output_only:
+        build_utils.MakeDirectory(os.path.dirname(output_path))
+        DoWriteOutput(source_path, output_path, enum_definition)
+  return output_paths
+
+
+def DoParseHeaderFile(path):
+  with open(path) as f:
+    return HeaderParser(f.readlines(), path).ParseDefinitions()
+
+
+def GenerateOutput(source_path, enum_definition):
+  template = Template("""
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     ${SCRIPT_NAME}
+// From
+//     ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+public class ${CLASS_NAME} {
+${ENUM_ENTRIES}
+}
+""")
+
+  enum_template = Template('  public static final int ${NAME} = ${VALUE};')
+  enum_entries_string = []
+  for enum_name, enum_value in enum_definition.entries.iteritems():
+    values = {
+        'NAME': enum_name,
+        'VALUE': enum_value,
+    }
+    enum_entries_string.append(enum_template.substitute(values))
+  enum_entries_string = '\n'.join(enum_entries_string)
+
+  values = {
+      'CLASS_NAME': enum_definition.class_name,
+      'ENUM_ENTRIES': enum_entries_string,
+      'PACKAGE': enum_definition.enum_package,
+      'SCRIPT_NAME': GetScriptName(),
+      'SOURCE_PATH': source_path,
+  }
+  return template.substitute(values)
+
+
+def DoWriteOutput(source_path, output_path, enum_definition):
+  with open(output_path, 'w') as out_file:
+    out_file.write(GenerateOutput(source_path, enum_definition))
+
+def AssertFilesList(output_paths, assert_files_list):
+  actual = set(output_paths)
+  expected = set(assert_files_list)
+  if not actual == expected:
+    need_to_add = list(actual - expected)
+    need_to_remove = list(expected - actual)
+    raise Exception('Output files list does not match expectations. Please '
+                    'add %s and remove %s.' % (need_to_add, need_to_remove))
+
+def DoMain(argv):
+  usage = 'usage: %prog [options] output_dir input_file(s)...'
+  parser = optparse.OptionParser(usage=usage)
+
+  parser.add_option('--assert_file', action="append", default=[],
+                    dest="assert_files_list", help='Assert that the given '
+                    'file is an output. There can be multiple occurrences of '
+                    'this flag.')
+  parser.add_option('--print_output_only', help='Only print output paths.',
+                    action='store_true')
+  parser.add_option('--verbose', help='Print more information.',
+                    action='store_true')
+
+  options, args = parser.parse_args(argv)
+  if len(args) < 2:
+    parser.error('Need to specify output directory and at least one input file')
+  output_paths = DoGenerate(args[0], args[1:],
+                            print_output_only=options.print_output_only)
+
+  if options.assert_files_list:
+    AssertFilesList(output_paths, options.assert_files_list)
+
+  if options.verbose:
+    print 'Output paths:'
+    print '\n'.join(output_paths)
+
+  return ' '.join(output_paths)
+
+if __name__ == '__main__':
+  DoMain(sys.argv[1:])
diff --git a/build/android/gyp/java_cpp_enum_tests.py b/build/android/gyp/java_cpp_enum_tests.py
new file mode 100755
index 0000000..44f9766
--- /dev/null
+++ b/build/android/gyp/java_cpp_enum_tests.py
@@ -0,0 +1,436 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for enum_preprocess.py.
+
+This test suite contains various tests for the C++ -> Java enum generator.
+"""
+
+import collections
+import optparse
+import os
+import sys
+import unittest
+
+import java_cpp_enum
+from java_cpp_enum import EnumDefinition, GenerateOutput, GetScriptName
+from java_cpp_enum import HeaderParser
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
+from util import build_utils
+
+class TestPreprocess(unittest.TestCase):
+  def testOutput(self):
+    definition = EnumDefinition(original_enum_name='ClassName',
+                                enum_package='some.package',
+                                entries=[('E1', 1), ('E2', '2 << 2')])
+    output = GenerateOutput('path/to/file', definition)
+    expected = """
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     %s
+// From
+//     path/to/file
+
+package some.package;
+
+public class ClassName {
+  public static final int E1 = 1;
+  public static final int E2 = 2 << 2;
+}
+"""
+    self.assertEqual(expected % GetScriptName(), output)
+
+  def testParseSimpleEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        VALUE_ZERO,
+        VALUE_ONE,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumName', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0),
+                                              ('VALUE_ONE', 1)]),
+                     definition.entries)
+
+  def testParseBitShifts(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        VALUE_ZERO = 1 << 0,
+        VALUE_ONE = 1 << 1,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumName', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'),
+                                              ('VALUE_ONE', '1 << 1')]),
+                     definition.entries)
+
+  def testParseClassNameOverride(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+      enum EnumName {
+        FOO
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride
+      enum PrefixTest {
+        PREFIX_TEST_A,
+        PREFIX_TEST_B,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('OverrideName', definition.class_name)
+
+    definition = definitions[1]
+    self.assertEqual('OtherOverride', definition.class_name)
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1)]),
+                     definition.entries)
+
+  def testParseTwoEnums(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumOne {
+        ENUM_ONE_A = 1,
+        // Comment there
+        ENUM_ONE_B = A,
+      };
+
+      enum EnumIgnore {
+        C, D, E
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+      enum EnumTwo {
+        P_A,
+        P_B
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumOne', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', 'A')]),
+                     definition.entries)
+
+    definition = definitions[1]
+    self.assertEqual('EnumTwo', definition.class_name)
+    self.assertEqual('other.package', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1)]),
+                     definition.entries)
+
+  def testParseThrowsOnUnknownDirective(self):
+    test_data = """
+      // GENERATED_JAVA_UNKNOWN: Value
+      enum EnumName {
+        VALUE_ONE,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseReturnsEmptyListWithoutDirectives(self):
+    test_data = """
+      enum EnumName {
+        VALUE_ONE,
+      };
+    """.split('\n')
+    self.assertEqual([], HeaderParser(test_data).ParseDefinitions())
+
+  def testParseEnumClass(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseEnumStruct(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum struct Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseFixedTypeEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum Foo : int {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual('int', definition.fixed_type)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseFixedTypeEnumClass(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo: unsigned short {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual('unsigned short', definition.fixed_type)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseUnknownFixedTypeRaises(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo: foo_type {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseSimpleMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.namespace)
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+    self.assertEqual('Bar', definitions[0].class_name)
+
+  def testParseMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (te
+      //   st.name
+      //   space)
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+
+  def testParseMultiLineDirectiveWithOtherDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.namespace)
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: (
+      //   Ba
+      //   r
+      //   )
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+    self.assertEqual('Bar', definitions[0].class_name)
+
+  def testParseMalformedMultiLineDirectiveWithOtherDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.name
+      //   space
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseMalformedMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.name
+      //   space
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseMalformedMultiLineDirectiveShort(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testEnumValueAssignmentNoneDefined(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('C', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentAllDefined(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', '1')
+    definition.AppendEntry('B', '2')
+    definition.AppendEntry('C', '3')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', '2'),
+                                              ('C', '3')]),
+                     definition.entries)
+
+  def testEnumValueAssignmentReferences(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'A')
+    definition.AppendEntry('C', None)
+    definition.AppendEntry('D', 'C')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 0),
+                                              ('C', 1),
+                                              ('D', 1)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentSet(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', '2')
+    definition.AppendEntry('C', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 2),
+                                              ('C', 3)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentSetReferences(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'A')
+    definition.AppendEntry('C', 'B')
+    definition.AppendEntry('D', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 0),
+                                              ('C', 0),
+                                              ('D', 1)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentRaises(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'foo')
+    definition.AppendEntry('C', None)
+    with self.assertRaises(Exception):
+      definition.Finalize()
+
+  def testExplicitPrefixStripping(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('P_A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('P_C', None)
+    definition.AppendEntry('P_LAST', 'P_C')
+    definition.prefix_to_strip = 'P_'
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2),
+                                              ('LAST', 2)]),
+                     definition.entries)
+
+  def testImplicitPrefixStripping(self):
+    definition = EnumDefinition(original_enum_name='ClassName',
+                                enum_package='p')
+    definition.AppendEntry('CLASS_NAME_A', None)
+    definition.AppendEntry('CLASS_NAME_B', None)
+    definition.AppendEntry('CLASS_NAME_C', None)
+    definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2),
+                                              ('LAST', 2)]),
+                     definition.entries)
+
+  def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self):
+    definition = EnumDefinition(original_enum_name='Name',
+                                enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('NAME_LAST', None)
+    definition.Finalize()
+    self.assertEqual(['A', 'B', 'NAME_LAST'], definition.entries.keys())
+
+  def testGenerateThrowsOnEmptyInput(self):
+    with self.assertRaises(Exception):
+      original_do_parse = java_cpp_enum.DoParseHeaderFile
+      try:
+        java_cpp_enum.DoParseHeaderFile = lambda _: []
+        java_cpp_enum.DoGenerate('dir', ['file'])
+      finally:
+        java_cpp_enum.DoParseHeaderFile = original_do_parse
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option("--stamp", help="File to touch on success.")
+  options, _ = parser.parse_args(argv)
+
+  suite = unittest.TestLoader().loadTestsFromTestCase(TestPreprocess)
+  unittest.TextTestRunner(verbosity=0).run(suite)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/build/android/gyp/javac.py b/build/android/gyp/javac.py
new file mode 100755
index 0000000..dafe5df
--- /dev/null
+++ b/build/android/gyp/javac.py
@@ -0,0 +1,321 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import optparse
+import os
+import shutil
+import re
+import sys
+import textwrap
+
+from util import build_utils
+from util import md5_check
+
+import jar
+
+sys.path.append(build_utils.COLORAMA_ROOT)
+import colorama
+
+
+def ColorJavacOutput(output):
+  fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)'
+  warning_re = re.compile(
+      fileline_prefix + r'(?P<full_message> warning: (?P<message>.*))$')
+  error_re = re.compile(
+      fileline_prefix + r'(?P<full_message> (?P<message>.*))$')
+  marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')
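+  # e.g. a javac line 'Foo.java:42: warning: unchecked call' matches
+  # warning_re; the trailing '^' caret marker line matches marker_re.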
+
+  warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM]
+  error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT]
+  marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT]
+
+  def Colorize(line, regex, color):
+    match = regex.match(line)
+    start = match.start(color[0])
+    end = match.end(color[0])
+    return (line[:start]
+            + color[1] + line[start:end]
+            + colorama.Fore.RESET + colorama.Style.RESET_ALL
+            + line[end:])
+
+  def ApplyColor(line):
+    if warning_re.match(line):
+      line = Colorize(line, warning_re, warning_color)
+    elif error_re.match(line):
+      line = Colorize(line, error_re, error_color)
+    elif marker_re.match(line):
+      line = Colorize(line, marker_re, marker_color)
+    return line
+
+  return '\n'.join(map(ApplyColor, output.split('\n')))
+
+
+ERRORPRONE_OPTIONS = [
+  '-Xepdisable:'
+  # Something in chrome_private_java makes this check crash.
+  'com.google.errorprone.bugpatterns.ClassCanBeStatic,'
+  # These crash on lots of targets.
+  'com.google.errorprone.bugpatterns.WrongParameterPackage,'
+  'com.google.errorprone.bugpatterns.GuiceOverridesGuiceInjectableMethod,'
+  'com.google.errorprone.bugpatterns.GuiceOverridesJavaxInjectableMethod,'
+  'com.google.errorprone.bugpatterns.ElementsCountedInLoop'
+]
+
+def DoJavac(
+    bootclasspath, classpath, classes_dir, chromium_code,
+    use_errorprone_path, java_files):
+  """Runs javac.
+
+  Builds |java_files| with the provided |classpath| and puts the generated
+  .class files into |classes_dir|. If |chromium_code| is true, extra lint
+  checking will be enabled.
+  """
+
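+  # Depend on a jar's .TOC file when one exists: the TOC changes only when
+  # the jar's public interface changes, so dependents are not recompiled
+  # after every jar rebuild (see jar_toc.py).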
+  jar_inputs = []
+  for path in classpath:
+    if os.path.exists(path + '.TOC'):
+      jar_inputs.append(path + '.TOC')
+    else:
+      jar_inputs.append(path)
+
+  javac_args = [
+      '-g',
+      # Chromium only allows UTF8 source files.  Being explicit avoids
+      # javac pulling a default encoding from the user's environment.
+      '-encoding', 'UTF-8',
+      '-classpath', ':'.join(classpath),
+      '-d', classes_dir]
+
+  if bootclasspath:
+    javac_args.extend([
+        '-bootclasspath', ':'.join(bootclasspath),
+        '-source', '1.7',
+        '-target', '1.7',
+        ])
+
+  if chromium_code:
+    # TODO(aurimas): re-enable '-Xlint:deprecation' checks once they are fixed.
+    javac_args.extend(['-Xlint:unchecked'])
+  else:
+    # XDignore.symbol.file makes javac compile against rt.jar instead of
+    # ct.sym. This means that using a java internal package/class will not
+    # trigger a compile warning or error.
+    javac_args.extend(['-XDignore.symbol.file'])
+
+  if use_errorprone_path:
+    javac_cmd = [use_errorprone_path] + ERRORPRONE_OPTIONS
+  else:
+    javac_cmd = ['javac']
+
+  javac_cmd = javac_cmd + javac_args + java_files
+
+  def Compile():
+    build_utils.CheckOutput(
+        javac_cmd,
+        print_stdout=chromium_code,
+        stderr_filter=ColorJavacOutput)
+
+  record_path = os.path.join(classes_dir, 'javac.md5.stamp')
+  md5_check.CallAndRecordIfStale(
+      Compile,
+      record_path=record_path,
+      input_paths=java_files + jar_inputs,
+      input_strings=javac_cmd)
+
+
+_MAX_MANIFEST_LINE_LEN = 72
+
+
+def CreateManifest(manifest_path, classpath, main_class=None,
+                   manifest_entries=None):
+  """Creates a manifest file with the given parameters.
+
+  This generates a manifest file that complies with the spec found at
+  http://docs.oracle.com/javase/7/docs/technotes/guides/jar/jar.html#JAR_Manifest
+
+  Args:
+    manifest_path: The path to the manifest file that should be created.
+    classpath: The JAR files that should be listed on the manifest file's
+      classpath.
+    main_class: If present, the class containing the main() function.
+    manifest_entries: If present, a list of (key, value) pairs to add to
+      the manifest.
+
+  """
+  output = ['Manifest-Version: 1.0']
+  if main_class:
+    output.append('Main-Class: %s' % main_class)
+  if manifest_entries:
+    for k, v in manifest_entries:
+      output.append('%s: %s' % (k, v))
+  if classpath:
+    sanitized_paths = []
+    for path in classpath:
+      sanitized_paths.append(os.path.basename(path.strip('"')))
+    output.append('Class-Path: %s' % ' '.join(sanitized_paths))
+  output.append('Created-By: ')
+  output.append('')
+
+  wrapper = textwrap.TextWrapper(break_long_words=True,
+                                 drop_whitespace=False,
+                                 subsequent_indent=' ',
+                                 width=_MAX_MANIFEST_LINE_LEN - 2)
+  output = '\r\n'.join(w for l in output for w in wrapper.wrap(l))
+
+  with open(manifest_path, 'w') as f:
+    f.write(output)
+
+
+def main(argv):
+  colorama.init()
+
+  argv = build_utils.ExpandFileArgs(argv)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option(
+      '--src-gendirs',
+      help='Directories containing generated java files.')
+  parser.add_option(
+      '--java-srcjars',
+      action='append',
+      default=[],
+      help='List of srcjars to include in compilation.')
+  parser.add_option(
+      '--bootclasspath',
+      action='append',
+      default=[],
+      help='Boot classpath for javac. If this is specified multiple times, '
+      'they will all be appended to construct the classpath.')
+  parser.add_option(
+      '--classpath',
+      action='append',
+      help='Classpath for javac. If this is specified multiple times, they '
+      'will all be appended to construct the classpath.')
+  parser.add_option(
+      '--javac-includes',
+      help='A list of file patterns. If provided, only java files that match '
+      'one of the patterns will be compiled.')
+  parser.add_option(
+      '--jar-excluded-classes',
+      default='',
+      help='List of .class file patterns to exclude from the jar.')
+
+  parser.add_option(
+      '--chromium-code',
+      type='int',
+      help='Whether code being compiled should be built with stricter '
+      'warnings for chromium code.')
+
+  parser.add_option(
+      '--use-errorprone-path',
+      help='Use the Errorprone compiler at this path.')
+
+  parser.add_option(
+      '--classes-dir',
+      help='Directory for compiled .class files.')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option(
+      '--main-class',
+      help='The class containing the main method.')
+  parser.add_option(
+      '--manifest-entry',
+      action='append',
+      help='Key:value pairs to add to the .jar manifest.')
+
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, args = parser.parse_args(argv)
+
+  if options.main_class and not options.jar_path:
+    parser.error('--main-class requires --jar-path')
+
+  bootclasspath = []
+  for arg in options.bootclasspath:
+    bootclasspath += build_utils.ParseGypList(arg)
+
+  classpath = []
+  for arg in options.classpath:
+    classpath += build_utils.ParseGypList(arg)
+
+  java_srcjars = []
+  for arg in options.java_srcjars:
+    java_srcjars += build_utils.ParseGypList(arg)
+
+  java_files = args
+  if options.src_gendirs:
+    src_gendirs = build_utils.ParseGypList(options.src_gendirs)
+    java_files += build_utils.FindInDirectories(src_gendirs, '*.java')
+
+  input_files = bootclasspath + classpath + java_srcjars + java_files
+  with build_utils.TempDir() as temp_dir:
+    classes_dir = os.path.join(temp_dir, 'classes')
+    os.makedirs(classes_dir)
+    if java_srcjars:
+      java_dir = os.path.join(temp_dir, 'java')
+      os.makedirs(java_dir)
+      for srcjar in java_srcjars:
+        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
+      java_files += build_utils.FindInDirectory(java_dir, '*.java')
+
+    if options.javac_includes:
+      javac_includes = build_utils.ParseGypList(options.javac_includes)
+      filtered_java_files = []
+      for f in java_files:
+        for include in javac_includes:
+          if fnmatch.fnmatch(f, include):
+            filtered_java_files.append(f)
+            break
+      java_files = filtered_java_files
+
+    if len(java_files) != 0:
+      DoJavac(
+          bootclasspath,
+          classpath,
+          classes_dir,
+          options.chromium_code,
+          options.use_errorprone_path,
+          java_files)
+
+    if options.jar_path:
+      if options.main_class or options.manifest_entry:
+        if options.manifest_entry:
+          entries = map(lambda e: e.split(":"), options.manifest_entry)
+        else:
+          entries = []
+        manifest_file = os.path.join(temp_dir, 'manifest')
+        CreateManifest(manifest_file, classpath, options.main_class, entries)
+      else:
+        manifest_file = None
+      jar.JarDirectory(classes_dir,
+                       build_utils.ParseGypList(options.jar_excluded_classes),
+                       options.jar_path,
+                       manifest_file=manifest_file)
+
+    if options.classes_dir:
+      # Delete the old classes directory. This ensures that all .class files in
+      # the output are actually from the input .java files. For example, if a
+      # .java file is deleted or an inner class is removed, the classes
+      # directory should not contain the corresponding old .class file after
+      # running this action.
+      build_utils.DeleteDirectory(options.classes_dir)
+      shutil.copytree(classes_dir, options.classes_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        input_files + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
+
+
diff --git a/build/android/gyp/jinja_template.py b/build/android/gyp/jinja_template.py
new file mode 100755
index 0000000..e7c9a34
--- /dev/null
+++ b/build/android/gyp/jinja_template.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Renders one or more template files using the Jinja template engine."""
+
+import codecs
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.join(os.path.dirname(__file__), '../../../third_party'))
+import jinja2  # pylint: disable=F0401
+
+
+class RecordingFileSystemLoader(jinja2.FileSystemLoader):
+  '''A FileSystemLoader that stores a list of loaded templates.'''
+  def __init__(self, searchpath):
+    jinja2.FileSystemLoader.__init__(self, searchpath)
+    self.loaded_templates = set()
+
+  def get_source(self, environment, template):
+    contents, filename, uptodate = jinja2.FileSystemLoader.get_source(
+        self, environment, template)
+    self.loaded_templates.add(os.path.relpath(filename))
+    return contents, filename, uptodate
+
+  def get_loaded_templates(self):
+    return list(self.loaded_templates)
+
+
+def ProcessFile(env, input_filename, loader_base_dir, output_filename,
+                variables):
+  input_rel_path = os.path.relpath(input_filename, loader_base_dir)
+  template = env.get_template(input_rel_path)
+  output = template.render(variables)
+  with codecs.open(output_filename, 'w', 'utf-8') as output_file:
+    output_file.write(output)
+
+
+def ProcessFiles(env, input_filenames, loader_base_dir, inputs_base_dir,
+                 outputs_zip, variables):
+  with build_utils.TempDir() as temp_dir:
+    for input_filename in input_filenames:
+      relpath = os.path.relpath(os.path.abspath(input_filename),
+                                os.path.abspath(inputs_base_dir))
+      if relpath.startswith(os.pardir):
+        raise Exception('input file %s is not contained in inputs base dir %s'
+                        % (input_filename, inputs_base_dir))
+
+      output_filename = os.path.join(temp_dir, relpath)
+      parent_dir = os.path.dirname(output_filename)
+      build_utils.MakeDirectory(parent_dir)
+      ProcessFile(env, input_filename, loader_base_dir, output_filename,
+                  variables)
+
+    build_utils.ZipDir(outputs_zip, temp_dir)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--inputs', help='The template files to process.')
+  parser.add_option('--output', help='The output file to generate. Valid '
+                    'only if there is a single input.')
+  parser.add_option('--outputs-zip', help='A zip file containing the processed '
+                    'templates. Required if there are multiple inputs.')
+  parser.add_option('--inputs-base-dir', help='A common ancestor directory of '
+                    'the inputs. Each output\'s path in the output zip will '
+                    'match the relative path from INPUTS_BASE_DIR to the '
+                    'input. Required if --outputs-zip is given.'
+  parser.add_option('--loader-base-dir', help='Base path used by the template '
+                    'loader. Must be a common ancestor directory of '
+                    'the inputs. Defaults to CHROMIUM_SRC.',
+                    default=build_utils.CHROMIUM_SRC)
+  parser.add_option('--variables', help='Variables to be made available in the '
+                    'template processing environment, as a GYP list (e.g. '
+                    '--variables "channel=beta mstone=39")', default='')
+  options, args = parser.parse_args()
+
+  build_utils.CheckOptions(options, parser, required=['inputs'])
+  inputs = build_utils.ParseGypList(options.inputs)
+
+  if (options.output is None) == (options.outputs_zip is None):
+    parser.error('Exactly one of --output and --outputs-zip must be given')
+  if options.output and len(inputs) != 1:
+    parser.error('--output cannot be used with multiple inputs')
+  if options.outputs_zip and not options.inputs_base_dir:
+    parser.error('--inputs-base-dir must be given when --outputs-zip is used')
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  variables = {}
+  for v in build_utils.ParseGypList(options.variables):
+    if '=' not in v:
+      parser.error('--variables argument must contain "=": ' + v)
+    name, _, value = v.partition('=')
+    variables[name] = value
+
+  loader = RecordingFileSystemLoader(options.loader_base_dir)
+  env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined,
+                           line_comment_prefix='##')
+  if options.output:
+    ProcessFile(env, inputs[0], options.loader_base_dir, options.output,
+                variables)
+  else:
+    ProcessFiles(env, inputs, options.loader_base_dir, options.inputs_base_dir,
+                 options.outputs_zip, variables)
+
+  if options.depfile:
+    deps = loader.get_loaded_templates() + build_utils.GetPythonDependencies()
+    build_utils.WriteDepfile(options.depfile, deps)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/lint.py b/build/android/gyp/lint.py
new file mode 100755
index 0000000..6c4645a
--- /dev/null
+++ b/build/android/gyp/lint.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs Android's lint tool."""
+
+
+import optparse
+import os
+import sys
+from xml.dom import minidom
+
+from util import build_utils
+
+
+_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                         '..', '..', '..'))
+
+
+def _RunLint(lint_path, config_path, processed_config_path, manifest_path,
+             result_path, product_dir, sources, jar_path, resource_dir=None):
+
+  def _RelativizePath(path):
+    """Returns relative path to top-level src dir.
+
+    Args:
+      path: A path relative to cwd.
+    """
+    return os.path.relpath(os.path.abspath(path), _SRC_ROOT)
+
+  def _ProcessConfigFile():
+    if not build_utils.IsTimeStale(processed_config_path, [config_path]):
+      return
+
+    with open(config_path, 'rb') as f:
+      content = f.read().replace(
+          'PRODUCT_DIR', _RelativizePath(product_dir))
+
+    with open(processed_config_path, 'wb') as f:
+      f.write(content)
+
+  def _ProcessResultFile():
+    with open(result_path, 'rb') as f:
+      content = f.read().replace(
+          _RelativizePath(product_dir), 'PRODUCT_DIR')
+
+    with open(result_path, 'wb') as f:
+      f.write(content)
+
+  def _ParseAndShowResultFile():
+    dom = minidom.parse(result_path)
+    issues = dom.getElementsByTagName('issue')
+    print >> sys.stderr
+    for issue in issues:
+      issue_id = issue.attributes['id'].value
+      message = issue.attributes['message'].value
+      location_elem = issue.getElementsByTagName('location')[0]
+      path = location_elem.attributes['file'].value
+      line = location_elem.getAttribute('line')
+      if line:
+        error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
+      else:
+        # Issues in class files don't have a line number.
+        error = '%s %s: %s [warning]' % (path, message, issue_id)
+      print >> sys.stderr, error
+      for attr in ['errorLine1', 'errorLine2']:
+        error_line = issue.getAttribute(attr)
+        if error_line:
+          print >> sys.stderr, error_line
+    return len(issues)
+
+  with build_utils.TempDir() as temp_dir:
+    _ProcessConfigFile()
+
+    cmd = [
+        _RelativizePath(lint_path), '-Werror', '--exitcode', '--showall',
+        '--config', _RelativizePath(processed_config_path),
+        '--classpath', _RelativizePath(jar_path),
+        '--xml', _RelativizePath(result_path),
+    ]
+    if resource_dir:
+      cmd.extend(['--resources', _RelativizePath(resource_dir)])
+
+    # There may be multiple source files with the same basename (but in
+    # different directories). It is difficult to determine what part of the path
+    # corresponds to the java package, and so instead just link the source files
+    # into temporary directories (creating a new one whenever there is a name
+    # conflict).
+    src_dirs = []
+    def NewSourceDir():
+      new_dir = os.path.join(temp_dir, str(len(src_dirs)))
+      os.mkdir(new_dir)
+      src_dirs.append(new_dir)
+      cmd.extend(['--sources', _RelativizePath(new_dir)])
+      return new_dir
+
+    def PathInDir(d, src):
+      return os.path.join(d, os.path.basename(src))
+
+    for src in sources:
+      src_dir = None
+      for d in src_dirs:
+        if not os.path.exists(PathInDir(d, src)):
+          src_dir = d
+          break
+      if not src_dir:
+        src_dir = NewSourceDir()
+      os.symlink(os.path.abspath(src), PathInDir(src_dir, src))
+
+    cmd.append(_RelativizePath(os.path.join(manifest_path, os.pardir)))
+
+    if os.path.exists(result_path):
+      os.remove(result_path)
+
+    try:
+      build_utils.CheckOutput(cmd, cwd=_SRC_ROOT)
+    except build_utils.CalledProcessError as e:
+      # There is a problem with lint usage
+      if not os.path.exists(result_path):
+        print 'Something is wrong:'
+        print e
+        return 1
+
+      # There are actual lint issues
+      else:
+        try:
+          num_issues = _ParseAndShowResultFile()
+        except Exception:
+          print 'Lint created unparseable xml file...'
+          print 'File contents:'
+          with open(result_path) as f:
+            print f.read()
+          return 1
+
+        _ProcessResultFile()
+        msg = ('\nLint found %d new issues.\n'
+               ' - For full explanation refer to %s\n'
+               ' - Wanna suppress these issues?\n'
+               '    1. Read comment in %s\n'
+               '    2. Run "python %s %s"\n' %
+               (num_issues,
+                _RelativizePath(result_path),
+                _RelativizePath(config_path),
+                _RelativizePath(os.path.join(_SRC_ROOT, 'build', 'android',
+                                             'lint', 'suppress.py')),
+                _RelativizePath(result_path)))
+        print >> sys.stderr, msg
+        return 1
+
+  return 0
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--lint-path', help='Path to lint executable.')
+  parser.add_option('--config-path', help='Path to lint suppressions file.')
+  parser.add_option('--processed-config-path',
+                    help='Path to processed lint suppressions file.')
+  parser.add_option('--manifest-path', help='Path to AndroidManifest.xml')
+  parser.add_option('--result-path', help='Path to XML lint result file.')
+  parser.add_option('--product-dir', help='Path to product dir.')
+  parser.add_option('--src-dirs', help='Directories containing java files.')
+  parser.add_option('--java-files', help='Paths to java files.')
+  parser.add_option('--jar-path', help='Jar file containing class files.')
+  parser.add_option('--resource-dir', help='Path to resource dir.')
+  parser.add_option('--can-fail-build', action='store_true',
+                    help='If set, script will exit with nonzero exit status'
+                    ' if lint errors are present')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--enable', action='store_true',
+                    help='Run lint instead of just touching stamp.')
+
+  options, _ = parser.parse_args()
+
+  build_utils.CheckOptions(
+      options, parser, required=['lint_path', 'config_path',
+                                 'processed_config_path', 'manifest_path',
+                                 'result_path', 'product_dir',
+                                 'jar_path'])
+
+  rc = 0
+
+  if options.enable:
+    sources = []
+    if options.src_dirs:
+      src_dirs = build_utils.ParseGypList(options.src_dirs)
+      sources = build_utils.FindInDirectories(src_dirs, '*.java')
+    elif options.java_files:
+      sources = build_utils.ParseGypList(options.java_files)
+    else:
+      print 'One of --src-dirs or --java-files must be specified.'
+      return 1
+    rc = _RunLint(options.lint_path, options.config_path,
+                  options.processed_config_path,
+                  options.manifest_path, options.result_path,
+                  options.product_dir, sources, options.jar_path,
+                  options.resource_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  if options.stamp and not rc:
+    build_utils.Touch(options.stamp)
+
+  return rc if options.can_fail_build else 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/locale_pak_resources.py b/build/android/gyp/locale_pak_resources.py
new file mode 100755
index 0000000..84c4a37
--- /dev/null
+++ b/build/android/gyp/locale_pak_resources.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates a resources.zip for locale .pak files.
+
+Places the locale .pak files into the appropriate resource configs
+(e.g. en-GB.pak -> res/raw-en/en_gb.lpak). Also generates a locale_paks
+TypedArray so that resource files can be enumerated at runtime.
+"""
+
+import collections
+import optparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
+# This should stay in sync with:
+# base/android/java/src/org/chromium/base/LocaleUtils.java
+_CHROME_TO_ANDROID_LOCALE_MAP = {
+    'he': 'iw',
+    'id': 'in',
+    'fil': 'tl',
+}
+
+
+def ToResourceFileName(name):
+  """Returns the resource-compatible file name for the given file."""
+  # Resource file names must consist of [a-z0-9_.].
+  # The extension is changed to .lpak so that compression can be toggled
+  # separately for locale pak files vs. other pak files.
+  return name.replace('-', '_').replace('.pak', '.lpak').lower()
+
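+# Example (illustrative): ToResourceFileName('en-GB.pak') == 'en_gb.lpak';
+# dashes become underscores and the extension switches to .lpak.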
+
+def CreateLocalePaksXml(names):
+  """Creates the contents for the locale-paks.xml files."""
+  VALUES_FILE_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
+<resources>
+  <array name="locale_paks">%s
+  </array>
+</resources>
+'''
+  VALUES_ITEM_TEMPLATE = '''
+    <item>@raw/%s</item>'''
+
+  res_names = (os.path.splitext(name)[0] for name in names)
+  items = ''.join((VALUES_ITEM_TEMPLATE % name for name in res_names))
+  return VALUES_FILE_TEMPLATE % items
+
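+# Example (illustrative): CreateLocalePaksXml(['en_gb.lpak']) produces a
+# values XML whose locale_paks array holds <item>@raw/en_gb</item>.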
+
+def ComputeMappings(sources):
+  """Computes the mappings of sources -> resources.
+
+  Returns a tuple of:
+    - mappings: List of (src, dest) paths
+    - lang_to_locale_map: Map of language -> list of resource names
+      e.g. "en" -> ["en_gb.lpak"]
+  """
+  lang_to_locale_map = collections.defaultdict(list)
+  mappings = []
+  for src_path in sources:
+    basename = os.path.basename(src_path)
+    name = os.path.splitext(basename)[0]
+    res_name = ToResourceFileName(basename)
+    if name == 'en-US':
+      dest_dir = 'raw'
+    else:
+      # Chrome uses different region-mapping logic than Android, so include
+      # all regions for each language.
+      android_locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(name, name)
+      lang = android_locale[0:2]
+      dest_dir = 'raw-' + lang
+      lang_to_locale_map[lang].append(res_name)
+    mappings.append((src_path, os.path.join(dest_dir, res_name)))
+  return mappings, lang_to_locale_map
+
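+# Example (illustrative): for sources ['fr.pak', 'en-GB.pak'] this returns
+#   mappings = [('fr.pak', 'raw-fr/fr.lpak'),
+#               ('en-GB.pak', 'raw-en/en_gb.lpak')]
+#   lang_to_locale_map = {'fr': ['fr.lpak'], 'en': ['en_gb.lpak']}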
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--locale-paks', help='List of files for res/raw-LOCALE')
+  parser.add_option('--resources-zip', help='Path to output resources.zip')
+  parser.add_option('--print-languages',
+      action='store_true',
+      help='Print out the list of languages that cover the given locale paks '
+           '(using Android\'s language codes)')
+
+  options, _ = parser.parse_args()
+  build_utils.CheckOptions(options, parser,
+                           required=['locale_paks'])
+
+  sources = build_utils.ParseGypList(options.locale_paks)
+
+  if options.depfile:
+    deps = sources + build_utils.GetPythonDependencies()
+    build_utils.WriteDepfile(options.depfile, deps)
+
+  mappings, lang_to_locale_map = ComputeMappings(sources)
+  if options.print_languages:
+    print '\n'.join(sorted(lang_to_locale_map))
+
+  if options.resources_zip:
+    with zipfile.ZipFile(options.resources_zip, 'w', zipfile.ZIP_STORED) as out:
+      for mapping in mappings:
+        out.write(mapping[0], mapping[1])
+
+      # Create TypedArray resources so ResourceExtractor can enumerate files.
+      def WriteValuesFile(lang, names):
+        dest_dir = 'values'
+        if lang:
+          dest_dir += '-' + lang
+        # Always extract en-US.lpak since it's the fallback.
+        xml = CreateLocalePaksXml(names + ['en_us.lpak'])
+        out.writestr(os.path.join(dest_dir, 'locale-paks.xml'), xml)
+
+      for lang, names in lang_to_locale_map.iteritems():
+        WriteValuesFile(lang, names)
+      WriteValuesFile(None, [])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/pack_relocations.py b/build/android/gyp/pack_relocations.py
new file mode 100755
index 0000000..02e4499
--- /dev/null
+++ b/build/android/gyp/pack_relocations.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pack relocations in a library (or copy unchanged).
+
+If --enable-packing and --configuration-name=='Release', invoke the
+relocation_packer tool to pack the .rel.dyn or .rela.dyn section in the given
+library files.  This step is inserted after the libraries are stripped.
+
+If --enable-packing is zero, the script copies files verbatim, with no
+attempt to pack relocations.
+
+Any library listed in --exclude-packing-list is also copied verbatim,
+irrespective of any --enable-packing setting.  Typically this would be
+'libchromium_android_linker.so'.
+"""
+
+import optparse
+import os
+import shlex
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+
+def PackLibraryRelocations(android_pack_relocations, library_path, output_path):
+  shutil.copy(library_path, output_path)
+  pack_command = [android_pack_relocations, output_path]
+  build_utils.CheckOutput(pack_command)
+
+
+def CopyLibraryUnchanged(library_path, output_path):
+  shutil.copy(library_path, output_path)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--clear-dir', action='store_true',
+                    help='If set, the destination directory will be deleted '
+                    'before copying files to it. This is highly recommended to '
+                    'ensure that no stale files are left in the directory.')
+
+  parser.add_option('--configuration-name',
+      default='Release',
+      help='Gyp configuration name (i.e. Debug, Release)')
+  parser.add_option('--enable-packing',
+      choices=['0', '1'],
+      help=('Pack relocations if 1 and configuration name is \'Release\','
+            ' otherwise plain file copy'))
+  parser.add_option('--exclude-packing-list',
+      default='',
+      help='Names of any libraries explicitly not packed')
+  parser.add_option('--android-pack-relocations',
+      help='Path to the relocations packer binary')
+  parser.add_option('--stripped-libraries-dir',
+      help='Directory for stripped libraries')
+  parser.add_option('--packed-libraries-dir',
+      help='Directory for packed libraries')
+  parser.add_option('--libraries', action='append',
+      help='List of libraries')
+  parser.add_option('--stamp', help='Path to touch on success')
+
+  options, _ = parser.parse_args(args)
+  enable_packing = (options.enable_packing == '1' and
+                    options.configuration_name == 'Release')
+  exclude_packing_set = set(shlex.split(options.exclude_packing_list))
+
+  libraries = []
+  for libs_arg in options.libraries:
+    libraries += build_utils.ParseGypList(libs_arg)
+
+  if options.clear_dir:
+    build_utils.DeleteDirectory(options.packed_libraries_dir)
+
+  build_utils.MakeDirectory(options.packed_libraries_dir)
+
+  for library in libraries:
+    library_path = os.path.join(options.stripped_libraries_dir, library)
+    output_path = os.path.join(
+        options.packed_libraries_dir, os.path.basename(library))
+
+    if enable_packing and library not in exclude_packing_set:
+      PackLibraryRelocations(options.android_pack_relocations,
+                             library_path,
+                             output_path)
+    else:
+      CopyLibraryUnchanged(library_path, output_path)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        libraries + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/package_resources.py b/build/android/gyp/package_resources.py
new file mode 100755
index 0000000..d17d1fe
--- /dev/null
+++ b/build/android/gyp/package_resources.py
@@ -0,0 +1,270 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=C0301
+"""Package resources into an apk.
+
+See https://android.googlesource.com/platform/tools/base/+/master/legacy/ant-tasks/src/main/java/com/android/ant/AaptExecTask.java
+and
+https://android.googlesource.com/platform/sdk/+/master/files/ant/build.xml
+"""
+# pylint: enable=C0301
+
+import optparse
+import os
+import re
+import shutil
+import zipfile
+
+from util import build_utils
+
+
+# List is generated from the chrome_apk.apk_intermediates.ap_ via:
+#     unzip -l $FILE_AP_ | cut -c31- | grep res/draw | cut -d'/' -f 2 | sort \
+#     | uniq | grep -- -tvdpi- | cut -c10-
+# and then manually sorted.
+# Note that we can't just take a cross-product of dimensions because the
+# filenames become too long and aapt fails to create the files.
+# This leaves all default drawables (mdpi) in the main apk, since Android
+# complains if any drawables are missing from the default drawables/ directory.
+DENSITY_SPLITS = {
+    'hdpi': (
+        'hdpi-v4', # Order matters for output file names.
+        'ldrtl-hdpi-v4',
+        'sw600dp-hdpi-v13',
+        'ldrtl-hdpi-v17',
+        'ldrtl-sw600dp-hdpi-v17',
+        'hdpi-v21',
+    ),
+    'xhdpi': (
+        'xhdpi-v4',
+        'ldrtl-xhdpi-v4',
+        'sw600dp-xhdpi-v13',
+        'ldrtl-xhdpi-v17',
+        'ldrtl-sw600dp-xhdpi-v17',
+        'xhdpi-v21',
+    ),
+    'xxhdpi': (
+        'xxhdpi-v4',
+        'ldrtl-xxhdpi-v4',
+        'sw600dp-xxhdpi-v13',
+        'ldrtl-xxhdpi-v17',
+        'ldrtl-sw600dp-xxhdpi-v17',
+        'xxhdpi-v21',
+    ),
+    'xxxhdpi': (
+        'xxxhdpi-v4',
+        'ldrtl-xxxhdpi-v4',
+        'sw600dp-xxxhdpi-v13',
+        'ldrtl-xxxhdpi-v17',
+        'ldrtl-sw600dp-xxxhdpi-v17',
+        'xxxhdpi-v21',
+    ),
+    'tvdpi': (
+        'tvdpi-v4',
+        'sw600dp-tvdpi-v13',
+        'ldrtl-sw600dp-tvdpi-v17',
+    ),
+}
+
+
+def ParseArgs():
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--android-sdk', help='path to the Android SDK folder')
+  parser.add_option('--aapt-path',
+                    help='path to the Android aapt tool')
+
+  parser.add_option('--configuration-name',
+                    help='Gyp\'s configuration name (Debug or Release).')
+
+  parser.add_option('--android-manifest', help='AndroidManifest.xml path')
+  parser.add_option('--version-code', help='Version code for apk.')
+  parser.add_option('--version-name', help='Version name for apk.')
+  parser.add_option(
+      '--shared-resources',
+      action='store_true',
+      help='Make a resource package that can be loaded by a different '
+      'application at runtime to access the package\'s resources.')
+  parser.add_option('--resource-zips',
+                    help='zip files containing resources to be packaged')
+  parser.add_option('--asset-dir',
+                    help='directories containing assets to be packaged')
+  parser.add_option('--no-compress', help='disables compression for the '
+                    'given comma-separated list of extensions')
+  parser.add_option(
+      '--create-density-splits',
+      action='store_true',
+      help='Enables density splits')
+  parser.add_option('--language-splits',
+                    help='GYP list of languages to create splits for')
+
+  parser.add_option('--apk-path',
+                    help='Path to output (partial) apk.')
+
+  (options, args) = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('android_sdk', 'aapt_path', 'configuration_name',
+                      'android_manifest', 'version_code', 'version_name',
+                      'apk_path')
+
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  return options
+
+
+def MoveImagesToNonMdpiFolders(res_root):
+  """Move images from drawable-*-mdpi-* folders to drawable-* folders.
+
+  Why? http://crbug.com/289843
+  """
+  for src_dir_name in os.listdir(res_root):
+    src_components = src_dir_name.split('-')
+    if src_components[0] != 'drawable' or 'mdpi' not in src_components:
+      continue
+    src_dir = os.path.join(res_root, src_dir_name)
+    if not os.path.isdir(src_dir):
+      continue
+    dst_components = [c for c in src_components if c != 'mdpi']
+    assert dst_components != src_components
+    dst_dir_name = '-'.join(dst_components)
+    dst_dir = os.path.join(res_root, dst_dir_name)
+    build_utils.MakeDirectory(dst_dir)
+    for src_file_name in os.listdir(src_dir):
+      if not src_file_name.endswith('.png'):
+        continue
+      src_file = os.path.join(src_dir, src_file_name)
+      dst_file = os.path.join(dst_dir, src_file_name)
+      assert not os.path.lexists(dst_file)
+      shutil.move(src_file, dst_file)
+
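+# Example (illustrative): res/drawable-mdpi/icon.png moves to
+# res/drawable/icon.png, and res/drawable-ldrtl-mdpi-v17/back.png moves to
+# res/drawable-ldrtl-v17/back.png.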
+
+def PackageArgsForExtractedZip(d):
+  """Returns the aapt args for an extracted resources zip.
+
+  A resources zip either contains the resources for a single target or for
+  multiple targets. If it is multiple targets merged into one, the actual
+  resource directories will be contained in the subdirectories 0, 1, 2, ...
+  """
+  subdirs = [os.path.join(d, s) for s in os.listdir(d)]
+  subdirs = [s for s in subdirs if os.path.isdir(s)]
+  is_multi = '0' in [os.path.basename(s) for s in subdirs]
+  if is_multi:
+    res_dirs = sorted(subdirs, key=lambda p: int(os.path.basename(p)))
+  else:
+    res_dirs = [d]
+  package_command = []
+  for d in res_dirs:
+    MoveImagesToNonMdpiFolders(d)
+    package_command += ['-S', d]
+  return package_command
+
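+# Example (illustrative): a merged zip extracted to /tmp/r with subdirs 0 and 1
+# yields ['-S', '/tmp/r/0', '-S', '/tmp/r/1']; a single-target zip yields
+# ['-S', '/tmp/r'].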
+
+def RenameDensitySplits(apk_path):
+  """Renames all density splits to have shorter / predictable names."""
+  for density, config in DENSITY_SPLITS.iteritems():
+    src_path = '%s_%s' % (apk_path, '_'.join(config))
+    dst_path = '%s_%s' % (apk_path, density)
+    if src_path != dst_path:
+      if os.path.exists(dst_path):
+        os.unlink(dst_path)
+      os.rename(src_path, dst_path)
+
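+# Example (illustrative): aapt emits the tvdpi split as
+# 'Foo.apk_tvdpi-v4_sw600dp-tvdpi-v13_ldrtl-sw600dp-tvdpi-v17';
+# RenameDensitySplits renames it to the predictable 'Foo.apk_tvdpi'.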
+
+def CheckForMissedConfigs(apk_path, check_density, languages):
+  """Raises an exception if apk_path contains any unexpected configs."""
+  triggers = []
+  if check_density:
+    triggers.extend(re.compile('-%s' % density) for density in DENSITY_SPLITS)
+  if languages:
+    triggers.extend(re.compile(r'-%s\b' % lang) for lang in languages)
+  with zipfile.ZipFile(apk_path) as main_apk_zip:
+    for name in main_apk_zip.namelist():
+      for trigger in triggers:
+        if trigger.search(name) and 'mipmap-' not in name:
+          raise Exception(('Found config in main apk that should have been ' +
+                           'put into a split: %s\nYou need to update ' +
+                           'package_resources.py to include this new ' +
+                           'config (trigger=%s)') % (name, trigger.pattern))
+
+
+def main():
+  options = ParseArgs()
+  android_jar = os.path.join(options.android_sdk, 'android.jar')
+  aapt = options.aapt_path
+
+  with build_utils.TempDir() as temp_dir:
+    package_command = [aapt,
+                       'package',
+                       '--version-code', options.version_code,
+                       '--version-name', options.version_name,
+                       '-M', options.android_manifest,
+                       '--no-crunch',
+                       '-f',
+                       '--auto-add-overlay',
+                       '-I', android_jar,
+                       '-F', options.apk_path,
+                       '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN,
+                       ]
+
+    if options.no_compress:
+      for ext in options.no_compress.split(','):
+        package_command += ['-0', ext]
+    if options.shared_resources:
+      package_command.append('--shared-lib')
+
+    if options.asset_dir and os.path.exists(options.asset_dir):
+      package_command += ['-A', options.asset_dir]
+
+    if options.resource_zips:
+      dep_zips = build_utils.ParseGypList(options.resource_zips)
+      for z in dep_zips:
+        subdir = os.path.join(temp_dir, os.path.basename(z))
+        if os.path.exists(subdir):
+          raise Exception('Resource zip name conflict: ' + os.path.basename(z))
+        build_utils.ExtractAll(z, path=subdir)
+        package_command += PackageArgsForExtractedZip(subdir)
+
+    if options.create_density_splits:
+      for config in DENSITY_SPLITS.itervalues():
+        package_command.extend(('--split', ','.join(config)))
+
+    language_splits = None
+    if options.language_splits:
+      language_splits = build_utils.ParseGypList(options.language_splits)
+      for lang in language_splits:
+        package_command.extend(('--split', lang))
+
+    if 'Debug' in options.configuration_name:
+      package_command += ['--debug-mode']
+
+    build_utils.CheckOutput(
+        package_command, print_stdout=False, print_stderr=False)
+
+    if options.create_density_splits or language_splits:
+      CheckForMissedConfigs(
+          options.apk_path, options.create_density_splits, language_splits)
+
+    if options.create_density_splits:
+      RenameDensitySplits(options.apk_path)
+
+    if options.depfile:
+      build_utils.WriteDepfile(
+          options.depfile,
+          build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/process_resources.py b/build/android/gyp/process_resources.py
new file mode 100755
index 0000000..d227954
--- /dev/null
+++ b/build/android/gyp/process_resources.py
@@ -0,0 +1,420 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android resources to generate R.java, and prepare for packaging.
+
+This will crunch images and generate v14 compatible resources
+(see generate_v14_compatible_resources.py).
+"""
+
+import codecs
+import optparse
+import os
+import re
+import shutil
+import sys
+import zipfile
+
+import generate_v14_compatible_resources
+
+from util import build_utils
+
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1,
+    os.path.join(os.path.dirname(__file__), '../../../third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+
+def ParseArgs(args):
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--android-sdk', help='path to the Android SDK folder')
+  parser.add_option('--aapt-path',
+                    help='path to the Android aapt tool')
+  parser.add_option('--non-constant-id', action='store_true')
+
+  parser.add_option('--android-manifest', help='AndroidManifest.xml path')
+  parser.add_option('--custom-package', help='Java package for R.java')
+  parser.add_option(
+      '--shared-resources',
+      action='store_true',
+      help='Make a resource package that can be loaded by a different '
+      'application at runtime to access the package\'s resources.')
+
+  parser.add_option('--resource-dirs',
+                    help='Directories containing resources of this target.')
+  parser.add_option('--dependencies-res-zips',
+                    help='Resources from dependents.')
+
+  parser.add_option('--resource-zip-out',
+                    help='Path for output zipped resources.')
+
+  parser.add_option('--R-dir',
+                    help='directory to hold generated R.java.')
+  parser.add_option('--srcjar-out',
+                    help='Path to srcjar to contain generated R.java.')
+  parser.add_option('--r-text-out',
+                    help='Path to store the R.txt file generated by aapt.')
+
+  parser.add_option('--proguard-file',
+                    help='Path to proguard.txt generated file')
+
+  parser.add_option(
+      '--v14-skip',
+      action="store_true",
+      help='Do not generate or verify v14 resources')
+
+  parser.add_option(
+      '--extra-res-packages',
+      help='Additional package names to generate R.java files for')
+  parser.add_option(
+      '--extra-r-text-files',
+      help='For each additional package, an R.txt file listing the resources '
+      'to include in that package\'s R.java file, in the format generated '
+      'by aapt')
+  parser.add_option(
+      '--include-all-resources',
+      action='store_true',
+      help='Include every resource ID in every generated R.java file '
+      '(ignoring R.txt).')
+
+  parser.add_option(
+      '--all-resources-zip-out',
+      help='Path for output of all resources. This includes resources in '
+      'dependencies.')
+
+  parser.add_option('--stamp', help='File to touch on success')
+
+  (options, args) = parser.parse_args(args)
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = (
+      'android_sdk',
+      'aapt_path',
+      'android_manifest',
+      'dependencies_res_zips',
+      'resource_dirs',
+      'resource_zip_out',
+      )
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  if (options.R_dir is None) == (options.srcjar_out is None):
+    raise Exception('Exactly one of --R-dir or --srcjar-out must be specified.')
+
+  return options
+
+
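+# R.txt lines have the form '<java type> <resource type> <name> <value>',
+# for example (illustrative values):
+#   int drawable icon 0x7f020001
+#   int[] styleable FooBar { 0x7f010001, 0x7f010002 }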
+def CreateExtraRJavaFiles(
+      r_dir, extra_packages, extra_r_text_files, shared_resources, include_all):
+  if include_all:
+    java_files = build_utils.FindInDirectory(r_dir, "R.java")
+    if len(java_files) != 1:
+      return
+    r_java_file = java_files[0]
+    r_java_contents = codecs.open(r_java_file, encoding='utf-8').read()
+
+    for package in extra_packages:
+      package_r_java_dir = os.path.join(r_dir, *package.split('.'))
+      build_utils.MakeDirectory(package_r_java_dir)
+      package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
+      new_r_java = re.sub(r'package [.\w]*;', u'package %s;' % package,
+                          r_java_contents)
+      codecs.open(package_r_java_path, 'w', encoding='utf-8').write(new_r_java)
+  else:
+    if len(extra_packages) != len(extra_r_text_files):
+      raise Exception('Need one R.txt file per extra package')
+
+    all_resources = {}
+    r_txt_file = os.path.join(r_dir, 'R.txt')
+    if not os.path.exists(r_txt_file):
+      return
+    with open(r_txt_file) as f:
+      for line in f:
+        m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line)
+        if not m:
+          raise Exception('Unexpected line in R.txt: %s' % line)
+        java_type, resource_type, name, value = m.groups()
+        all_resources[(resource_type, name)] = (java_type, value)
+
+    for package, r_text_file in zip(extra_packages, extra_r_text_files):
+      if os.path.exists(r_text_file):
+        package_r_java_dir = os.path.join(r_dir, *package.split('.'))
+        build_utils.MakeDirectory(package_r_java_dir)
+        package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
+        CreateExtraRJavaFile(
+            package, package_r_java_path, r_text_file, all_resources,
+            shared_resources)
+
+
+def CreateExtraRJavaFile(
+      package, r_java_path, r_text_file, all_resources, shared_resources):
+  resources = {}
+  with open(r_text_file) as f:
+    for line in f:
+      m = re.match(r'int(?:\[\])? (\w+) (\w+) ', line)
+      if not m:
+        raise Exception('Unexpected line in R.txt: %s' % line)
+      resource_type, name = m.groups()
+      java_type, value = all_resources[(resource_type, name)]
+      if resource_type not in resources:
+        resources[resource_type] = []
+      resources[resource_type].append((name, java_type, value))
+
+  template = Template("""/* AUTO-GENERATED FILE.  DO NOT MODIFY. */
+
+package {{ package }};
+
+public final class R {
+    {% for resource_type in resources %}
+    public static final class {{ resource_type }} {
+        {% for name, java_type, value in resources[resource_type] %}
+        {% if shared_resources %}
+        public static {{ java_type }} {{ name }} = {{ value }};
+        {% else %}
+        public static final {{ java_type }} {{ name }} = {{ value }};
+        {% endif %}
+        {% endfor %}
+    }
+    {% endfor %}
+    {% if shared_resources %}
+    public static void onResourcesLoaded(int packageId) {
+        {% for resource_type in resources %}
+        {% for name, java_type, value in resources[resource_type] %}
+        {% if java_type == 'int[]' %}
+        for(int i = 0; i < {{ resource_type }}.{{ name }}.length; ++i) {
+            {{ resource_type }}.{{ name }}[i] =
+                    ({{ resource_type }}.{{ name }}[i] & 0x00ffffff)
+                    | (packageId << 24);
+        }
+        {% else %}
+        {{ resource_type }}.{{ name }} =
+                ({{ resource_type }}.{{ name }} & 0x00ffffff)
+                | (packageId << 24);
+        {% endif %}
+        {% endfor %}
+        {% endfor %}
+    }
+    {% endif %}
+}
+""", trim_blocks=True, lstrip_blocks=True)
+
+  output = template.render(package=package, resources=resources,
+                           shared_resources=shared_resources)
+  with open(r_java_path, 'w') as f:
+    f.write(output)
+
+
+def CrunchDirectory(aapt, input_dir, output_dir):
+  """Crunches the images in input_dir and its subdirectories into output_dir.
+
+  If an image is already optimized, crunching often increases image size. In
+  this case, the crunched image is overwritten with the original image.
+  """
+  aapt_cmd = [aapt,
+              'crunch',
+              '-C', output_dir,
+              '-S', input_dir,
+              '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN]
+  build_utils.CheckOutput(aapt_cmd, stderr_filter=FilterCrunchStderr,
+                          fail_func=DidCrunchFail)
+
+  # Check for images whose size increased during crunching and replace them
+  # with their originals (except for 9-patches, which must be crunched).
+  for dir_, _, files in os.walk(output_dir):
+    for crunched in files:
+      if crunched.endswith('.9.png'):
+        continue
+      if not crunched.endswith('.png'):
+        raise Exception('Unexpected file in crunched dir: ' + crunched)
+      crunched = os.path.join(dir_, crunched)
+      original = os.path.join(input_dir, os.path.relpath(crunched, output_dir))
+      original_size = os.path.getsize(original)
+      crunched_size = os.path.getsize(crunched)
+      if original_size < crunched_size:
+        shutil.copyfile(original, crunched)
+
+
+def FilterCrunchStderr(stderr):
+  """Filters out lines from aapt crunch's stderr that can safely be ignored."""
+  filtered_lines = []
+  for line in stderr.splitlines(True):
+    # Ignore this libpng warning, which is a known non-error condition.
+    # http://crbug.com/364355
+    if ('libpng warning: iCCP: Not recognizing known sRGB profile that has '
+        + 'been edited' in line):
+      continue
+    filtered_lines.append(line)
+  return ''.join(filtered_lines)
+
+
+def DidCrunchFail(returncode, stderr):
+  """Determines whether aapt crunch failed from its return code and output.
+
+  Because aapt's return code cannot be trusted, any output to stderr is
+  an indication that aapt has failed (http://crbug.com/314885).
+  """
+  return returncode != 0 or stderr
+
+
+def ZipResources(resource_dirs, zip_path):
+  # Python zipfile does not provide a way to replace a file (it just writes
+  # another file with the same name). So, first collect all the files to put
+  # in the zip (with proper overriding), and then zip them.
+  files_to_zip = dict()
+  for d in resource_dirs:
+    for root, _, files in os.walk(d):
+      for f in files:
+        archive_path = os.path.join(os.path.relpath(root, d), f)
+        path = os.path.join(root, f)
+        files_to_zip[archive_path] = path
+  with zipfile.ZipFile(zip_path, 'w') as outzip:
+    for archive_path, path in files_to_zip.iteritems():
+      outzip.write(path, archive_path)
+
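+# Note (illustrative): if two directories in resource_dirs both contain
+# values/strings.xml, the copy from the later directory wins.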
+
+def CombineZips(zip_files, output_path):
+  # When packaging resources, if the top-level directories in the zip file are
+  # of the form 0, 1, ..., then each subdirectory will be passed to aapt as a
+  # resources directory. While some resources just clobber others (image files,
+  # etc), other resources (particularly .xml files) need to be more
+  # intelligently merged. That merging is left up to aapt.
+  with zipfile.ZipFile(output_path, 'w') as outzip:
+    for i, z in enumerate(zip_files):
+      with zipfile.ZipFile(z, 'r') as inzip:
+        for name in inzip.namelist():
+          new_name = '%d/%s' % (i, name)
+          outzip.writestr(new_name, inzip.read(name))
+
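+# Example (illustrative): CombineZips(['a.zip', 'b.zip'], 'out.zip') stores
+# a.zip's entries under 0/ and b.zip's entries under 1/ inside out.zip, so
+# aapt later treats each numbered subdirectory as its own resource directory.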
+
+def main():
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+
+  options = ParseArgs(args)
+  android_jar = os.path.join(options.android_sdk, 'android.jar')
+  aapt = options.aapt_path
+
+  input_files = []
+
+  with build_utils.TempDir() as temp_dir:
+    deps_dir = os.path.join(temp_dir, 'deps')
+    build_utils.MakeDirectory(deps_dir)
+    v14_dir = os.path.join(temp_dir, 'v14')
+    build_utils.MakeDirectory(v14_dir)
+
+    gen_dir = os.path.join(temp_dir, 'gen')
+    build_utils.MakeDirectory(gen_dir)
+
+    input_resource_dirs = build_utils.ParseGypList(options.resource_dirs)
+
+    if not options.v14_skip:
+      for resource_dir in input_resource_dirs:
+        generate_v14_compatible_resources.GenerateV14Resources(
+            resource_dir,
+            v14_dir)
+
+    dep_zips = build_utils.ParseGypList(options.dependencies_res_zips)
+    input_files += dep_zips
+    dep_subdirs = []
+    for z in dep_zips:
+      subdir = os.path.join(deps_dir, os.path.basename(z))
+      if os.path.exists(subdir):
+        raise Exception('Resource zip name conflict: ' + os.path.basename(z))
+      build_utils.ExtractAll(z, path=subdir)
+      dep_subdirs.append(subdir)
+
+    # Generate R.java. This R.java contains non-final constants and is used only
+    # while compiling the library jar (e.g. chromium_content.jar). When building
+    # an apk, a new R.java file with the correct resource -> ID mappings will be
+    # generated by merging the resources from all libraries and the main apk
+    # project.
+    package_command = [aapt,
+                       'package',
+                       '-m',
+                       '-M', options.android_manifest,
+                       '--auto-add-overlay',
+                       '-I', android_jar,
+                       '--output-text-symbols', gen_dir,
+                       '-J', gen_dir,
+                       '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN]
+
+    for d in input_resource_dirs:
+      package_command += ['-S', d]
+
+    for d in dep_subdirs:
+      package_command += ['-S', d]
+
+    if options.non_constant_id:
+      package_command.append('--non-constant-id')
+    if options.custom_package:
+      package_command += ['--custom-package', options.custom_package]
+    if options.proguard_file:
+      package_command += ['-G', options.proguard_file]
+    if options.shared_resources:
+      package_command.append('--shared-lib')
+    build_utils.CheckOutput(package_command, print_stderr=False)
+
+    if options.extra_res_packages:
+      CreateExtraRJavaFiles(
+          gen_dir,
+          build_utils.ParseGypList(options.extra_res_packages),
+          build_utils.ParseGypList(options.extra_r_text_files),
+          options.shared_resources,
+          options.include_all_resources)
+
+    # This is the list of directories with resources to put in the final .zip
+    # file. The order of these is important so that crunched/v14 resources
+    # override the normal ones.
+    zip_resource_dirs = input_resource_dirs + [v14_dir]
+
+    base_crunch_dir = os.path.join(temp_dir, 'crunch')
+
+    # Crunch image resources. This shrinks png files and is necessary for
+    # 9-patch images to display correctly. 'aapt crunch' accepts only a single
+    # directory at a time and deletes everything in the output directory.
+    for idx, input_dir in enumerate(input_resource_dirs):
+      crunch_dir = os.path.join(base_crunch_dir, str(idx))
+      build_utils.MakeDirectory(crunch_dir)
+      zip_resource_dirs.append(crunch_dir)
+      CrunchDirectory(aapt, input_dir, crunch_dir)
+
+    ZipResources(zip_resource_dirs, options.resource_zip_out)
+
+    if options.all_resources_zip_out:
+      CombineZips([options.resource_zip_out] + dep_zips,
+                  options.all_resources_zip_out)
+
+    if options.R_dir:
+      build_utils.DeleteDirectory(options.R_dir)
+      shutil.copytree(gen_dir, options.R_dir)
+    else:
+      build_utils.ZipDir(options.srcjar_out, gen_dir)
+
+    if options.r_text_out:
+      r_text_path = os.path.join(gen_dir, 'R.txt')
+      if os.path.exists(r_text_path):
+        shutil.copyfile(r_text_path, options.r_text_out)
+      else:
+        open(options.r_text_out, 'w').close()
+
+  if options.depfile:
+    input_files += build_utils.GetPythonDependencies()
+    build_utils.WriteDepfile(options.depfile, input_files)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/proguard.py b/build/android/gyp/proguard.py
new file mode 100755
index 0000000..5127100
--- /dev/null
+++ b/build/android/gyp/proguard.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import sys
+
+from util import build_utils
+from util import proguard_util
+
+def DoProguard(options):
+  proguard = proguard_util.ProguardCmdBuilder(options.proguard_path)
+  proguard.injars(build_utils.ParseGypList(options.input_paths))
+  proguard.configs(build_utils.ParseGypList(options.proguard_configs))
+  proguard.outjar(options.output_path)
+
+  if options.mapping:
+    proguard.mapping(options.mapping)
+
+  if options.is_test:
+    proguard.is_test(True)
+
+  classpath = []
+  for arg in options.classpath:
+    classpath += build_utils.ParseGypList(arg)
+  classpath = list(set(classpath))
+  proguard.libraryjars(classpath)
+
+  proguard.CheckOutput()
+
+  return proguard.GetInputs()
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--proguard-path',
+                    help='Path to the proguard executable.')
+  parser.add_option('--input-paths',
+                    help='Paths to the .jar files proguard should run on.')
+  parser.add_option('--output-path', help='Path to the generated .jar file.')
+  parser.add_option('--proguard-configs',
+                    help='Paths to proguard configuration files.')
+  parser.add_option('--mapping', help='Path to proguard mapping to apply.')
+  parser.add_option('--is-test', action='store_true',
+      help='If true, extra proguard options for instrumentation tests will be '
+      'added.')
+  parser.add_option('--classpath', action='append',
+                    help='Classpath for proguard.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args(args)
+
+  inputs = DoProguard(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        inputs + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/push_libraries.py b/build/android/gyp/push_libraries.py
new file mode 100755
index 0000000..6b31a2e
--- /dev/null
+++ b/build/android/gyp/push_libraries.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pushes native libraries to a device.
+
+"""
+
+import optparse
+import os
+import sys
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import constants
+
+from util import build_device
+from util import build_utils
+from util import md5_check
+
+def DoPush(options):
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  device = build_device.GetBuildDeviceFromPath(
+      options.build_device_configuration)
+  if not device:
+    return
+
+  serial_number = device.GetSerialNumber()
+  # A list so that it is modifiable in Push below.
+  needs_directory = [True]
+  for lib in libraries:
+    device_path = os.path.join(options.device_dir, lib)
+    host_path = os.path.join(options.libraries_dir, lib)
+
+    def Push():
+      if needs_directory:
+        device.RunShellCommand('mkdir -p ' + options.device_dir)
+        needs_directory[:] = [] # = False
+      device.PushChangedFiles([(host_path, device_path)])
+
+    record_path = '%s.%s.push.md5.stamp' % (host_path, serial_number)
+    md5_check.CallAndRecordIfStale(
+        Push,
+        record_path=record_path,
+        input_paths=[host_path],
+        input_strings=[device_path])
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  parser.add_option('--libraries-dir',
+      help='Directory that contains stripped libraries.')
+  parser.add_option('--device-dir',
+      help='Device directory to push the libraries to.')
+  parser.add_option('--libraries',
+      help='List of native libraries.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--build-device-configuration',
+      help='Path to build device configuration.')
+  parser.add_option('--configuration-name',
+      help='The build CONFIGURATION_NAME')
+  options, _ = parser.parse_args(args)
+
+  required_options = ['libraries', 'device_dir', 'libraries_dir']
+  build_utils.CheckOptions(options, parser, required=required_options)
+  constants.SetBuildType(options.configuration_name)
+
+  DoPush(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/strip_library_for_device.py b/build/android/gyp/strip_library_for_device.py
new file mode 100755
index 0000000..9e2daae
--- /dev/null
+++ b/build/android/gyp/strip_library_for_device.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+
+def StripLibrary(android_strip, android_strip_args, library_path, output_path):
+  if build_utils.IsTimeStale(output_path, [library_path]):
+    strip_cmd = ([android_strip] +
+                 android_strip_args +
+                 ['-o', output_path, library_path])
+    build_utils.CheckOutput(strip_cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--android-strip',
+      help='Path to the toolchain\'s strip binary')
+  parser.add_option('--android-strip-arg', action='append',
+      help='Argument to be passed to strip')
+  parser.add_option('--libraries-dir',
+      help='Directory for un-stripped libraries')
+  parser.add_option('--stripped-libraries-dir',
+      help='Directory for stripped libraries')
+  parser.add_option('--libraries',
+      help='List of libraries to strip')
+  parser.add_option('--stamp', help='Path to touch on success')
+
+  options, _ = parser.parse_args(args)
+
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  build_utils.MakeDirectory(options.stripped_libraries_dir)
+
+  for library in libraries:
+    for base_path in options.libraries_dir.split(','):
+      library_path = os.path.join(base_path, library)
+      if os.path.exists(library_path):
+        break
+    stripped_library_path = os.path.join(
+        options.stripped_libraries_dir, library)
+    StripLibrary(options.android_strip, options.android_strip_arg, library_path,
+        stripped_library_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/test/BUILD.gn b/build/android/gyp/test/BUILD.gn
new file mode 100644
index 0000000..2deac1d
--- /dev/null
+++ b/build/android/gyp/test/BUILD.gn
@@ -0,0 +1,13 @@
+import("//build/config/android/rules.gni")
+
+java_library("hello_world_java") {
+  java_files = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ]
+}
+
+java_binary("hello_world") {
+  deps = [
+    ":hello_world_java",
+  ]
+  java_files = [ "java/org/chromium/helloworld/HelloWorldMain.java" ]
+  main_class = "org.chromium.helloworld.HelloWorldMain"
+}
diff --git a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
new file mode 100644
index 0000000..10860d8
--- /dev/null
+++ b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
@@ -0,0 +1,15 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldMain {
+    public static void main(String[] args) {
+        if (args.length > 0) {
+            System.exit(Integer.parseInt(args[0]));
+        }
+        HelloWorldPrinter.print();
+    }
+}
+
diff --git a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
new file mode 100644
index 0000000..b09673e
--- /dev/null
+++ b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldPrinter {
+    public static void print() {
+        System.out.println("Hello, world!");
+    }
+}
+
diff --git a/build/android/gyp/touch.py b/build/android/gyp/touch.py
new file mode 100755
index 0000000..7b4375e
--- /dev/null
+++ b/build/android/gyp/touch.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+from util import build_utils
+
+def main(argv):
+  for f in argv[1:]:
+    build_utils.Touch(f)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/util/__init__.py b/build/android/gyp/util/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/gyp/util/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/gyp/util/build_device.py b/build/android/gyp/util/build_device.py
new file mode 100644
index 0000000..8ab1112
--- /dev/null
+++ b/build/android/gyp/util/build_device.py
@@ -0,0 +1,108 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" A simple device interface for build steps.
+
+"""
+
+import logging
+import os
+import re
+import sys
+
+from util import build_utils
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..', '..')
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import android_commands
+from pylib.device import device_errors
+from pylib.device import device_utils
+
+GetAttachedDevices = android_commands.GetAttachedDevices
+
+
+class BuildDevice(object):
+  def __init__(self, configuration):
+    self.id = configuration['id']
+    self.description = configuration['description']
+    self.install_metadata = configuration['install_metadata']
+    self.device = device_utils.DeviceUtils(self.id)
+
+  def RunShellCommand(self, *args, **kwargs):
+    return self.device.RunShellCommand(*args, **kwargs)
+
+  def PushChangedFiles(self, *args, **kwargs):
+    return self.device.PushChangedFiles(*args, **kwargs)
+
+  def GetSerialNumber(self):
+    return self.id
+
+  def Install(self, *args, **kwargs):
+    return self.device.Install(*args, **kwargs)
+
+  def InstallSplitApk(self, *args, **kwargs):
+    return self.device.InstallSplitApk(*args, **kwargs)
+
+  def GetInstallMetadata(self, apk_package):
+    """Gets the metadata on the device for the apk_package apk."""
+    # Matches lines like:
+    # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+    #   org.chromium.chrome.shell.apk
+    # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+    #   org.chromium.chrome.shell-1.apk
+    apk_matcher = lambda s: re.match(r'.*%s(-[0-9]*)?\.apk$' % apk_package, s)
+    matches = filter(apk_matcher, self.install_metadata)
+    return matches[0] if matches else None
+
+
+def GetConfigurationForDevice(device_id):
+  device = device_utils.DeviceUtils(device_id)
+  configuration = None
+  has_root = False
+  is_online = device.IsOnline()
+  if is_online:
+    cmd = 'ls -l /data/app; getprop ro.build.description'
+    cmd_output = device.RunShellCommand(cmd)
+    has_root = 'Permission denied' not in cmd_output[0]
+    if not has_root:
+      # Disable warning log messages from EnableRoot()
+      logging.getLogger().disabled = True
+      try:
+        device.EnableRoot()
+        has_root = True
+      except device_errors.CommandFailedError:
+        has_root = False
+      finally:
+        logging.getLogger().disabled = False
+      cmd_output = device.RunShellCommand(cmd)
+
+    configuration = {
+        'id': device_id,
+        'description': cmd_output[-1],
+        'install_metadata': cmd_output[:-1],
+      }
+  return configuration, is_online, has_root
+
+
+def WriteConfigurations(configurations, path):
+  # Currently we only support installing to the first device.
+  build_utils.WriteJson(configurations[:1], path, only_if_changed=True)
+
+
+def ReadConfigurations(path):
+  return build_utils.ReadJson(path)
+
+
+def GetBuildDevice(configurations):
+  assert len(configurations) == 1
+  return BuildDevice(configurations[0])
+
+
+def GetBuildDeviceFromPath(path):
+  configurations = ReadConfigurations(path)
+  if configurations:
+    return GetBuildDevice(configurations)
+  return None
+
diff --git a/build/android/gyp/util/build_utils.py b/build/android/gyp/util/build_utils.py
new file mode 100644
index 0000000..65b1a64
--- /dev/null
+++ b/build/android/gyp/util/build_utils.py
@@ -0,0 +1,376 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import ast
+import contextlib
+import fnmatch
+import json
+import os
+import pipes
+import re
+import shlex
+import shutil
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+
+CHROMIUM_SRC = os.path.normpath(
+    os.path.join(os.path.dirname(__file__),
+                 os.pardir, os.pardir, os.pardir, os.pardir))
+COLORAMA_ROOT = os.path.join(CHROMIUM_SRC,
+                             'third_party', 'colorama', 'src')
+# aapt should ignore OWNERS files in addition to the default ignore pattern.
+AAPT_IGNORE_PATTERN = ('!OWNERS:!.svn:!.git:!.ds_store:!*.scc:.*:<dir>_*:' +
+                       '!CVS:!thumbs.db:!picasa.ini:!*~:!*.d.stamp')
+
+
+@contextlib.contextmanager
+def TempDir():
+  dirname = tempfile.mkdtemp()
+  try:
+    yield dirname
+  finally:
+    shutil.rmtree(dirname)
+
+
+def MakeDirectory(dir_path):
+  try:
+    os.makedirs(dir_path)
+  except OSError:
+    pass
+
+
+def DeleteDirectory(dir_path):
+  if os.path.exists(dir_path):
+    shutil.rmtree(dir_path)
+
+
+def Touch(path, fail_if_missing=False):
+  if fail_if_missing and not os.path.exists(path):
+    raise Exception(path + ' doesn\'t exist.')
+
+  MakeDirectory(os.path.dirname(path))
+  with open(path, 'a'):
+    os.utime(path, None)
+
+
+def FindInDirectory(directory, filename_filter):
+  files = []
+  for root, _dirnames, filenames in os.walk(directory):
+    matched_files = fnmatch.filter(filenames, filename_filter)
+    files.extend((os.path.join(root, f) for f in matched_files))
+  return files
+
+
+def FindInDirectories(directories, filename_filter):
+  all_files = []
+  for directory in directories:
+    all_files.extend(FindInDirectory(directory, filename_filter))
+  return all_files
+
+
+def ParseGnList(gn_string):
+  return ast.literal_eval(gn_string)
+
+
+def ParseGypList(gyp_string):
+  # The ninja generator doesn't support $ in strings, so use ## to
+  # represent $.
+  # TODO(cjhopman): Remove when
+  # https://code.google.com/p/gyp/issues/detail?id=327
+  # is addressed.
+  gyp_string = gyp_string.replace('##', '$')
+
+  if gyp_string.startswith('['):
+    return ParseGnList(gyp_string)
+  return shlex.split(gyp_string)
+
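+# Example (illustrative): ParseGypList('a.pak b.pak') == ['a.pak', 'b.pak'];
+# the GN-style form ParseGypList('["a.pak", "b.pak"]') gives the same result.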
+
+def CheckOptions(options, parser, required=None):
+  if not required:
+    return
+  for option_name in required:
+    if getattr(options, option_name) is None:
+      parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
+def WriteJson(obj, path, only_if_changed=False):
+  old_dump = None
+  if os.path.exists(path):
+    with open(path, 'r') as oldfile:
+      old_dump = oldfile.read()
+
+  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))
+
+  if not only_if_changed or old_dump != new_dump:
+    with open(path, 'w') as outfile:
+      outfile.write(new_dump)
+
+
+def ReadJson(path):
+  with open(path, 'r') as jsonfile:
+    return json.load(jsonfile)
+
+
+class CalledProcessError(Exception):
+  """This exception is raised when the process run by CheckOutput
+  exits with a non-zero exit code."""
+
+  def __init__(self, cwd, args, output):
+    super(CalledProcessError, self).__init__()
+    self.cwd = cwd
+    self.args = args
+    self.output = output
+
+  def __str__(self):
+    # A user should be able to simply copy and paste the command that failed
+    # into their shell.
+    copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
+        ' '.join(map(pipes.quote, self.args)))
+    return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
+# This can be used like subprocess.check_output() in most cases. Its output,
+# particularly when the command fails, highlights the failure more clearly.
+# If the command fails, raises a build_utils.CalledProcessError.
+def CheckOutput(args, cwd=None,
+                print_stdout=False, print_stderr=True,
+                stdout_filter=None,
+                stderr_filter=None,
+                fail_func=lambda returncode, stderr: returncode != 0):
+  if not cwd:
+    cwd = os.getcwd()
+
+  child = subprocess.Popen(args,
+      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
+  stdout, stderr = child.communicate()
+
+  if stdout_filter is not None:
+    stdout = stdout_filter(stdout)
+
+  if stderr_filter is not None:
+    stderr = stderr_filter(stderr)
+
+  if fail_func(child.returncode, stderr):
+    raise CalledProcessError(cwd, args, stdout + stderr)
+
+  if print_stdout:
+    sys.stdout.write(stdout)
+  if print_stderr:
+    sys.stderr.write(stderr)
+
+  return stdout
+
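+# Example usage (illustrative):
+#   output = build_utils.CheckOutput(['aapt', 'version'], print_stderr=False)
+# On failure, the raised CalledProcessError's message includes a
+# copy-pasteable '( cd ...; ... )' command line.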
+
+def GetModifiedTime(path):
+  # For a symlink, the modified time should be the greater of the link's
+  # modified time and the modified time of the target.
+  return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
+
+
+def IsTimeStale(output, inputs):
+  if not os.path.exists(output):
+    return True
+
+  output_time = GetModifiedTime(output)
+  for i in inputs:
+    if GetModifiedTime(i) > output_time:
+      return True
+  return False
+
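+# Example (illustrative): IsTimeStale('libfoo.stripped.so', ['libfoo.so'])
+# is True when the output is missing or older than any input; symlink inputs
+# compare using the newer of the link's and target's mtimes (GetModifiedTime).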
+
+def IsDeviceReady():
+  device_state = CheckOutput(['adb', 'get-state'])
+  return device_state.strip() == 'device'
+
+
+def CheckZipPath(name):
+  if os.path.normpath(name) != name:
+    raise Exception('Non-canonical zip path: %s' % name)
+  if os.path.isabs(name):
+    raise Exception('Absolute zip path: %s' % name)
+
+
+def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None):
+  if path is None:
+    path = os.getcwd()
+  elif not os.path.exists(path):
+    MakeDirectory(path)
+
+  with zipfile.ZipFile(zip_path) as z:
+    for name in z.namelist():
+      if name.endswith('/'):
+        continue
+      if pattern is not None:
+        if not fnmatch.fnmatch(name, pattern):
+          continue
+      CheckZipPath(name)
+      if no_clobber:
+        output_path = os.path.join(path, name)
+        if os.path.exists(output_path):
+          raise Exception(
+              'Path already exists from zip: %s %s %s'
+              % (zip_path, name, output_path))
+      z.extract(name, path)
+
+
+def DoZip(inputs, output, base_dir):
+  with zipfile.ZipFile(output, 'w') as outfile:
+    for f in inputs:
+      CheckZipPath(os.path.relpath(f, base_dir))
+      outfile.write(f, os.path.relpath(f, base_dir))
+
+
+def ZipDir(output, base_dir):
+  with zipfile.ZipFile(output, 'w') as outfile:
+    for root, _, files in os.walk(base_dir):
+      for f in files:
+        path = os.path.join(root, f)
+        archive_path = os.path.relpath(path, base_dir)
+        CheckZipPath(archive_path)
+        outfile.write(path, archive_path)
+
+
+def MergeZips(output, inputs, exclude_patterns=None):
+  added_names = set()
+  def Allow(name):
+    if exclude_patterns is not None:
+      for p in exclude_patterns:
+        if fnmatch.fnmatch(name, p):
+          return False
+    return True
+
+  with zipfile.ZipFile(output, 'w') as out_zip:
+    for in_file in inputs:
+      with zipfile.ZipFile(in_file, 'r') as in_zip:
+        for name in in_zip.namelist():
+          if name not in added_names and Allow(name):
+            out_zip.writestr(name, in_zip.read(name))
+            added_names.add(name)
+
+
+def PrintWarning(message):
+  print 'WARNING: ' + message
+
+
+def PrintBigWarning(message):
+  print '*****     ' * 8
+  PrintWarning(message)
+  print '*****     ' * 8
+
+
+def GetSortedTransitiveDependencies(top, deps_func):
+  """Gets the list of all transitive dependencies in sorted order.
+
+  There should be no cycles in the dependency graph.
+
+  Args:
+    top: a list of the top level nodes
+    deps_func: A function that takes a node and returns its direct dependencies.
+  Returns:
+    A list of all transitive dependencies of nodes in top, in order (a node will
+    appear in the list at a higher index than all of its dependencies).
+  """
+  def Node(dep):
+    return (dep, deps_func(dep))
+
+  # First: find all deps
+  unchecked_deps = list(top)
+  all_deps = set(top)
+  while unchecked_deps:
+    dep = unchecked_deps.pop()
+    new_deps = deps_func(dep).difference(all_deps)
+    unchecked_deps.extend(new_deps)
+    all_deps = all_deps.union(new_deps)
+
+  # Then: simple, slow topological sort.
+  sorted_deps = []
+  unsorted_deps = dict(map(Node, all_deps))
+  while unsorted_deps:
+    for library, dependencies in unsorted_deps.items():
+      if not dependencies.intersection(unsorted_deps.keys()):
+        sorted_deps.append(library)
+        del unsorted_deps[library]
+
+  return sorted_deps
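+
+# A small worked example (hypothetical graph): if deps_func maps
+#   'a' -> set(['b']), 'b' -> set(['c']), 'c' -> set()
+# then GetSortedTransitiveDependencies(['a'], deps_func) returns
+# ['c', 'b', 'a']: each node appears after all of its dependencies.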
+
+
+def GetPythonDependencies():
+  """Gets the paths of imported non-system python modules.
+
+  A path is assumed to be a "system" import if it is outside of chromium's
+  src/. The paths will be relative to the current directory.
+  """
+  module_paths = (m.__file__ for m in sys.modules.itervalues()
+                  if m is not None and hasattr(m, '__file__'))
+
+  abs_module_paths = map(os.path.abspath, module_paths)
+
+  non_system_module_paths = [
+      p for p in abs_module_paths if p.startswith(CHROMIUM_SRC)]
+  def ConvertPycToPy(s):
+    if s.endswith('.pyc'):
+      return s[:-1]
+    return s
+
+  non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
+  non_system_module_paths = map(os.path.relpath, non_system_module_paths)
+  return sorted(set(non_system_module_paths))
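+
+# For example (illustrative): when run from the src/ checkout root, a script
+# that imports build/android/gyp/util/build_utils.py would see
+# 'build/android/gyp/util/build_utils.py' in the returned list, with any
+# '.pyc' suffix rewritten to '.py'.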
+
+
+def AddDepfileOption(parser):
+  parser.add_option('--depfile',
+                    help='Path to depfile. This must be specified as the '
+                    'action\'s first output.')
+
+
+def WriteDepfile(path, dependencies):
+  with open(path, 'w') as depfile:
+    depfile.write(path)
+    depfile.write(': ')
+    depfile.write(' '.join(dependencies))
+    depfile.write('\n')
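+
+# The result is a Makefile-style depfile whose target is the depfile itself
+# (hence the requirement that it be the action's first output). For example
+# (hypothetical paths), WriteDepfile('out/foo.d', ['a.json', 'b.json'])
+# writes:
+#   out/foo.d: a.json b.json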
+
+
+def ExpandFileArgs(args):
+  """Replaces file-arg placeholders in args.
+
+  These placeholders have the form:
+    @FileArg(filename:key1:key2:...:keyn)
+
+  The value of such a placeholder is calculated by reading 'filename' as JSON
+  and then extracting the value at [key1][key2]...[keyn].
+
+  Note: This intentionally does not return the list of files that appear in such
+  placeholders. An action that uses file-args *must* know the paths of those
+  files prior to the parsing of the arguments (typically by explicitly listing
+  them in the action's inputs in build files).
+  """
+  new_args = list(args)
+  file_jsons = dict()
+  r = re.compile('@FileArg\((.*?)\)')
+  for i, arg in enumerate(args):
+    match = r.search(arg)
+    if not match:
+      continue
+
+    if match.end() != len(arg):
+      raise Exception('Unexpected characters after FileArg: ' + arg)
+
+    lookup_path = match.group(1).split(':')
+    file_path = lookup_path[0]
+    if not file_path in file_jsons:
+      file_jsons[file_path] = ReadJson(file_path)
+
+    expansion = file_jsons[file_path]
+    for k in lookup_path[1:]:
+      expansion = expansion[k]
+
+    new_args[i] = arg[:match.start()] + str(expansion)
+
+  return new_args
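+
+# A worked example (hypothetical file contents): if build.json contains
+#   {"javac": {"classpath": ["a.jar"]}}
+# then ExpandFileArgs(['--cp=@FileArg(build.json:javac:classpath)'])
+# returns ["--cp=['a.jar']"], i.e. the str() of the looked-up value is
+# substituted for the placeholder.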
+
diff --git a/build/android/gyp/util/md5_check.py b/build/android/gyp/util/md5_check.py
new file mode 100644
index 0000000..9f365aa
--- /dev/null
+++ b/build/android/gyp/util/md5_check.py
@@ -0,0 +1,86 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import hashlib
+import os
+
+
+def CallAndRecordIfStale(
+    function, record_path=None, input_paths=None, input_strings=None,
+    force=False):
+  """Calls function if the md5sum of the input paths/strings has changed.
+
+  The md5sum of the inputs is compared with the one stored in record_path. If
+  this has changed (or the record doesn't exist), function will be called and
+  the new md5sum will be recorded.
+
+  If force is True, the function will be called regardless of whether the
+  md5sum is out of date.
+  """
+  if not input_paths:
+    input_paths = []
+  if not input_strings:
+    input_strings = []
+  md5_checker = _Md5Checker(
+      record_path=record_path,
+      input_paths=input_paths,
+      input_strings=input_strings)
+  if force or md5_checker.IsStale():
+    function()
+    md5_checker.Write()
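+
+# A minimal usage sketch (the callback and inputs are hypothetical):
+#   CallAndRecordIfStale(
+#       Compile,                         # only called when inputs changed
+#       record_path='out/foo.stamp',     # must end in '.stamp'
+#       input_paths=['src/Foo.java'],
+#       input_strings=[compile_cmd])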
+
+
+def _UpdateMd5ForFile(md5, path, block_size=2**16):
+  with open(path, 'rb') as infile:
+    while True:
+      data = infile.read(block_size)
+      if not data:
+        break
+      md5.update(data)
+
+
+def _UpdateMd5ForDirectory(md5, dir_path):
+  for root, _, files in os.walk(dir_path):
+    for f in files:
+      _UpdateMd5ForFile(md5, os.path.join(root, f))
+
+
+def _UpdateMd5ForPath(md5, path):
+  if os.path.isdir(path):
+    _UpdateMd5ForDirectory(md5, path)
+  else:
+    _UpdateMd5ForFile(md5, path)
+
+
+class _Md5Checker(object):
+  def __init__(self, record_path=None, input_paths=None, input_strings=None):
+    if not input_paths:
+      input_paths = []
+    if not input_strings:
+      input_strings = []
+
+    assert record_path.endswith('.stamp'), (
+        'record paths must end in \'.stamp\' so that they are easy to find '
+        'and delete')
+
+    self.record_path = record_path
+
+    md5 = hashlib.md5()
+    for i in sorted(input_paths):
+      _UpdateMd5ForPath(md5, i)
+    for s in input_strings:
+      md5.update(s)
+    self.new_digest = md5.hexdigest()
+
+    self.old_digest = ''
+    if os.path.exists(self.record_path):
+      with open(self.record_path, 'r') as old_record:
+        self.old_digest = old_record.read()
+
+  def IsStale(self):
+    return self.old_digest != self.new_digest
+
+  def Write(self):
+    with open(self.record_path, 'w') as new_record:
+      new_record.write(self.new_digest)
diff --git a/build/android/gyp/util/md5_check_test.py b/build/android/gyp/util/md5_check_test.py
new file mode 100644
index 0000000..4f89fc2
--- /dev/null
+++ b/build/android/gyp/util/md5_check_test.py
@@ -0,0 +1,72 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import tempfile
+import unittest
+
+import md5_check # pylint: disable=W0403
+
+
+class TestMd5Check(unittest.TestCase):
+  def setUp(self):
+    self.called = False
+
+  def testCallAndRecordIfStale(self):
+    input_strings = ['string1', 'string2']
+    input_file1 = tempfile.NamedTemporaryFile()
+    input_file2 = tempfile.NamedTemporaryFile()
+    file1_contents = 'input file 1'
+    file2_contents = 'input file 2'
+    input_file1.write(file1_contents)
+    input_file1.flush()
+    input_file2.write(file2_contents)
+    input_file2.flush()
+    input_files = [input_file1.name, input_file2.name]
+
+    record_path = tempfile.NamedTemporaryFile(suffix='.stamp')
+
+    def CheckCallAndRecord(should_call, message, force=False):
+      self.called = False
+      def MarkCalled():
+        self.called = True
+      md5_check.CallAndRecordIfStale(
+          MarkCalled,
+          record_path=record_path.name,
+          input_paths=input_files,
+          input_strings=input_strings,
+          force=force)
+      self.failUnlessEqual(should_call, self.called, message)
+
+    CheckCallAndRecord(True, 'should call when record doesn\'t exist')
+    CheckCallAndRecord(False, 'should not call when nothing changed')
+    CheckCallAndRecord(True, force=True, message='should call when forced')
+
+    input_file1.write('some more input')
+    input_file1.flush()
+    CheckCallAndRecord(True, 'changed input file should trigger call')
+
+    input_files = input_files[::-1]
+    CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
+
+    input_files = input_files[:1]
+    CheckCallAndRecord(True, 'removing file should trigger call')
+
+    input_files.append(input_file2.name)
+    CheckCallAndRecord(True, 'added input file should trigger call')
+
+    input_strings[0] = input_strings[0] + ' a bit longer'
+    CheckCallAndRecord(True, 'changed input string should trigger call')
+
+    input_strings = input_strings[::-1]
+    CheckCallAndRecord(True, 'reordering of string inputs should trigger call')
+
+    input_strings = input_strings[:1]
+    CheckCallAndRecord(True, 'removing a string should trigger call')
+
+    input_strings.append('a brand new string')
+    CheckCallAndRecord(True, 'added input string should trigger call')
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/gyp/util/proguard_util.py b/build/android/gyp/util/proguard_util.py
new file mode 100644
index 0000000..901cd9f
--- /dev/null
+++ b/build/android/gyp/util/proguard_util.py
@@ -0,0 +1,128 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+from util import build_utils
+
+def FilterProguardOutput(output):
+  '''ProGuard outputs boring stuff to stdout (proguard version, jar path, etc)
+  as well as interesting stuff (notes, warnings, etc). If stdout is entirely
+  boring, this function suppresses the output by returning the empty string.
+  '''
+  ignore_patterns = [
+    'ProGuard, version ',
+    'Reading program jar [',
+    'Reading library jar [',
+    'Preparing output jar [',
+    '  Copying resources from program jar [',
+  ]
+  for line in output.splitlines():
+    for pattern in ignore_patterns:
+      if line.startswith(pattern):
+        break
+    else:
+      # line doesn't match any of the patterns; it's probably something worth
+      # printing out.
+      return output
+  return ''
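+
+# For example, output consisting solely of lines like
+#   ProGuard, version 4.9
+#   Reading program jar [/path/to/in.jar]
+# is collapsed to '', while any line not covered by ignore_patterns (e.g. a
+# "Note:" or "Warning:" line) causes the full output to be returned.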
+
+
+class ProguardCmdBuilder(object):
+  def __init__(self, proguard_jar):
+    assert os.path.exists(proguard_jar)
+    self._proguard_jar_path = proguard_jar
+    self._test = None
+    self._mapping = None
+    self._libraries = None
+    self._injars = None
+    self._configs = None
+    self._outjar = None
+
+  def outjar(self, path):
+    assert self._outjar is None
+    self._outjar = path
+
+  def is_test(self, enable):
+    assert self._test is None
+    self._test = enable
+
+  def mapping(self, path):
+    assert self._mapping is None
+    assert os.path.exists(path), path
+    self._mapping = path
+
+  def libraryjars(self, paths):
+    assert self._libraries is None
+    for p in paths:
+      assert os.path.exists(p), p
+    self._libraries = paths
+
+  def injars(self, paths):
+    assert self._injars is None
+    for p in paths:
+      assert os.path.exists(p), p
+    self._injars = paths
+
+  def configs(self, paths):
+    assert self._configs is None
+    for p in paths:
+      assert os.path.exists(p), p
+    self._configs = paths
+
+  def build(self):
+    assert self._injars is not None
+    assert self._outjar is not None
+    assert self._configs is not None
+    cmd = [
+      'java', '-jar', self._proguard_jar_path,
+      '-forceprocessing',
+    ]
+    if self._test:
+      cmd += [
+        '-dontobfuscate',
+        '-dontoptimize',
+        '-dontshrink',
+        '-dontskipnonpubliclibraryclassmembers',
+      ]
+
+    if self._mapping:
+      cmd += [
+        '-applymapping', self._mapping,
+      ]
+
+    if self._libraries:
+      cmd += [
+        '-libraryjars', ':'.join(self._libraries),
+      ]
+
+    cmd += [
+      '-injars', ':'.join(self._injars)
+    ]
+
+    for config_file in self._configs:
+      cmd += ['-include', config_file]
+
+    # The output jar must be specified after inputs.
+    cmd += [
+      '-outjars', self._outjar,
+      '-dump', self._outjar + '.dump',
+      '-printseeds', self._outjar + '.seeds',
+      '-printusage', self._outjar + '.usage',
+      '-printmapping', self._outjar + '.mapping',
+    ]
+    return cmd
+
+  def GetInputs(self):
+    inputs = [self._proguard_jar_path] + self._configs + self._injars
+    if self._mapping:
+      inputs.append(self._mapping)
+    if self._libraries:
+      inputs += self._libraries
+    return inputs
+
+
+  def CheckOutput(self):
+    build_utils.CheckOutput(self.build(), print_stdout=True,
+                            stdout_filter=FilterProguardOutput)
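+
+# A typical call sequence (all paths hypothetical):
+#   proguard = ProguardCmdBuilder('third_party/proguard/proguard.jar')
+#   proguard.injars(['obj/foo.jar'])
+#   proguard.configs(['foo/proguard.flags'])
+#   proguard.outjar('obj/foo.proguard.jar')
+#   proguard.CheckOutput()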
+
diff --git a/build/android/gyp/write_build_config.py b/build/android/gyp/write_build_config.py
new file mode 100755
index 0000000..3773e98
--- /dev/null
+++ b/build/android/gyp/write_build_config.py
@@ -0,0 +1,357 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a build_config file.
+
+The build_config file for a target is a json file containing information about
+how to build that target based on the target's dependencies. This includes
+things like: the javac classpath, the list of android resources dependencies,
+etc. It also includes the information needed to create the build_config for
+other targets that depend on that one.
+
+Android build scripts should not refer to the build_config directly, and the
+build specification should instead pass information in using the special
+file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
+of values in a json dict in a file and looks like this:
+  --python-arg=@FileArg(build_config_path:javac:classpath)
+
+Note: If paths to input files are passed in this way, it is important that:
+  1. inputs/deps of the action ensure that the files are available the first
+  time the action runs.
+  2. Either (a) or (b)
+    a. inputs/deps ensure that the action runs whenever one of the files changes
+    b. the files are added to the action's depfile
+"""
+
+import optparse
+import os
+import sys
+import xml.dom.minidom
+
+from util import build_utils
+
+import write_ordered_libraries
+
+class AndroidManifest(object):
+  def __init__(self, path):
+    self.path = path
+    dom = xml.dom.minidom.parse(path)
+    manifests = dom.getElementsByTagName('manifest')
+    assert len(manifests) == 1
+    self.manifest = manifests[0]
+
+  def GetInstrumentation(self):
+    instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
+    if len(instrumentation_els) == 0:
+      return None
+    if len(instrumentation_els) != 1:
+      raise Exception(
+          'More than one <instrumentation> element found in %s' % self.path)
+    return instrumentation_els[0]
+
+  def CheckInstrumentation(self, expected_package):
+    instr = self.GetInstrumentation()
+    if not instr:
+      raise Exception('No <instrumentation> elements found in %s' % self.path)
+    instrumented_package = instr.getAttributeNS(
+        'http://schemas.android.com/apk/res/android', 'targetPackage')
+    if instrumented_package != expected_package:
+      raise Exception(
+          'Wrong instrumented package. Expected %s, got %s'
+          % (expected_package, instrumented_package))
+
+  def GetPackageName(self):
+    return self.manifest.getAttribute('package')
+
+
+dep_config_cache = {}
+def GetDepConfig(path):
+  if not path in dep_config_cache:
+    dep_config_cache[path] = build_utils.ReadJson(path)['deps_info']
+  return dep_config_cache[path]
+
+
+def DepsOfType(wanted_type, configs):
+  return [c for c in configs if c['type'] == wanted_type]
+
+
+def GetAllDepsConfigsInOrder(deps_config_paths):
+  def GetDeps(path):
+    return set(GetDepConfig(path)['deps_configs'])
+  return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
+
+
+class Deps(object):
+  def __init__(self, direct_deps_config_paths):
+    self.all_deps_config_paths = GetAllDepsConfigsInOrder(
+        direct_deps_config_paths)
+    self.direct_deps_configs = [
+        GetDepConfig(p) for p in direct_deps_config_paths]
+    self.all_deps_configs = [
+        GetDepConfig(p) for p in self.all_deps_config_paths]
+
+  def All(self, wanted_type=None):
+    if wanted_type is None:
+      return self.all_deps_configs
+    return DepsOfType(wanted_type, self.all_deps_configs)
+
+  def Direct(self, wanted_type=None):
+    if wanted_type is None:
+      return self.direct_deps_configs
+    return DepsOfType(wanted_type, self.direct_deps_configs)
+
+  def AllConfigPaths(self):
+    return self.all_deps_config_paths
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--build-config', help='Path to build_config output.')
+  parser.add_option(
+      '--type',
+      help='Type of this target (e.g. android_library).')
+  parser.add_option(
+      '--possible-deps-configs',
+      help='List of paths for dependency\'s build_config files. Some '
+      'dependencies may not write build_config files. Missing build_config '
+      'files are handled differently based on the type of this target.')
+
+  # android_resources options
+  parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
+  parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
+  parser.add_option('--r-text', help='Path to target\'s R.txt file.')
+  parser.add_option('--package-name',
+      help='Java package name for these resources.')
+  parser.add_option('--android-manifest', help='Path to android manifest.')
+
+  # java library options
+  parser.add_option('--jar-path', help='Path to target\'s jar output.')
+  parser.add_option('--supports-android', action='store_true',
+      help='Whether this library supports running on the Android platform.')
+  parser.add_option('--requires-android', action='store_true',
+      help='Whether this library requires running on the Android platform.')
+  parser.add_option('--bypass-platform-checks', action='store_true',
+      help='Bypass checks for support/require Android platform.')
+
+  # android library options
+  parser.add_option('--dex-path', help='Path to target\'s dex output.')
+
+  # native library options
+  parser.add_option('--native-libs', help='List of top-level native libs.')
+  parser.add_option('--readelf-path', help='Path to toolchain\'s readelf.')
+
+  parser.add_option('--tested-apk-config',
+      help='Path to the build config of the tested apk (for an instrumentation '
+      'test apk).')
+
+  options, args = parser.parse_args(argv)
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+
+  if not options.type in [
+      'java_library', 'android_resources', 'android_apk', 'deps_dex']:
+    raise Exception('Unknown type: <%s>' % options.type)
+
+  required_options = ['build_config'] + {
+      'java_library': ['jar_path'],
+      'android_resources': ['resources_zip'],
+      'android_apk': ['jar_path', 'dex_path', 'resources_zip'],
+      'deps_dex': ['dex_path']
+    }[options.type]
+
+  if options.native_libs:
+    required_options.append('readelf_path')
+
+  build_utils.CheckOptions(options, parser, required_options)
+
+  if options.type == 'java_library':
+    if options.supports_android and not options.dex_path:
+      raise Exception('java_library that supports Android requires a dex path.')
+
+    if options.requires_android and not options.supports_android:
+      raise Exception(
+          '--supports-android is required when using --requires-android')
+
+  possible_deps_config_paths = build_utils.ParseGypList(
+      options.possible_deps_configs)
+
+  allow_unknown_deps = (options.type == 'android_apk' or
+                        options.type == 'android_resources')
+  unknown_deps = [
+      c for c in possible_deps_config_paths if not os.path.exists(c)]
+  if unknown_deps and not allow_unknown_deps:
+    raise Exception('Unknown deps: ' + str(unknown_deps))
+
+  direct_deps_config_paths = [
+      c for c in possible_deps_config_paths if not c in unknown_deps]
+
+  deps = Deps(direct_deps_config_paths)
+  direct_library_deps = deps.Direct('java_library')
+  all_library_deps = deps.All('java_library')
+
+  direct_resources_deps = deps.Direct('android_resources')
+  all_resources_deps = deps.All('android_resources')
+  # Resources should be ordered with the highest-level dependency first so that
+  # overrides are done correctly.
+  all_resources_deps.reverse()
+
+  if options.type == 'android_apk' and options.tested_apk_config:
+    tested_apk_deps = Deps([options.tested_apk_config])
+    tested_apk_resources_deps = tested_apk_deps.All('android_resources')
+    all_resources_deps = [
+        d for d in all_resources_deps if not d in tested_apk_resources_deps]
+
+  # Initialize some common config.
+  config = {
+    'deps_info': {
+      'name': os.path.basename(options.build_config),
+      'path': options.build_config,
+      'type': options.type,
+      'deps_configs': direct_deps_config_paths,
+    }
+  }
+  deps_info = config['deps_info']
+
+  if options.type == 'java_library' and not options.bypass_platform_checks:
+    deps_info['requires_android'] = options.requires_android
+    deps_info['supports_android'] = options.supports_android
+
+    deps_require_android = (
+        [d['name'] for d in all_resources_deps] +
+        [d['name'] for d in all_library_deps if d['requires_android']])
+    deps_not_support_android = (
+        [d['name'] for d in all_library_deps if not d['supports_android']])
+
+    if deps_require_android and not options.requires_android:
+      raise Exception('Some deps require building for the Android platform: ' +
+          str(deps_require_android))
+
+    if deps_not_support_android and options.supports_android:
+      raise Exception('Not all deps support the Android platform: ' +
+          str(deps_not_support_android))
+
+  if options.type in ['java_library', 'android_apk']:
+    javac_classpath = [c['jar_path'] for c in direct_library_deps]
+    java_full_classpath = [c['jar_path'] for c in all_library_deps]
+    deps_info['resources_deps'] = [c['path'] for c in all_resources_deps]
+    deps_info['jar_path'] = options.jar_path
+    if options.type == 'android_apk' or options.supports_android:
+      deps_info['dex_path'] = options.dex_path
+    config['javac'] = {
+      'classpath': javac_classpath,
+    }
+    config['java'] = {
+      'full_classpath': java_full_classpath
+    }
+
+  if options.type == 'java_library':
+    # Only resources might have srcjars (normal srcjar targets are listed in
+    # srcjar_deps). A resource's srcjar contains the R.java file for those
+    # resources, and (like Android's default build system) we allow a library
+    # to refer to the resources in any of its dependencies.
+    config['javac']['srcjars'] = [
+        c['srcjar'] for c in direct_resources_deps if 'srcjar' in c]
+
+  if options.type == 'android_apk':
+    # Apks will get their resources srcjar explicitly passed to the java step.
+    config['javac']['srcjars'] = []
+
+  if options.type == 'android_resources':
+    deps_info['resources_zip'] = options.resources_zip
+    if options.srcjar:
+      deps_info['srcjar'] = options.srcjar
+    if options.android_manifest:
+      manifest = AndroidManifest(options.android_manifest)
+      deps_info['package_name'] = manifest.GetPackageName()
+    if options.package_name:
+      deps_info['package_name'] = options.package_name
+    if options.r_text:
+      deps_info['r_text'] = options.r_text
+
+  if options.type == 'android_resources' or options.type == 'android_apk':
+    config['resources'] = {}
+    config['resources']['dependency_zips'] = [
+        c['resources_zip'] for c in all_resources_deps]
+    config['resources']['extra_package_names'] = []
+    config['resources']['extra_r_text_files'] = []
+
+  if options.type == 'android_apk':
+    config['resources']['extra_package_names'] = [
+        c['package_name'] for c in all_resources_deps if 'package_name' in c]
+    config['resources']['extra_r_text_files'] = [
+        c['r_text'] for c in all_resources_deps if 'r_text' in c]
+
+  if options.type in ['android_apk', 'deps_dex']:
+    deps_dex_files = [c['dex_path'] for c in all_library_deps]
+
+  # An instrumentation test apk should exclude the dex files that are in the apk
+  # under test.
+  if options.type == 'android_apk' and options.tested_apk_config:
+    tested_apk_deps = Deps([options.tested_apk_config])
+    tested_apk_library_deps = tested_apk_deps.All('java_library')
+    tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
+    deps_dex_files = [
+        p for p in deps_dex_files if not p in tested_apk_deps_dex_files]
+
+    tested_apk_config = GetDepConfig(options.tested_apk_config)
+    expected_tested_package = tested_apk_config['package_name']
+    AndroidManifest(options.android_manifest).CheckInstrumentation(
+        expected_tested_package)
+
+  # Dependencies for the final dex file of an apk or a 'deps_dex'.
+  if options.type in ['android_apk', 'deps_dex']:
+    config['final_dex'] = {}
+    dex_config = config['final_dex']
+    # TODO(cjhopman): proguard version
+    dex_config['dependency_dex_files'] = deps_dex_files
+
+  if options.type == 'android_apk':
+    config['dist_jar'] = {
+      'dependency_jars': [
+        c['jar_path'] for c in all_library_deps
+      ]
+    }
+    manifest = AndroidManifest(options.android_manifest)
+    deps_info['package_name'] = manifest.GetPackageName()
+    if not options.tested_apk_config and manifest.GetInstrumentation():
+      # This must then have instrumentation only for itself.
+      manifest.CheckInstrumentation(manifest.GetPackageName())
+
+    library_paths = []
+    java_libraries_list = []
+    if options.native_libs:
+      libraries = build_utils.ParseGypList(options.native_libs)
+      if libraries:
+        libraries_dir = os.path.dirname(libraries[0])
+        write_ordered_libraries.SetReadelfPath(options.readelf_path)
+        write_ordered_libraries.SetLibraryDirs([libraries_dir])
+        all_native_library_deps = (
+            write_ordered_libraries.GetSortedTransitiveDependenciesForBinaries(
+                libraries))
+        # Create a java literal array with the "base" library names:
+        # e.g. libfoo.so -> foo
+        java_libraries_list = '{%s}' % ','.join(
+            ['"%s"' % s[3:-3] for s in all_native_library_deps])
+        library_paths = map(
+            write_ordered_libraries.FullLibraryPath, all_native_library_deps)
+
+      config['native'] = {
+        'libraries': library_paths,
+        'java_libraries_list': java_libraries_list
+      }
+
+  build_utils.WriteJson(config, options.build_config, only_if_changed=True)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        deps.AllConfigPaths() + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/write_ordered_libraries.py b/build/android/gyp/write_ordered_libraries.py
new file mode 100755
index 0000000..0fc9a8c
--- /dev/null
+++ b/build/android/gyp/write_ordered_libraries.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes dependency ordered list of native libraries.
+
+The list excludes any Android system libraries, as those are not bundled with
+the APK.
+
+This list of libraries is used for several steps of building an APK.
+In the component build, the --input-libraries only needs to be the top-level
+library (e.g. libcontent_shell_content_view). This will then use readelf to
+inspect the shared libraries and determine the full list of (non-system)
+libraries that should be included in the APK.
+"""
+
+# TODO(cjhopman): See if we can expose the list of library dependencies from
+# gyp, rather than calculating it ourselves.
+# http://crbug.com/225558
+
+import optparse
+import os
+import re
+import sys
+
+from util import build_utils
+
+_readelf = None
+_library_dirs = None
+
+_library_re = re.compile(
+    '.*NEEDED.*Shared library: \[(?P<library_name>.+)\]')
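+# The regex above matches 'readelf -d' NEEDED lines, which look like
+# (illustrative):
+#   0x0000000000000001 (NEEDED)  Shared library: [libfoo.so]
+# capturing 'libfoo.so' as library_name.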
+
+
+def SetReadelfPath(path):
+  global _readelf
+  _readelf = path
+
+
+def SetLibraryDirs(dirs):
+  global _library_dirs
+  _library_dirs = dirs
+
+
+def FullLibraryPath(library_name):
+  assert _library_dirs is not None
+  for directory in _library_dirs:
+    path = '%s/%s' % (directory, library_name)
+    if os.path.exists(path):
+      return path
+  return library_name
+
+
+def IsSystemLibrary(library_name):
+  # If the library doesn't exist in the libraries directory, assume that it is
+  # an Android system library.
+  return not os.path.exists(FullLibraryPath(library_name))
+
+
+def CallReadElf(library_or_executable):
+  assert _readelf is not None
+  readelf_cmd = [_readelf,
+                 '-d',
+                 FullLibraryPath(library_or_executable)]
+  return build_utils.CheckOutput(readelf_cmd)
+
+
+def GetDependencies(library_or_executable):
+  elf = CallReadElf(library_or_executable)
+  return set(_library_re.findall(elf))
+
+
+def GetNonSystemDependencies(library_name):
+  all_deps = GetDependencies(library_name)
+  return set((lib for lib in all_deps if not IsSystemLibrary(lib)))
+
+
+def GetSortedTransitiveDependencies(libraries):
+  """Returns all transitive library dependencies in dependency order."""
+  return build_utils.GetSortedTransitiveDependencies(
+      libraries, GetNonSystemDependencies)
+
+
+def GetSortedTransitiveDependenciesForBinaries(binaries):
+  if binaries[0].endswith('.so'):
+    libraries = [os.path.basename(lib) for lib in binaries]
+  else:
+    assert len(binaries) == 1
+    all_deps = GetDependencies(binaries[0])
+    libraries = [lib for lib in all_deps if not IsSystemLibrary(lib)]
+
+  return GetSortedTransitiveDependencies(libraries)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--input-libraries',
+      help='A list of top-level input libraries.')
+  parser.add_option('--libraries-dir',
+      help='The directory which contains shared libraries.')
+  parser.add_option('--readelf', help='Path to the readelf binary.')
+  parser.add_option('--output', help='Path to the generated .json file.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  SetReadelfPath(options.readelf)
+  SetLibraryDirs(options.libraries_dir.split(','))
+
+  libraries = build_utils.ParseGypList(options.input_libraries)
+  if len(libraries):
+    libraries = GetSortedTransitiveDependenciesForBinaries(libraries)
+
+  # Convert to "base" library names: e.g. libfoo.so -> foo
+  java_libraries_list = (
+      '{%s}' % ','.join(['"%s"' % s[3:-3] for s in libraries]))
+
+  out_json = {
+      'libraries': libraries,
+      'lib_paths': [FullLibraryPath(l) for l in libraries],
+      'java_libraries_list': java_libraries_list
+      }
+  build_utils.WriteJson(
+      out_json,
+      options.output,
+      only_if_changed=True)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        libraries + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
+
diff --git a/build/android/gyp/zip.py b/build/android/gyp/zip.py
new file mode 100755
index 0000000..51322df
--- /dev/null
+++ b/build/android/gyp/zip.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Archives a set of files.
+"""
+
+import optparse
+import sys
+
+from util import build_utils
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--input-dir', help='Directory of files to archive.')
+  parser.add_option('--output', help='Path to output archive.')
+  options, _ = parser.parse_args()
+
+  inputs = build_utils.FindInDirectory(options.input_dir, '*')
+  build_utils.DoZip(inputs, options.output, options.input_dir)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/host_heartbeat.py b/build/android/host_heartbeat.py
new file mode 100755
index 0000000..6a7cdd1
--- /dev/null
+++ b/build/android/host_heartbeat.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Sends a heart beat pulse to the currently online Android devices.
+This heart beat lets the devices know that they are connected to a host.
+"""
+# pylint: disable=W0702
+
+import sys
+import time
+
+from pylib.device import device_utils
+
+PULSE_PERIOD = 20
+
+def main():
+  while True:
+    try:
+      devices = device_utils.DeviceUtils.HealthyDevices()
+      for d in devices:
+        d.RunShellCommand(['touch', '/sdcard/host_heartbeat'],
+                          check_return=True)
+    except:
+      # Keep the heartbeat running, ignoring all errors.
+      pass
+    time.sleep(PULSE_PERIOD)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/increase_size_for_speed.gypi b/build/android/increase_size_for_speed.gypi
new file mode 100644
index 0000000..48d17f5
--- /dev/null
+++ b/build/android/increase_size_for_speed.gypi
@@ -0,0 +1,42 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included to optimize a target for speed
+# rather than for size on Android.
+# This is used in some carefully tailored targets and is not meant
+# to be included everywhere. Before adding the template to another target,
+# please ask in chromium-dev@. See crbug.com/411909
+
+{
+  'configurations': {
+    'Release': {
+      'target_conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['OS=="android"', {
+              'cflags!': ['-Os'],
+              'cflags': ['-O2'],
+            }],
+            # Do not merge -Os and -O2 in LTO.
+            # LTO merges all optimization options at link-time. -O2 takes
+            # precedence over -Os. Avoid using LTO simultaneously
+            # on -Os and -O2 parts for that reason.
+            ['OS=="android" and use_lto==1', {
+              'cflags!': [
+                '-flto',
+                '-ffat-lto-objects',
+              ],
+            }],
+            ['OS=="android" and use_lto_o2==1', {
+              'cflags': [
+                '-flto',
+                '-ffat-lto-objects',
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+  },
+}
diff --git a/build/android/insert_chromium_version.gypi b/build/android/insert_chromium_version.gypi
new file mode 100644
index 0000000..a6ff908
--- /dev/null
+++ b/build/android/insert_chromium_version.gypi
@@ -0,0 +1,53 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# inserts a chromium version string into native libraries.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'actions': [
+#      {
+#        'action_name': 'insert_chromium_version',
+#        'variables': {
+#          'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#          'stripped_libraries_dir': 'the directory containing the native libraries',
+#          'input_paths': 'files to be added to the list of inputs',
+#          'stamp': 'file to touch when the action is complete',
+#          'version_string': 'chromium version string to be inserted',
+#        },
+#        'includes': [ '../../build/android/insert_chromium_version.gypi' ],
+#      },
+#    ],
+#  },
+#
+
+{
+  'message': 'Inserting chromium version string into native libraries',
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/insert_chromium_version.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/insert_chromium_version.py',
+    '--android-objcopy=<(android_objcopy)',
+    '--stripped-libraries-dir=<(stripped_libraries_dir)',
+    '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+    '--version-string=<(version_string)',
+    '--stamp=<(stamp)',
+  ],
+  'conditions': [
+    ['component == "shared_library"', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+}
diff --git a/build/android/install_emulator_deps.py b/build/android/install_emulator_deps.py
new file mode 100755
index 0000000..82d1c75
--- /dev/null
+++ b/build/android/install_emulator_deps.py
@@ -0,0 +1,277 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Installs deps for using SDK emulator for testing.
+
+The script will download the SDK and system images, if they are not present, and
+install and enable KVM, if virtualization has been enabled in the BIOS.
+"""
+
+
+import logging
+import optparse
+import os
+import re
+import shutil
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import pexpect
+from pylib.utils import run_tests_helper
+
+# Android API level
+DEFAULT_ANDROID_API_LEVEL = constants.ANDROID_SDK_VERSION
+
+# From the Android Developer's website.
+# Keep this up to date; the user can install older API levels as necessary.
+SDK_BASE_URL = 'http://dl.google.com/android/adt'
+SDK_ZIP = 'adt-bundle-linux-x86_64-20131030.zip'
+
+# pylint: disable=line-too-long
+# Android x86 system image from the Intel website:
+# http://software.intel.com/en-us/articles/intel-eula-x86-android-4-2-jelly-bean-bin
+# These don't exist prior to Android-15; the URLs below cover the levels that
+# are currently available.
+X86_IMG_URLS = {
+  15: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-15_r01.zip',
+  16: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-16_r01.zip',
+  17: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-17_r01.zip',
+  18: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-18_r01.zip',
+  19: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-19_r01.zip'}
+#pylint: enable=line-too-long
+
+def CheckSDK():
+  """Check if SDK is already installed.
+
+  Returns:
+    True if the emulator SDK directory (src/android_emulator_sdk/) exists.
+  """
+  return os.path.exists(constants.EMULATOR_SDK_ROOT)
+
+
+def CheckSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Check if the "SDK Platform" for the specified API level is installed.
+     This is necessary in order for the emulator to run when the target
+     is specified.
+
+  Args:
+    api_level: the Android API level to check; defaults to the latest API.
+
+  Returns:
+    True if the platform is already installed.
+  """
+  android_binary = os.path.join(constants.EMULATOR_SDK_ROOT,
+                                'sdk', 'tools', 'android')
+  pattern = re.compile('id: [0-9]+ or "android-%d"' % api_level)
+  try:
+    exit_code, stdout = cmd_helper.GetCmdStatusAndOutput(
+        [android_binary, 'list'])
+    if exit_code != 0:
+      raise Exception('\'android list\' command failed')
+    for line in stdout.split('\n'):
+      if pattern.match(line):
+        return True
+    return False
+  except OSError:
+    logging.exception('Unable to execute \'android list\'')
+    return False
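+
+# With api_level=19, a line matched by the pattern above looks like
+# (illustrative):
+#   id: 9 or "android-19"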
+
+
+def CheckX86Image(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Check if Android system images have been installed.
+
+  Args:
+    api_level: the Android API level to check for; defaults to the latest API.
+
+  Returns:
+    True if sdk/system-images/android-<api_level>/x86 exists inside
+    EMULATOR_SDK_ROOT.
+  """
+  api_target = 'android-%d' % api_level
+  return os.path.exists(os.path.join(constants.EMULATOR_SDK_ROOT,
+                                     'sdk', 'system-images',
+                                     api_target, 'x86'))
+
+
+def CheckKVM():
+  """Quickly check whether KVM is enabled.
+
+  Returns:
+    True iff /dev/kvm exists (Linux only).
+  """
+  return os.path.exists('/dev/kvm')
+
+
+def RunKvmOk():
+  """Run kvm-ok as root to check that KVM is properly enabled after installation
+     of the required packages.
+
+  Returns:
+    True iff KVM is enabled (/dev/kvm exists). On failure, returns False
+    but also prints detailed information explaining why KVM isn't enabled
+    (e.g. the CPU doesn't support it, or the BIOS disabled it).
+  """
+  try:
+    # Note: kvm-ok is in /usr/sbin, so always use 'sudo' to run it.
+    return not cmd_helper.RunCmd(['sudo', 'kvm-ok'])
+  except OSError:
+    logging.info('kvm-ok not installed')
+    return False
+
+
+def GetSDK():
+  """Download the SDK and unzip it into EMULATOR_SDK_ROOT."""
+  logging.info('Download Android SDK.')
+  sdk_url = '%s/%s' % (SDK_BASE_URL, SDK_ZIP)
+  try:
+    cmd_helper.RunCmd(['curl', '-o', '/tmp/sdk.zip', sdk_url])
+    print 'Download finished; unzipping...'
+    rc = cmd_helper.RunCmd(['unzip', '-o', '/tmp/sdk.zip', '-d', '/tmp/'])
+    if rc:
+      raise Exception('ERROR: could not download/unzip Android SDK.')
+    # Get the name of the sub-directory that everything will be extracted to.
+    dirname, _ = os.path.splitext(SDK_ZIP)
+    zip_dir = '/tmp/%s' % dirname
+    # Move the extracted directory to EMULATOR_SDK_ROOT
+    shutil.move(zip_dir, constants.EMULATOR_SDK_ROOT)
+  finally:
+    os.unlink('/tmp/sdk.zip')
+
+
+def InstallKVM():
+  """Installs KVM packages."""
+  rc = cmd_helper.RunCmd(['sudo', 'apt-get', 'install', 'kvm'])
+  if rc:
+    logging.critical('ERROR: Did not install KVM. Make sure hardware '
+                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
+                     'AMD SVM).')
+  # TODO(navabi): Use modprobe kvm-amd on AMD processors.
+  rc = cmd_helper.RunCmd(['sudo', 'modprobe', 'kvm-intel'])
+  if rc:
+    logging.critical('ERROR: Did not add KVM module to Linux Kernel. Make sure '
+                     'hardware virtualization is enabled in BIOS.')
+  # Now check to ensure KVM acceleration can be used.
+  if not RunKvmOk():
+    logging.critical('ERROR: Can not use KVM acceleration. Make sure hardware '
+                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
+                     'AMD SVM).')
+
+
+def GetX86Image(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Download x86 system image from Intel's website.
+
+  Args:
+    api_level: the Android API level to download for.
+  """
+  logging.info('Downloading the x86 system image into the sdk directory.')
+  # TODO(andrewhayden): Use python tempfile lib instead
+  temp_file = '/tmp/x86_img_android-%d.zip' % api_level
+  if api_level not in X86_IMG_URLS:
+    raise Exception('ERROR: no URL known for x86 image for android-%s' %
+                    api_level)
+  try:
+    cmd_helper.RunCmd(['curl', '-o', temp_file, X86_IMG_URLS[api_level]])
+    rc = cmd_helper.RunCmd(['unzip', '-o', temp_file, '-d', '/tmp/'])
+    if rc:
+      raise Exception('ERROR: Could not download/unzip image zip.')
+    api_target = 'android-%d' % api_level
+    sys_imgs = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk',
+                            'system-images', api_target, 'x86')
+    logging.info('Deploying system image to %s' % sys_imgs)
+    shutil.move('/tmp/x86', sys_imgs)
+  finally:
+    os.unlink(temp_file)
+
+
+def GetSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Update the SDK to include the platform specified.
+
+  Args:
+    api_level: the Android API level to download
+  """
+  android_binary = os.path.join(constants.EMULATOR_SDK_ROOT,
+                                'sdk', 'tools', 'android')
+  pattern = re.compile(
+      r'\s*([0-9]+)- SDK Platform Android [\.,0-9]+, API %d.*' % api_level)
+  # Example:
+  #   2- SDK Platform Android 4.3, API 18, revision 2
+  exit_code, stdout = cmd_helper.GetCmdStatusAndOutput(
+      [android_binary, 'list', 'sdk'])
+  if exit_code != 0:
+    raise Exception('\'android list sdk\' command returned %d' % exit_code)
+  for line in stdout.split('\n'):
+    match = pattern.match(line)
+    if match:
+      index = match.group(1)
+      print 'package %s corresponds to platform level %d' % (index, api_level)
+      # update sdk --no-ui --filter $INDEX
+      update_command = [android_binary,
+                        'update', 'sdk', '--no-ui', '--filter', index]
+      update_command_str = ' '.join(update_command)
+      logging.info('running update command: %s' % update_command_str)
+      update_process = pexpect.spawn(update_command_str)
+      # TODO(andrewhayden): Do we need to bug the user about this?
+      if update_process.expect('Do you accept the license') != 0:
+        raise Exception('License agreement check failed')
+      update_process.sendline('y')
+      if update_process.expect('Done. 1 package installed.') == 0:
+        print 'Successfully installed platform for API level %d' % api_level
+        return
+      else:
+        raise Exception('Failed to install platform update')
+  raise Exception('Could not find android-%d update for the SDK!' % api_level)
+
+
+def main(argv):
+  opt_parser = optparse.OptionParser(
+      description='Install dependencies for running the Android emulator')
+  opt_parser.add_option('--api-level', dest='api_level',
+      help='The API level (e.g., 19 for Android 4.4) to ensure is available',
+      type='int', default=DEFAULT_ANDROID_API_LEVEL)
+  opt_parser.add_option('-v', dest='verbose', action='store_true',
+      help='enable verbose logging')
+  options, _ = opt_parser.parse_args(argv[1:])
+
+  # run_tests_helper will set logging to INFO or DEBUG
+  # We achieve verbose output by configuring it with 2 (==DEBUG)
+  verbosity = 1
+  if options.verbose:
+    verbosity = 2
+  logging.basicConfig(level=logging.INFO,
+                      format='# %(asctime)-15s: %(message)s')
+  run_tests_helper.SetLogLevel(verbose_count=verbosity)
+
+  # Calls below will download emulator SDK and/or system images only if needed.
+  if CheckSDK():
+    logging.info('android_emulator_sdk/ already exists, skipping download.')
+  else:
+    GetSDK()
+
+  # Check target. The target has to be installed in order to run the emulator.
+  if CheckSDKPlatform(options.api_level):
+    logging.info('SDK platform android-%d already present, skipping.' %
+                 options.api_level)
+  else:
+    logging.info('SDK platform android-%d not present, installing.' %
+                 options.api_level)
+    GetSDKPlatform(options.api_level)
+
+  # Download the x86 system image only if needed.
+  if CheckX86Image(options.api_level):
+    logging.info('x86 image for android-%d already present, skipping.' %
+                 options.api_level)
+  else:
+    GetX86Image(options.api_level)
+
+  # Make sure KVM packages are installed and enabled.
+  if CheckKVM():
+    logging.info('KVM already installed and enabled.')
+  else:
+    InstallKVM()
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/instr_action.gypi b/build/android/instr_action.gypi
new file mode 100644
index 0000000..fa6d062
--- /dev/null
+++ b/build/android/instr_action.gypi
@@ -0,0 +1,53 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# instruments either java class files or jars.
+
+{
+  'variables': {
+    'instr_type%': 'jar',
+    'input_path%': '',
+    'output_path%': '',
+    'stamp_path%': '',
+    'extra_instr_args': [
+      '--coverage-file=<(_target_name).em',
+      '--sources-file=<(_target_name)_sources.txt',
+    ],
+    'emma_jar': '<(android_sdk_root)/tools/lib/emma.jar',
+    'conditions': [
+      ['emma_instrument != 0', {
+        'extra_instr_args': [
+          '--sources=<(java_in_dir)/src >(additional_src_dirs) >(generated_src_dirs)',
+          '--src-root=<(DEPTH)',
+          '--emma-jar=<(emma_jar)',
+          '--filter-string=<(emma_filter)',
+        ],
+        'conditions': [
+          ['instr_type == "jar"', {
+            'instr_action': 'instrument_jar',
+          }, {
+            'instr_action': 'instrument_classes',
+          }]
+        ],
+      }, {
+        'instr_action': 'copy',
+        'extra_instr_args': [],
+      }]
+    ]
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/emma_instr.py',
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/pylib/utils/command_option_parser.py',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/emma_instr.py',
+    '<(instr_action)',
+    '--input-path=<(input_path)',
+    '--output-path=<(output_path)',
+    '--stamp=<(stamp_path)',
+    '<@(extra_instr_args)',
+  ]
+}
diff --git a/build/android/java_cpp_enum.gypi b/build/android/java_cpp_enum.gypi
new file mode 100644
index 0000000..d4abafa
--- /dev/null
+++ b/build/android/java_cpp_enum.gypi
@@ -0,0 +1,64 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide an action
+# to generate Java source files from a C++ header file containing annotated
+# enum definitions using a Python script.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'bitmap_format_java',
+#    'type': 'none',
+#    'variables': {
+#      'source_file': 'ui/android/bitmap_format.h',
+#    },
+#    'includes': [ '../build/android/java_cpp_enum.gypi' ],
+#  },
+#
+# Then have the gyp target which compiles the java code depend on the newly
+# created target.
+
+{
+  'variables': {
+    # Location where all generated Java sources will be placed.
+    'output_dir': '<(SHARED_INTERMEDIATE_DIR)/enums/<(_target_name)',
+    'generator_path': '<(DEPTH)/build/android/gyp/java_cpp_enum.py',
+    'generator_args': '<(output_dir) <(source_file)',
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      # Ensure that the output directory is used in the class path
+      # when building targets that depend on this one.
+      'generated_src_dirs': [
+        '<(output_dir)/',
+      ],
+      # Ensure that the targets depending on this one are rebuilt if the sources
+      # of this one are modified.
+      'additional_input_paths': [
+        '<(source_file)',
+      ],
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'generate_java_constants',
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(generator_path)',
+        '<(source_file)',
+      ],
+      'outputs': [
+        # This is the main reason this is an action and not a rule. Gyp doesn't
+        # properly expand RULE_INPUT_PATH here and so it's impossible to
+        # calculate the list of outputs.
+        '<!@pymod_do_main(java_cpp_enum --print_output_only '
+            '<@(generator_args))',
+      ],
+      'action': [
+        'python', '<(generator_path)', '<@(generator_args)'
+      ],
+      'message': 'Generating Java from cpp header <(source_file)',
+    },
+  ],
+}
diff --git a/build/android/java_cpp_template.gypi b/build/android/java_cpp_template.gypi
new file mode 100644
index 0000000..3296659
--- /dev/null
+++ b/build/android/java_cpp_template.gypi
@@ -0,0 +1,81 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate Java source files from templates that are processed
+# through the host C pre-processor.
+#
+# NOTE: For generating Java counterparts to enums prefer using the java_cpp_enum
+#       rule instead.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'android_net_java_constants',
+#    'type': 'none',
+#    'sources': [
+#      'net/android/NetError.template',
+#    ],
+#    'variables': {
+#      'package_name': 'org/chromium/net',
+#      'template_deps': ['base/net_error_list.h'],
+#    },
+#    'includes': [ '../build/android/java_cpp_template.gypi' ],
+#  },
+#
+# The 'sources' entry should only list the template file. The template file
+# itself should use the 'ClassName.template' format, and will generate
+# 'gen/templates/<target-name>/<package-name>/ClassName.java'. The files which
+# the template depends on, and which are typically included by the template,
+# should be listed in the template_deps variable. Any change to them will
+# force a rebuild of the template, and hence of any source that depends on it.
+#
+
+{
+  # Location where all generated Java sources will be placed.
+  'variables': {
+    'include_path%': '<(DEPTH)',
+    'output_dir': '<(SHARED_INTERMEDIATE_DIR)/templates/<(_target_name)/<(package_name)',
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      # Ensure that the output directory is used in the class path
+      # when building targets that depend on this one.
+      'generated_src_dirs': [
+        '<(output_dir)/',
+      ],
+      # Ensure dependents are rebuilt when sources for this rule change.
+      'additional_input_paths': [
+        '<@(_sources)',
+        '<@(template_deps)',
+      ],
+    },
+  },
+  # Define a single rule that will be applied to each .template file
+  # listed in 'sources'.
+  'rules': [
+    {
+      'rule_name': 'generate_java_constants',
+      'extension': 'template',
+      # Set template_deps as additional dependencies.
+      'variables': {
+        'output_path': '<(output_dir)/<(RULE_INPUT_ROOT).java',
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+        '<@(template_deps)'
+      ],
+      'outputs': [
+        '<(output_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+        '--include-path=<(include_path)',
+        '--output=<(output_path)',
+        '--template=<(RULE_INPUT_PATH)',
+      ],
+      'message': 'Generating Java from cpp template <(RULE_INPUT_PATH)',
+    }
+  ],
+}
diff --git a/build/android/jinja_template.gypi b/build/android/jinja_template.gypi
new file mode 100644
index 0000000..9c49360
--- /dev/null
+++ b/build/android/jinja_template.gypi
@@ -0,0 +1,85 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to process one or more
+# Jinja templates.
+#
+# To process a single template file, create a gyp target with the following
+# form:
+#  {
+#    'target_name': 'chrome_shell_manifest',
+#    'type': 'none',
+#    'variables': {
+#      'jinja_inputs': ['android/shell/java/AndroidManifest.xml'],
+#      'jinja_output': '<(SHARED_INTERMEDIATE_DIR)/chrome_shell_manifest/AndroidManifest.xml',
+#      'jinja_variables': ['app_name=ChromeShell'],
+#    },
+#    'includes': [ '../build/android/jinja_template.gypi' ],
+#  },
+#
+# To process multiple template files and package the results into a zip file,
+# create a gyp target with the following form:
+#  {
+#    'target_name': 'chrome_template_resources',
+#    'type': 'none',
+#    'variables': {
+#       'jinja_inputs_base_dir': 'android/shell/java/res_template',
+#       'jinja_inputs': [
+#         '<(jinja_inputs_base_dir)/xml/searchable.xml',
+#         '<(jinja_inputs_base_dir)/xml/syncadapter.xml',
+#       ],
+#       'jinja_outputs_zip': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+#       'jinja_variables': ['app_name=ChromeShell'],
+#     },
+#     'includes': [ '../build/android/jinja_template.gypi' ],
+#   },
+#
+
+{
+  'actions': [
+    {
+      'action_name': '<(_target_name)_jinja_template',
+      'message': 'processing jinja template',
+      'variables': {
+        'jinja_output%': '',
+        'jinja_outputs_zip%': '',
+        'jinja_inputs_base_dir%': '',
+        'jinja_includes%': [],
+        'jinja_variables%': [],
+        'jinja_args': [],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/jinja_template.py',
+        '<@(jinja_inputs)',
+        '<@(jinja_includes)',
+      ],
+      'conditions': [
+        ['jinja_output != ""', {
+          'outputs': [ '<(jinja_output)' ],
+          'variables': {
+            'jinja_args': ['--output', '<(jinja_output)'],
+          },
+        }],
+        ['jinja_outputs_zip != ""', {
+          'outputs': [ '<(jinja_outputs_zip)' ],
+          'variables': {
+            'jinja_args': ['--outputs-zip', '<(jinja_outputs_zip)'],
+          },
+        }],
+        ['jinja_inputs_base_dir != ""', {
+          'variables': {
+            'jinja_args': ['--inputs-base-dir', '<(jinja_inputs_base_dir)'],
+          },
+        }],
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/jinja_template.py',
+        '--inputs', '<(jinja_inputs)',
+        '--variables', '<(jinja_variables)',
+        '<@(jinja_args)',
+      ],
+    },
+  ],
+}
diff --git a/build/android/lighttpd_server.py b/build/android/lighttpd_server.py
new file mode 100755
index 0000000..a5195ac
--- /dev/null
+++ b/build/android/lighttpd_server.py
@@ -0,0 +1,256 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a convenient wrapper for spawning a test lighttpd instance.
+
+Usage:
+  lighttpd_server PATH_TO_DOC_ROOT
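+
+Example (programmatic use; a sketch, and the document root path below is
+hypothetical):
+
+  server = LighttpdServer('/path/to/doc_root')
+  if server.StartupHttpServer():
+    print 'Serving on http://127.0.0.1:%s' % server.port
+    server.ShutdownHttpServer()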
+"""
+
+import codecs
+import contextlib
+import httplib
+import os
+import random
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pylib import constants
+from pylib import pexpect
+
+class LighttpdServer(object):
+  """Wraps lighttpd server, providing robust startup.
+
+  Args:
+    document_root: Path to root of this server's hosted files.
+    port: TCP port on the _host_ machine that the server will listen on. If
+        omitted it will attempt to use 9000, or if unavailable it will find
+        a free port from 8001 - 8999.
+    lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+    base_config_path: If supplied this file will replace the built-in default
+        lighttpd config file.
+    extra_config_contents: If specified, this string will be appended to the
+        base config (default built-in, or from base_config_path).
+    config_path, error_log, access_log: Optional paths where the class should
+        place temporary files for this session.
+  """
+
+  def __init__(self, document_root, port=None,
+               lighttpd_path=None, lighttpd_module_path=None,
+               base_config_path=None, extra_config_contents=None,
+               config_path=None, error_log=None, access_log=None):
+    self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+    self.document_root = os.path.abspath(document_root)
+    self.fixed_port = port
+    self.port = port or constants.LIGHTTPD_DEFAULT_PORT
+    self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+    self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+    self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+    self.base_config_path = base_config_path
+    self.extra_config_contents = extra_config_contents
+    self.config_path = config_path or self._Mktmp('config')
+    self.error_log = error_log or self._Mktmp('error_log')
+    self.access_log = access_log or self._Mktmp('access_log')
+    self.pid_file = self._Mktmp('pid_file')
+    self.process = None
+
+  def _Mktmp(self, name):
+    return os.path.join(self.temp_dir, name)
+
+  @staticmethod
+  def _GetRandomPort():
+    # The port range for test servers is defined in constants.py.
+    return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST,
+                          constants.LIGHTTPD_RANDOM_PORT_LAST)
+
+  def StartupHttpServer(self):
+    """Starts up a http server with specified document root and port."""
+    # If we want a specific port, make sure no one else is listening on it.
+    if self.fixed_port:
+      self._KillProcessListeningOnPort(self.fixed_port)
+    while True:
+      if self.base_config_path:
+        # Read the config
+        with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+          config_contents = f.read()
+      else:
+        config_contents = self._GetDefaultBaseConfig()
+      if self.extra_config_contents:
+        config_contents += self.extra_config_contents
+      # Write out the config, filling in placeholders from the members of |self|
+      with codecs.open(self.config_path, 'w', 'utf-8') as f:
+        f.write(config_contents % self.__dict__)
+      if (not os.path.exists(self.lighttpd_path) or
+          not os.access(self.lighttpd_path, os.X_OK)):
+        raise EnvironmentError(
+            'Could not find lighttpd at %s.\n'
+            'It may need to be installed (e.g. sudo apt-get install lighttpd)'
+            % self.lighttpd_path)
+      self.process = pexpect.spawn(self.lighttpd_path,
+                                   ['-D', '-f', self.config_path,
+                                    '-m', self.lighttpd_module_path],
+                                   cwd=self.temp_dir)
+      client_error, server_error = self._TestServerConnection()
+      if not client_error:
+        assert int(open(self.pid_file, 'r').read()) == self.process.pid
+        break
+      self.process.close()
+
+      if self.fixed_port or 'in use' not in server_error:
+        print 'Client error:', client_error
+        print 'Server error:', server_error
+        return False
+      self.port = self._GetRandomPort()
+    return True
+
+  def ShutdownHttpServer(self):
+    """Shuts down our lighttpd processes."""
+    if self.process:
+      self.process.terminate()
+    shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+  def _TestServerConnection(self):
+    # Wait for server to start
+    server_msg = ''
+    for timeout in xrange(1, 5):
+      client_error = None
+      try:
+        with contextlib.closing(httplib.HTTPConnection(
+            '127.0.0.1', self.port, timeout=timeout)) as http:
+          http.set_debuglevel(timeout > 3)
+          http.request('HEAD', '/')
+          r = http.getresponse()
+          r.read()
+          if (r.status == 200 and r.reason == 'OK' and
+              r.getheader('Server') == self.server_tag):
+            return (None, server_msg)
+          client_error = ('Bad response: %s %s version %s\n  ' %
+                          (r.status, r.reason, r.version) +
+                          '\n  '.join([': '.join(h) for h in r.getheaders()]))
+      except (httplib.HTTPException, socket.error) as client_error:
+        pass  # Probably too quick connecting: try again
+      # Check for server startup error messages
+      ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'],
+                               timeout=timeout)
+      if ix == 2:  # stdout spew from the server
+        server_msg += self.process.match.group(0)
+      elif ix == 1:  # EOF -- server has quit, so give up.
+        client_error = client_error or 'Server exited'
+        break
+    return (client_error or 'Timeout', server_msg)
+
+  @staticmethod
+  def _KillProcessListeningOnPort(port):
+    """Checks if there is a process listening on port number |port| and
+    terminates it if found.
+
+    Args:
+      port: Port number to check.
+    """
+    if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0:
+      # Give the process some time to terminate and check that it is gone.
+      time.sleep(2)
+      assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \
+          'Unable to kill process listening on port %d.' % port
+
+  @staticmethod
+  def _GetDefaultBaseConfig():
+    return """server.tag                  = "%(server_tag)s"
+server.modules              = ( "mod_access",
+                                "mod_accesslog",
+                                "mod_alias",
+                                "mod_cgi",
+                                "mod_rewrite" )
+
+# default document root required
+#server.document-root = "."
+
+# files to check for if .../ is requested
+index-file.names            = ( "index.php", "index.pl", "index.cgi",
+                                "index.html", "index.htm", "default.htm" )
+# mimetype mapping
+mimetype.assign             = (
+  ".gif"          =>      "image/gif",
+  ".jpg"          =>      "image/jpeg",
+  ".jpeg"         =>      "image/jpeg",
+  ".png"          =>      "image/png",
+  ".svg"          =>      "image/svg+xml",
+  ".css"          =>      "text/css",
+  ".html"         =>      "text/html",
+  ".htm"          =>      "text/html",
+  ".xhtml"        =>      "application/xhtml+xml",
+  ".xhtmlmp"      =>      "application/vnd.wap.xhtml+xml",
+  ".js"           =>      "application/x-javascript",
+  ".log"          =>      "text/plain",
+  ".conf"         =>      "text/plain",
+  ".text"         =>      "text/plain",
+  ".txt"          =>      "text/plain",
+  ".dtd"          =>      "text/xml",
+  ".xml"          =>      "text/xml",
+  ".manifest"     =>      "text/cache-manifest",
+ )
+
+# Use the "Content-Type" extended attribute to obtain mime type if possible
+mimetype.use-xattr          = "enable"
+
+##
+# which extensions should not be handled via static-file transfer
+#
+# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi
+static-file.exclude-extensions = ( ".php", ".pl", ".cgi" )
+
+server.bind = "127.0.0.1"
+server.port = %(port)s
+
+## virtual directory listings
+dir-listing.activate        = "enable"
+#dir-listing.encoding       = "iso-8859-2"
+#dir-listing.external-css   = "style/oldstyle.css"
+
+## enable debugging
+#debug.log-request-header   = "enable"
+#debug.log-response-header  = "enable"
+#debug.log-request-handling = "enable"
+#debug.log-file-not-found   = "enable"
+
+#### SSL engine
+#ssl.engine                 = "enable"
+#ssl.pemfile                = "server.pem"
+
+# Autogenerated test-specific config follows.
+
+cgi.assign = ( ".cgi"  => "/usr/bin/env",
+               ".pl"   => "/usr/bin/env",
+               ".asis" => "/bin/cat",
+               ".php"  => "/usr/bin/php-cgi" )
+
+server.errorlog = "%(error_log)s"
+accesslog.filename = "%(access_log)s"
+server.upload-dirs = ( "/tmp" )
+server.pid-file = "%(pid_file)s"
+server.document-root = "%(document_root)s"
+
+"""
+
+
+def main(argv):
+  server = LighttpdServer(*argv[1:])
+  try:
+    if server.StartupHttpServer():
+      raw_input('Server running at http://127.0.0.1:%s -'
+                ' press Enter to exit it.' % server.port)
+    else:
+      print 'Server exit code:', server.process.exitstatus
+  finally:
+    server.ShutdownHttpServer()
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/lint/OWNERS b/build/android/lint/OWNERS
new file mode 100644
index 0000000..cd396e7
--- /dev/null
+++ b/build/android/lint/OWNERS
@@ -0,0 +1,2 @@
+newt@chromium.org
+aurimas@chromium.org
diff --git a/build/android/lint/suppress.py b/build/android/lint/suppress.py
new file mode 100755
index 0000000..52d7579
--- /dev/null
+++ b/build/android/lint/suppress.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Add all generated lint_result.xml files to suppressions.xml"""
+
+
+import collections
+import optparse
+import os
+import sys
+from xml.dom import minidom
+
+_BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(_BUILD_ANDROID_DIR)
+
+from pylib import constants
+
+
+_THIS_FILE = os.path.abspath(__file__)
+_CONFIG_PATH = os.path.join(os.path.dirname(_THIS_FILE), 'suppressions.xml')
+_DOC = (
+    '\nSTOP! It looks like you want to suppress some lint errors:\n'
+    '- Have you tried identifying the offending patch?\n'
+    '  Ask the author for a fix and/or revert the patch.\n'
+    '- It is preferred to add suppressions in the code instead of\n'
+    '  sweeping it under the rug here. See:\n\n'
+    '    http://developer.android.com/tools/debugging/improving-w-lint.html\n'
+    '\n'
+    'Still reading?\n'
+    '- You can edit this file manually to suppress an issue\n'
+    '  globally if it is not applicable to the project.\n'
+    '- You can also automatically add issues found so far in the\n'
+    '  build process by running:\n\n'
+    '    ' + os.path.relpath(_THIS_FILE, constants.DIR_SOURCE_ROOT) + '\n\n'
+    '  which will generate this file (comments are not preserved).\n'
+    '  Note: PRODUCT_DIR will be substituted at run-time with the actual\n'
+    '  directory path (e.g. out/Debug).\n'
+)
+
+
+_Issue = collections.namedtuple('Issue', ['severity', 'paths'])
+
+
+def _ParseConfigFile(config_path):
+  print 'Parsing %s' % config_path
+  issues_dict = {}
+  dom = minidom.parse(config_path)
+  for issue in dom.getElementsByTagName('issue'):
+    issue_id = issue.attributes['id'].value
+    severity = issue.getAttribute('severity')
+    paths = set(
+        [p.attributes['path'].value for p in
+         issue.getElementsByTagName('ignore')])
+    issues_dict[issue_id] = _Issue(severity, paths)
+  return issues_dict
+
+
+def _ParseAndMergeResultFile(result_path, issues_dict):
+  print 'Parsing and merging %s' % result_path
+  dom = minidom.parse(result_path)
+  for issue in dom.getElementsByTagName('issue'):
+    issue_id = issue.attributes['id'].value
+    severity = issue.attributes['severity'].value
+    path = issue.getElementsByTagName('location')[0].attributes['file'].value
+    if issue_id not in issues_dict:
+      issues_dict[issue_id] = _Issue(severity, set())
+    issues_dict[issue_id].paths.add(path)
+
+
+def _WriteConfigFile(config_path, issues_dict):
+  new_dom = minidom.getDOMImplementation().createDocument(None, 'lint', None)
+  top_element = new_dom.documentElement
+  top_element.appendChild(new_dom.createComment(_DOC))
+  for issue_id in sorted(issues_dict.keys()):
+    severity = issues_dict[issue_id].severity
+    paths = issues_dict[issue_id].paths
+    issue = new_dom.createElement('issue')
+    issue.attributes['id'] = issue_id
+    if severity:
+      issue.attributes['severity'] = severity
+    if severity == 'ignore':
+      print 'Warning: [%s] is suppressed globally.' % issue_id
+    else:
+      for path in sorted(paths):
+        ignore = new_dom.createElement('ignore')
+        ignore.attributes['path'] = path
+        issue.appendChild(ignore)
+    top_element.appendChild(issue)
+
+  with open(config_path, 'w') as f:
+    f.write(new_dom.toprettyxml(indent='  ', encoding='utf-8'))
+  print 'Updated %s' % config_path
+
+
+def _Suppress(config_path, result_path):
+  issues_dict = _ParseConfigFile(config_path)
+  _ParseAndMergeResultFile(result_path, issues_dict)
+  _WriteConfigFile(config_path, issues_dict)
+
+
+def main():
+  parser = optparse.OptionParser(usage='%prog RESULT-FILE')
+  _, args = parser.parse_args()
+
+  if len(args) != 1 or not os.path.exists(args[0]):
+    parser.error('Must provide RESULT-FILE')
+
+  _Suppress(_CONFIG_PATH, args[0])
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/lint/suppressions.xml b/build/android/lint/suppressions.xml
new file mode 100644
index 0000000..cb77c1f
--- /dev/null
+++ b/build/android/lint/suppressions.xml
@@ -0,0 +1,111 @@
+<?xml version="1.0" encoding="utf-8"?>
+<lint>
+  <!--
+STOP! It looks like you want to suppress some lint errors:
+- Have you tried identifying the offending patch?
+  Ask the author for a fix and/or revert the patch.
+- It is preferred to add suppressions in the code instead of
+  sweeping it under the rug here. See:
+
+    http://developer.android.com/tools/debugging/improving-w-lint.html
+
+Still reading?
+- You can edit this file manually to suppress an issue
+  globally if it is not applicable to the project.
+- You can also automatically add issues found so far in the
+  build process by running:
+
+    build/android/lint/suppress.py
+
+  which will generate this file (comments are not preserved).
+  Note: PRODUCT_DIR will be substituted at run-time with the actual
+  directory path (e.g. out/Debug).
+-->
+  <issue id="AllowBackup">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="Assert" severity="ignore"/>
+  <issue id="CommitPrefEdits">
+    <ignore path="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/ticl/android2/channel/AndroidChannelPreferences.java"/>
+  </issue>
+  <issue id="DefaultLocale">
+    <ignore path="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/external/client/contrib/AndroidListenerState.java"/>
+  </issue>
+  <issue id="DrawAllocation">
+    <ignore path="content/public/android/java/src/org/chromium/content/browser/ContentViewRenderView.java"/>
+    <ignore path="content/public/android/java/src/org/chromium/content/browser/PopupZoomer.java"/>
+  </issue>
+  <issue id="ExportedContentProvider">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="HandlerLeak">
+    <ignore path="remoting/android/java/src/org/chromium/chromoting/TapGestureDetector.java"/>
+  </issue>
+  <issue id="HardcodedDebugMode" severity="Fatal">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="IconDensities">
+    <!-- crbug.com/457918 is tracking missing assets -->
+    <ignore path="components/web_contents_delegate_android/android/java/res/drawable-xxhdpi"/>
+    <ignore path="components/web_contents_delegate_android/android/java/res/drawable-xxxhdpi"/>
+    <ignore path="content/public/android/java/res/drawable-xxhdpi"/>
+    <ignore path="content/public/android/java/res/drawable-xxxhdpi"/>
+    <ignore path="chrome/android/java/res/drawable-xxhdpi"/>
+    <ignore path="chrome/android/java/res/drawable-xxxhdpi"/>
+    <ignore path="ui/android/java/res/drawable-xxhdpi"/>
+    <ignore path="ui/android/java/res/drawable-xxxhdpi"/>
+    <ignore regexp=".*: reader_mode_bar_background.9.png, tabs_moved_htc.png, tabs_moved_nexus.png, tabs_moved_samsung.png$"/>
+  </issue>
+  <issue id="IconLocation">
+    <!-- It is OK for content_shell_apk and chrome_shell_apk to have missing assets. -->
+    <ignore path="content/shell/android/java/res/"/>
+    <ignore path="chrome/android/shell/res/"/>
+    <!-- Suppression for chrome/test/chromedriver/test/webview_shell/java/res/drawable/icon.png -->
+    <ignore path="res/drawable/icon.png"/>
+    <!-- TODO(lambroslambrou) remove this once crbug.com/502030 is fixed. -->
+    <ignore path="remoting/android/java/res"/>
+  </issue>
+  <issue id="InconsistentLayout" severity="ignore"/>
+  <issue id="InflateParams" severity="ignore"/>
+  <issue id="MissingApplicationIcon" severity="ignore"/>
+  <issue id="MissingRegistered" severity="ignore"/>
+  <issue id="MissingVersion">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="InlinedApi" severity="ignore"/>
+  <issue id="NewApi">
+    <ignore regexp="Attribute `paddingStart` referenced here can result in a crash on some specific devices older than API 17"/>
+    <ignore path="org/chromium/base/AnimationFrameTimeHistogram$Recorder.class"/>
+    <ignore path="org/chromium/base/JavaHandlerThread.class"/>
+    <ignore path="org/chromium/base/SysUtils.class"/>
+    <ignore path="org/chromium/chrome/browser/TtsPlatformImpl.class"/>
+    <ignore path="org/chromium/chrome/browser/TtsPlatformImpl$*.class"/>
+    <ignore path="chrome/android/java/res/values-v17/styles.xml"/>
+  </issue>
+  <issue id="OldTargetApi">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="Overdraw" severity="ignore"/>
+  <issue id="Recycle" severity="ignore"/>
+  <issue id="Registered" severity="ignore"/>
+  <issue id="RtlCompat" severity="ignore"/>
+  <issue id="RtlEnabled" severity="ignore"/>
+  <issue id="RtlSymmetry" severity="ignore"/>
+  <issue id="SdCardPath">
+    <ignore path="content/public/android/java/src/org/chromium/content/browser/MediaResourceGetter.java"/>
+  </issue>
+  <issue id="SetJavaScriptEnabled" severity="ignore"/>
+  <issue id="UnusedResources">
+    <!-- Used by chrome/android/java/AndroidManifest.xml -->
+    <ignore path="chrome/android/java/res/drawable/window_background.xml" />
+    <ignore path="chrome/android/java/res/xml/bookmark_thumbnail_widget_info.xml" />
+    <ignore path="chrome/android/java/res/xml/file_paths.xml" />
+
+    <ignore path="content/shell/android/shell_apk/res/layout/content_shell_activity.xml" />
+    <ignore path="content/shell/android/shell_apk/res/values/strings.xml" />
+  </issue>
+  <issue id="SignatureOrSystemPermissions" severity="ignore"/>
+  <issue id="UnusedAttribute" severity="ignore"/>
+  <issue id="ViewConstructor" severity="ignore"/>
+  <issue id="WrongCall" severity="ignore"/>
+</lint>
diff --git a/build/android/lint_action.gypi b/build/android/lint_action.gypi
new file mode 100644
index 0000000..e1adf1f
--- /dev/null
+++ b/build/android/lint_action.gypi
@@ -0,0 +1,43 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule to
+# run lint on java/class files.
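+#
+# A sketch of a possible inclusion site (values are illustrative; the
+# including target must supply lint_jar_path, config_path, result_path,
+# stamp_path and src_dirs, which the action below references):
+#  'actions': [
+#    {
+#      'variables': {
+#        'lint_jar_path': '<(jar_path)',
+#        'config_path': '<(intermediate_dir)/lint_config.xml',
+#        'result_path': '<(intermediate_dir)/lint_result.xml',
+#        'stamp_path': '<(intermediate_dir)/lint.stamp',
+#        'src_dirs': ['java/src'],
+#      },
+#      'includes': [ '../build/android/lint_action.gypi' ],
+#    },
+#  ],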
+
+{
+  'action_name': 'lint_<(_target_name)',
+  'message': 'Linting <(_target_name)',
+  'variables': {
+    'conditions': [
+      ['chromium_code != 0 and android_lint != 0 and never_lint == 0', {
+        'is_enabled': '--enable',
+      }, {
+        'is_enabled': '',
+      }]
+    ],
+    'android_manifest_path%': '<(DEPTH)/build/android/AndroidManifest.xml',
+    'resource_dir%': '<(DEPTH)/build/android/ant/empty/res',
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/lint.py',
+    '<(DEPTH)/build/android/lint/suppressions.xml',
+    '<(lint_jar_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/lint.py',
+    '--lint-path=<(android_sdk_root)/tools/lint',
+    '--config-path=<(DEPTH)/build/android/lint/suppressions.xml',
+    '--processed-config-path=<(config_path)',
+    '--manifest-path=<(android_manifest_path)',
+    '--result-path=<(result_path)',
+    '--resource-dir=<(resource_dir)',
+    '--product-dir=<(PRODUCT_DIR)',
+    '--src-dirs=>(src_dirs)',
+    '--jar-path=<(lint_jar_path)',
+    '--can-fail-build',
+    '--stamp=<(stamp_path)',
+    '<(is_enabled)',
+  ],
+}
diff --git a/build/android/locale_pak_resources.gypi b/build/android/locale_pak_resources.gypi
new file mode 100644
index 0000000..6f8e56f
--- /dev/null
+++ b/build/android/locale_pak_resources.gypi
@@ -0,0 +1,52 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a resources.zip with locale.pak files placed into appropriate
+# resource configs (e.g. en-GB.pak -> res/raw-en/en_gb.pak). Also generates
+# a locale_paks TypedArray so that resource files can be enumerated at runtime.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the resources will be included with that target.
+#
+# Variables:
+#   locale_pak_files - List of .pak files to process.
+#     Names must be of the form "en.pak" or "en-US.pak".
+#
+# Example
+#  {
+#    'target_name': 'my_locale_resources',
+#    'type': 'none',
+#    'variables': {
+#      'locale_pak_files': ['path1/fr.pak'],
+#    },
+#    'includes': [ '../build/android/locale_pak_resources.gypi' ],
+#  },
+#
+{
+  'variables': {
+    'resources_zip_path': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'additional_input_paths': ['<(resources_zip_path)'],
+      'dependencies_res_zip_paths': ['<(resources_zip_path)'],
+    },
+  },
+  'actions': [{
+    'action_name': '<(_target_name)_locale_pak_resources',
+    'inputs': [
+      '<(DEPTH)/build/android/gyp/util/build_utils.py',
+      '<(DEPTH)/build/android/gyp/locale_pak_resources.py',
+      '<@(locale_pak_files)',
+    ],
+    'outputs': [
+      '<(resources_zip_path)',
+    ],
+    'action': [
+      'python', '<(DEPTH)/build/android/gyp/locale_pak_resources.py',
+      '--locale-paks', '<(locale_pak_files)',
+      '--resources-zip', '<(resources_zip_path)',
+    ],
+  }],
+}
diff --git a/build/android/method_count.py b/build/android/method_count.py
new file mode 100755
index 0000000..93250b5
--- /dev/null
+++ b/build/android/method_count.py
@@ -0,0 +1,55 @@
+#! /usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+
+from pylib import constants
+from pylib.sdk import dexdump
+
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                             'common'))
+import perf_tests_results_helper
+
+
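+# Matches the method count line in dexdump's file summary output, e.g.
+# (the count shown is illustrative):
+#   method_ids_size         : 65227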
+_METHOD_IDS_SIZE_RE = re.compile(r'^method_ids_size +: +(\d+)$')
+
+def MethodCount(dexfile):
+  for line in dexdump.DexDump(dexfile, file_summary=True):
+    m = _METHOD_IDS_SIZE_RE.match(line)
+    if m:
+      return m.group(1)
+  raise Exception('"method_ids_size" not found in dex dump of %s' % dexfile)
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--apk-name', help='Name of the APK to which the dexfile corresponds.')
+  parser.add_argument('dexfile')
+
+  args = parser.parse_args()
+
+  if not args.apk_name:
+    dirname, basename = os.path.split(args.dexfile)
+    while basename:
+      if 'apk' in basename:
+        args.apk_name = basename
+        break
+      dirname, basename = os.path.split(dirname)
+    else:
+      parser.error(
+          'Unable to determine apk name from %s, '
+          'and --apk-name was not provided.' % args.dexfile)
+
+  method_count = MethodCount(args.dexfile)
+  perf_tests_results_helper.PrintPerfResult(
+      '%s_methods' % args.apk_name, 'total', [method_count], 'methods')
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/native_app_dependencies.gypi b/build/android/native_app_dependencies.gypi
new file mode 100644
index 0000000..6032274
--- /dev/null
+++ b/build/android/native_app_dependencies.gypi
@@ -0,0 +1,67 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to strip and place dependent shared libraries required by a native binary in a
+# single folder that can later be pushed to the device.
+#
+# NOTE: consider packaging your binary as an apk instead of running it as a
+# raw native executable.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'target_that_depends_on_my_binary',
+#    'type': 'none',
+#    'dependencies': [
+#      'my_binary',
+#    ],
+#    'variables': {
+#      'native_binary': '<(PRODUCT_DIR)/my_binary',
+#      'output_dir': 'location to place binary and dependent libraries'
+#    },
+#    'includes': [ '../../build/android/native_app_dependencies.gypi' ],
+#  },
+#
+
+{
+  'variables': {
+    'include_main_binary%': 1,
+  },
+  'conditions': [
+      ['component == "shared_library"', {
+        'dependencies': [
+          '<(DEPTH)/build/android/setup.gyp:copy_system_libraries',
+        ],
+        'variables': {
+          'intermediate_dir': '<(PRODUCT_DIR)/<(_target_name)',
+          'ordered_libraries_file': '<(intermediate_dir)/native_libraries.json',
+        },
+        'actions': [
+          {
+            'variables': {
+              'input_libraries': ['<(native_binary)'],
+            },
+            'includes': ['../../build/android/write_ordered_libraries.gypi'],
+          },
+          {
+            'action_name': 'stripping native libraries',
+            'variables': {
+              'stripped_libraries_dir%': '<(output_dir)',
+              'input_paths': ['<(native_binary)'],
+              'stamp': '<(intermediate_dir)/strip.stamp',
+            },
+            'includes': ['../../build/android/strip_native_libraries.gypi'],
+          },
+        ],
+      }],
+      ['include_main_binary==1', {
+        'copies': [
+          {
+            'destination': '<(output_dir)',
+            'files': [ '<(native_binary)' ],
+          }
+        ],
+      }],
+  ],
+}
diff --git a/build/android/ndk.gyp b/build/android/ndk.gyp
new file mode 100644
index 0000000..2838a98
--- /dev/null
+++ b/build/android/ndk.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
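+
+# Wraps the NDK's cpufeatures helper library. A sketch of how another target
+# might depend on it (reference style follows the <(DEPTH)-relative convention
+# used elsewhere in these files):
+#   'dependencies': [ '<(DEPTH)/build/android/ndk.gyp:cpu_features' ],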
+
+{
+  'targets': [
+    {
+      'target_name': 'cpu_features',
+      'type': 'static_library',
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(android_ndk_root)/sources/android/cpufeatures',
+        ],
+      },
+      'sources': [
+        '<(android_ndk_root)/sources/android/cpufeatures/cpu-features.c',
+      ],
+    },
+  ],
+}
diff --git a/build/android/pack_relocations.gypi b/build/android/pack_relocations.gypi
new file mode 100644
index 0000000..8567fa6
--- /dev/null
+++ b/build/android/pack_relocations.gypi
@@ -0,0 +1,77 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# packs relocations in Release builds of native libraries.
+#
+# To use this, create a gyp action with the following form:
+#  {
+#    'action_name': 'pack_relocations',
+#    'variables': {
+#      'enable_packing': 'pack relocations if 1, plain file copy if 0',
+#      'exclude_packing_list': 'names of libraries explicitly not packed',
+#      'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#      'input_paths': 'files to be added to the list of inputs',
+#      'stamp': 'file to touch when the action is complete',
+#      'stripped_libraries_dir': 'directory holding stripped libraries',
+#      'packed_libraries_dir': 'directory holding packed libraries',
+#    },
+#    'includes': [ '../../build/android/pack_relocations.gypi' ],
+#  },
+#
+
+{
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/pack_relocations.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'conditions': [
+    ['enable_packing == 1', {
+      'message': 'Packing relocations for <(_target_name)',
+      'dependencies': [
+        '<(DEPTH)/third_party/android_platform/relocation_packer.gyp:android_relocation_packer#host',
+      ],
+      'inputs': [
+        '<(PRODUCT_DIR)/android_relocation_packer',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/pack_relocations.py',
+        '--configuration-name=<(CONFIGURATION_NAME)',
+        '--enable-packing=1',
+        '--exclude-packing-list=<@(exclude_packing_list)',
+        '--android-pack-relocations=<(PRODUCT_DIR)/android_relocation_packer',
+        '--stripped-libraries-dir=<(stripped_libraries_dir)',
+        '--packed-libraries-dir=<(packed_libraries_dir)',
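+        # @FileArg(path:key) is expanded by the script (via build_utils) at
+        # run time into the value of |key| in the JSON file at |path| --
+        # here, the library list written by write_ordered_libraries.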
+        '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+        '--stamp=<(stamp)',
+      ],
+    }, {
+      'message': 'Copying libraries (no relocation packing) for <(_target_name)',
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/pack_relocations.py',
+        '--configuration-name=<(CONFIGURATION_NAME)',
+        '--enable-packing=0',
+        '--stripped-libraries-dir=<(stripped_libraries_dir)',
+        '--packed-libraries-dir=<(packed_libraries_dir)',
+        '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+        '--stamp=<(stamp)',
+      ],
+    }],
+    ['component == "shared_library"', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+}
diff --git a/build/android/package_resources_action.gypi b/build/android/package_resources_action.gypi
new file mode 100644
index 0000000..eb60871
--- /dev/null
+++ b/build/android/package_resources_action.gypi
@@ -0,0 +1,97 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is a helper to java_apk.gypi. It should be used to create an
+# action that runs ApkBuilder via ANT.
+#
+# Required variables:
+#  apk_name - File name (minus path & extension) of the output apk.
+#  android_manifest_path - Path to AndroidManifest.xml.
+#  app_manifest_version_name - set the app's 'human readable' version name.
+#  app_manifest_version_code - set the app's version number.
+# Optional variables:
+#  asset_location - The directory where assets are located (if any).
+#  create_density_splits - Whether to create density-based apk splits. Splits
+#    are supported only for minSdkVersion >= 21.
+#  language_splits - List of languages to create apk splits for.
+#  resource_zips - List of paths to resource zip files.
+#  shared_resources - Make a resource package that can be loaded by a different
+#    application at runtime to access the package's resources.
+#  extensions_to_not_compress - E.g.: 'pak,dat,bin'
+#  extra_inputs - List of extra action inputs.
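+#
+# A sketch of a possible use (values are illustrative; java_apk.gypi supplies
+# them for real apk targets, including the intermediate_dir consumed below):
+#  {
+#    'variables': {
+#      'apk_name': 'MyApp',
+#      'android_manifest_path': 'java/AndroidManifest.xml',
+#      'app_manifest_version_name': '1.0',
+#      'app_manifest_version_code': '1',
+#      'intermediate_dir': '<(PRODUCT_DIR)/my_app',
+#      'language_splits': [],
+#    },
+#    'includes': [ '../build/android/package_resources_action.gypi' ],
+#  },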
+{
+  'variables': {
+    'asset_location%': '',
+    'create_density_splits%': 0,
+    'resource_zips%': [],
+    'shared_resources%': 0,
+    'extensions_to_not_compress%': '',
+    'extra_inputs%': [],
+    'resource_packaged_apk_name': '<(apk_name)-resources.ap_',
+    'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)',
+  },
+  'action_name': 'package_resources_<(apk_name)',
+  'message': 'packaging resources for <(apk_name)',
+  'inputs': [
+    # TODO: This isn't always rerun correctly, http://crbug.com/351928
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/package_resources.py',
+    '<(android_manifest_path)',
+    '<@(extra_inputs)',
+  ],
+  'outputs': [
+    '<(resource_packaged_apk_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/package_resources.py',
+    '--android-sdk', '<(android_sdk)',
+    '--aapt-path', '<(android_aapt_path)',
+    '--configuration-name', '<(CONFIGURATION_NAME)',
+    '--android-manifest', '<(android_manifest_path)',
+    '--version-code', '<(app_manifest_version_code)',
+    '--version-name', '<(app_manifest_version_name)',
+    '--no-compress', '<(extensions_to_not_compress)',
+    '--apk-path', '<(resource_packaged_apk_path)',
+  ],
+  'conditions': [
+    ['shared_resources == 1', {
+      'action': [
+        '--shared-resources',
+      ],
+    }],
+    ['asset_location != ""', {
+      'action': [
+        '--asset-dir', '<(asset_location)',
+      ],
+    }],
+    ['create_density_splits == 1', {
+      'action': [
+        '--create-density-splits',
+      ],
+      'outputs': [
+        '<(resource_packaged_apk_path)_hdpi',
+        '<(resource_packaged_apk_path)_xhdpi',
+        '<(resource_packaged_apk_path)_xxhdpi',
+        '<(resource_packaged_apk_path)_xxxhdpi',
+        '<(resource_packaged_apk_path)_tvdpi',
+      ],
+    }],
+    ['language_splits != []', {
+      'action': [
+        '--language-splits=<(language_splits)',
+      ],
+      'outputs': [
+        "<!@(python <(DEPTH)/build/apply_locales.py '<(resource_packaged_apk_path)_ZZLOCALE' <(language_splits))",
+      ],
+    }],
+    ['resource_zips != []', {
+      'action': [
+        '--resource-zips', '>(resource_zips)',
+      ],
+      'inputs': [
+        '>@(resource_zips)',
+      ],
+    }],
+  ],
+}
diff --git a/build/android/preprocess_google_play_services.config.json b/build/android/preprocess_google_play_services.config.json
new file mode 100644
index 0000000..8b3198b
--- /dev/null
+++ b/build/android/preprocess_google_play_services.config.json
@@ -0,0 +1,90 @@
+{
+  "lib_version": "7.3.0",
+  "clients": [
+    "play-services-base",
+    "play-services-cast",
+    "play-services-identity"
+  ],
+  "client_filter": [
+    "res/drawable*",
+    "res/values-af",
+    "res/values-az",
+    "res/values-be",
+    "res/values-bn",
+    "res/values-bn-rBD",
+    "res/values-de-rAT",
+    "res/values-de-rCH",
+    "res/values-en-rIE",
+    "res/values-en-rIN",
+    "res/values-en-rSG",
+    "res/values-en-rZA",
+    "res/values-es-rAR",
+    "res/values-es-rBO",
+    "res/values-es-rCL",
+    "res/values-es-rCO",
+    "res/values-es-rCR",
+    "res/values-es-rDO",
+    "res/values-es-rEC",
+    "res/values-es-rGT",
+    "res/values-es-rHN",
+    "res/values-es-rMX",
+    "res/values-es-rNI",
+    "res/values-es-rPA",
+    "res/values-es-rPE",
+    "res/values-es-rPR",
+    "res/values-es-rPY",
+    "res/values-es-rSV",
+    "res/values-es-rUS",
+    "res/values-es-rUY",
+    "res/values-es-rVE",
+    "res/values-eu-rES",
+    "res/values-fr-rCA",
+    "res/values-fr-rCH",
+    "res/values-gl",
+    "res/values-gl-rES",
+    "res/values-gu",
+    "res/values-he",
+    "res/values-hy",
+    "res/values-hy-rAM",
+    "res/values-in",
+    "res/values-is",
+    "res/values-is-rIS",
+    "res/values-ka",
+    "res/values-ka-rGE",
+    "res/values-kk-rKZ",
+    "res/values-km",
+    "res/values-km-rKH",
+    "res/values-kn",
+    "res/values-kn-rIN",
+    "res/values-ky",
+    "res/values-ky-rKG",
+    "res/values-lo",
+    "res/values-lo-rLA",
+    "res/values-mk-rMK",
+    "res/values-ml",
+    "res/values-ml-rIN",
+    "res/values-mn",
+    "res/values-mn-rMN",
+    "res/values-mo",
+    "res/values-mr",
+    "res/values-mr-rIN",
+    "res/values-ms",
+    "res/values-ms-rMY",
+    "res/values-my-rMM",
+    "res/values-nb",
+    "res/values-ne",
+    "res/values-ne-rNP",
+    "res/values-si",
+    "res/values-si-rLK",
+    "res/values-ta",
+    "res/values-ta-rIN",
+    "res/values-te",
+    "res/values-te-rIN",
+    "res/values-tl",
+    "res/values-ur-rPK",
+    "res/values-uz-rUZ",
+    "res/values-zh",
+    "res/values-zh-rHK",
+    "res/values-zu"
+  ]
+}
diff --git a/build/android/preprocess_google_play_services.py b/build/android/preprocess_google_play_services.py
new file mode 100755
index 0000000..85d239a
--- /dev/null
+++ b/build/android/preprocess_google_play_services.py
@@ -0,0 +1,238 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''Prepares the Google Play services split client libraries before usage by
+Chrome's build system.
+
+We need to preprocess Google Play services before using it in Chrome
+builds for 2 main reasons:
+
+- Getting rid of unused resources: unsupported languages, unused
+drawables, etc.
+
+- Merging the different jars so that they can be proguarded more
+easily. This is necessary since debug and test apks get very close
+to the dex limit.
+
+The script is supposed to be used with the maven repository that can be obtained
+by downloading the "extra-google-m2repository" from the Android SDK Manager. It
+also supports importing from already extracted AAR files using the
+--is-extracted-repo flag.
+
+The json config file (see the -c argument) should provide the following fields:
+
+- lib_version: String. Used when building from the maven repository. It should
+  be the package's version (e.g. "7.3.0")
+
+- clients: String array. List of clients to pick. For example, when building
+  from the maven repository, it's the artifactId (e.g. "play-services-base") of
+  each client.
+
+- client_filter: String array. Patterns of files to prune from the clients once
+  extracted. Metacharacters are allowed. (e.g. "res/drawable*")
+
+The output is a directory with the following structure:
+
+    OUT_DIR
+    +-- google-play-services.jar
+    +-- res
+    |   +-- CLIENT_1
+    |   |   +-- color
+    |   |   +-- values
+    |   |   +-- etc.
+    |   +-- CLIENT_2
+    |       +-- ...
+    +-- stub
+        +-- res/[.git-keep-directory]
+        +-- src/android/UnusedStub.java
+
+Requires the `jar` utility in the path.
+
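+Example invocation (a sketch; the repository and output paths are
+hypothetical):
+
+    build/android/preprocess_google_play_services.py \
+        -r third_party/android_tools/sdk/extras/google/m2repository \
+        -o third_party/google_play_services \
+        -c build/android/preprocess_google_play_services.config.json
+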
+'''
+
+import argparse
+import glob
+import itertools
+import json
+import os
+import shutil
+import stat
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+
+sys.path.append(
+    os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android', 'gyp'))
+from util import build_utils
+
+
+M2_PKG_PATH = os.path.join('com', 'google', 'android', 'gms')
+
+
+def main():
+  parser = argparse.ArgumentParser(description=("Prepares the Google Play "
+      "services split client libraries before usage by Chrome's build system"))
+  parser.add_argument('-r',
+                      '--repository',
+                      help='The Google Play services repository location',
+                      required=True,
+                      metavar='FILE')
+  parser.add_argument('-o',
+                      '--out-dir',
+                      help='The output directory',
+                      required=True,
+                      metavar='FILE')
+  parser.add_argument('-c',
+                      '--config-file',
+                      help='Config file path',
+                      required=True,
+                      metavar='FILE')
+  parser.add_argument('-g',
+                      '--git-friendly',
+                      action='store_true',
+                      default=False,
+                      help='Add a .gitkeep file to the empty directories')
+  parser.add_argument('-x',
+                      '--is-extracted-repo',
+                      action='store_true',
+                      default=False,
+                      help='The provided repository is not made of AAR files.')
+
+  args = parser.parse_args()
+
+  ProcessGooglePlayServices(args.repository,
+                            args.out_dir,
+                            args.config_file,
+                            args.git_friendly,
+                            args.is_extracted_repo)
+
+
+def ProcessGooglePlayServices(repo, out_dir, config_path, git_friendly,
+                              is_extracted_repo):
+  with open(config_path, 'r') as json_file:
+    config = json.load(json_file)
+
+  with build_utils.TempDir() as tmp_root:
+    tmp_paths = _SetupTempDir(tmp_root)
+
+    if is_extracted_repo:
+      _ImportFromExtractedRepo(config, tmp_paths, repo)
+    else:
+      _ImportFromAars(config, tmp_paths, repo)
+
+    _GenerateCombinedJar(tmp_paths)
+    _ProcessResources(config, tmp_paths)
+    _BuildOutput(config, tmp_paths, out_dir, git_friendly)
+
+
+def _SetupTempDir(tmp_root):
+  tmp_paths = {
+    'root': tmp_root,
+    'imported_clients': os.path.join(tmp_root, 'imported_clients'),
+    'extracted_jars': os.path.join(tmp_root, 'jar'),
+    'combined_jar': os.path.join(tmp_root, 'google-play-services.jar'),
+  }
+  os.mkdir(tmp_paths['imported_clients'])
+  os.mkdir(tmp_paths['extracted_jars'])
+
+  return tmp_paths
+
+
+def _SetupOutputDir(out_dir):
+  out_paths = {
+    'root': out_dir,
+    'res': os.path.join(out_dir, 'res'),
+    'jar': os.path.join(out_dir, 'google-play-services.jar'),
+    'stub': os.path.join(out_dir, 'stub'),
+  }
+
+  shutil.rmtree(out_paths['jar'], ignore_errors=True)
+  shutil.rmtree(out_paths['res'], ignore_errors=True)
+  shutil.rmtree(out_paths['stub'], ignore_errors=True)
+
+  return out_paths
+
+
+def _MakeWritable(dir_path):
+  for root, dirs, files in os.walk(dir_path):
+    for path in itertools.chain(dirs, files):
+      st = os.stat(os.path.join(root, path))
+      os.chmod(os.path.join(root, path), st.st_mode | stat.S_IWUSR)
+
+
+def _ImportFromAars(config, tmp_paths, repo):
+  for client in config['clients']:
+    aar_name = '%s-%s.aar' % (client, config['lib_version'])
+    aar_path = os.path.join(repo, M2_PKG_PATH, client,
+                            config['lib_version'], aar_name)
+    aar_out_path = os.path.join(tmp_paths['imported_clients'], client)
+    build_utils.ExtractAll(aar_path, aar_out_path)
+
+    client_jar_path = os.path.join(aar_out_path, 'classes.jar')
+    build_utils.ExtractAll(client_jar_path, tmp_paths['extracted_jars'],
+                           no_clobber=False)
+
+
+def _ImportFromExtractedRepo(config, tmp_paths, repo):
+  # Import the clients
+  try:
+    for client in config['clients']:
+      client_out_dir = os.path.join(tmp_paths['imported_clients'], client)
+      shutil.copytree(os.path.join(repo, client), client_out_dir)
+
+      client_jar_path = os.path.join(client_out_dir, 'classes.jar')
+      build_utils.ExtractAll(client_jar_path, tmp_paths['extracted_jars'],
+                             no_clobber=False)
+  finally:
+    _MakeWritable(tmp_paths['imported_clients'])
+
+
+def _GenerateCombinedJar(tmp_paths):
+  out_file_name = tmp_paths['combined_jar']
+  working_dir = tmp_paths['extracted_jars']
+  cmd_helper.Call(['jar', '-cf', out_file_name, '-C', working_dir, '.'])
+
+
+def _ProcessResources(config, tmp_paths):
+  # Prune unused resources
+  for res_filter in config['client_filter']:
+    glob_pattern = os.path.join(tmp_paths['imported_clients'], '*', res_filter)
+    for prune_target in glob.glob(glob_pattern):
+      shutil.rmtree(prune_target)
+
+
+def _BuildOutput(config, tmp_paths, out_dir, git_friendly):
+  out_paths = _SetupOutputDir(out_dir)
+
+  # Copy the resources to the output dir
+  for client in config['clients']:
+    res_in_tmp_dir = os.path.join(tmp_paths['imported_clients'], client, 'res')
+    if os.path.isdir(res_in_tmp_dir) and os.listdir(res_in_tmp_dir):
+      res_in_final_dir = os.path.join(out_paths['res'], client)
+      shutil.copytree(res_in_tmp_dir, res_in_final_dir)
+
+  # Copy the jar
+  shutil.copyfile(tmp_paths['combined_jar'], out_paths['jar'])
+
+  # Write the java dummy stub. Needed for gyp to create the resource jar
+  stub_location = os.path.join(out_paths['stub'], 'src', 'android')
+  os.makedirs(stub_location)
+  with open(os.path.join(stub_location, 'UnusedStub.java'), 'w') as stub:
+    stub.write('package android;'
+               'public final class UnusedStub {'
+               '    private UnusedStub() {}'
+               '}')
+
+  # Create the main res directory. Will be empty but is needed by gyp
+  stub_res_location = os.path.join(out_paths['stub'], 'res')
+  os.makedirs(stub_res_location)
+  if git_friendly:
+    build_utils.Touch(os.path.join(stub_res_location, '.git-keep-directory'))
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/provision_devices.py b/build/android/provision_devices.py
new file mode 100755
index 0000000..a5f8fc6
--- /dev/null
+++ b/build/android/provision_devices.py
@@ -0,0 +1,349 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provisions Android devices with settings required for bots.
+
+Usage:
+  ./provision_devices.py [-d <device serial number>]
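+
+  To run only selected provisioning phases (a sketch; valid phase names are
+  wipe, properties and finish, per _PHASES below):
+  ./provision_devices.py --phase wipe --phase properties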
+"""
+
+import argparse
+import json
+import logging
+import os
+import posixpath
+import re
+import subprocess
+import sys
+import time
+
+from pylib import constants
+from pylib import device_settings
+from pylib.device import battery_utils
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import run_tests_helper
+from pylib.utils import timeout_retry
+
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT,
+                             'third_party', 'android_testrunner'))
+import errors
+
+
+class _DEFAULT_TIMEOUTS(object):
+  # L can take a while to reboot after a wipe.
+  LOLLIPOP = 600
+  PRE_LOLLIPOP = 180
+
+  HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP)
+
+
+class _PHASES(object):
+  WIPE = 'wipe'
+  PROPERTIES = 'properties'
+  FINISH = 'finish'
+
+  ALL = [WIPE, PROPERTIES, FINISH]
+
+
+def ProvisionDevices(options):
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  if options.device:
+    devices = [d for d in devices if d == options.device]
+    if not devices:
+      raise device_errors.DeviceUnreachableError(options.device)
+
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  parallel_devices.pMap(ProvisionDevice, options)
+  if options.auto_reconnect:
+    _LaunchHostHeartbeat()
+  blacklist = device_blacklist.ReadBlacklist()
+  if options.output_device_blacklist:
+    with open(options.output_device_blacklist, 'w') as f:
+      json.dump(blacklist, f)
+  if all(d in blacklist for d in devices):
+    raise device_errors.NoDevicesError
+  return 0
+
+
+def ProvisionDevice(device, options):
+  if options.reboot_timeout:
+    reboot_timeout = options.reboot_timeout
+  elif (device.build_version_sdk >=
+        constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP):
+    reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP
+  else:
+    reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP
+
+  def should_run_phase(phase_name):
+    return not options.phases or phase_name in options.phases
+
+  def run_phase(phase_func, reboot=True):
+    try:
+      device.WaitUntilFullyBooted(timeout=reboot_timeout, retries=0)
+    except device_errors.CommandTimeoutError:
+      logging.error('Device did not finish booting. Will try to reboot.')
+      device.Reboot(timeout=reboot_timeout)
+    phase_func(device, options)
+    if reboot:
+      device.Reboot(False, retries=0)
+      device.adb.WaitForDevice()
+
+  try:
+    if should_run_phase(_PHASES.WIPE):
+      run_phase(WipeDevice)
+
+    if should_run_phase(_PHASES.PROPERTIES):
+      run_phase(SetProperties)
+
+    if should_run_phase(_PHASES.FINISH):
+      run_phase(FinishProvisioning, reboot=False)
+
+  except (errors.WaitForResponseTimedOutError,
+          device_errors.CommandTimeoutError):
+    logging.exception('Timed out waiting for device %s. Adding to blacklist.',
+                      str(device))
+    device_blacklist.ExtendBlacklist([str(device)])
+
+  except device_errors.CommandFailedError:
+    logging.exception('Failed to provision device %s. Adding to blacklist.',
+                      str(device))
+    device_blacklist.ExtendBlacklist([str(device)])
+
+
+def WipeDevice(device, options):
+  """Wipes data from device, keeping only the adb_keys for authorization.
+
+  After wiping data on a device that has been authorized, adb can still
+  communicate with the device, but after reboot the device will need to be
+  re-authorized because the adb keys file is stored in /data/misc/adb/.
+  Thus, the adb_keys file is rewritten so the device does not need to be
+  re-authorized.
+
+  Arguments:
+    device: the device to wipe
+  """
+  if options.skip_wipe:
+    return
+
+  try:
+    device.EnableRoot()
+    device_authorized = device.FileExists(constants.ADB_KEYS_FILE)
+    if device_authorized:
+      adb_keys = device.ReadFile(constants.ADB_KEYS_FILE,
+                                 as_root=True).splitlines()
+    device.RunShellCommand(['wipe', 'data'],
+                           as_root=True, check_return=True)
+    device.adb.WaitForDevice()
+
+    if device_authorized:
+      adb_keys_set = set(adb_keys)
+      for adb_key_file in options.adb_key_files or []:
+        try:
+          with open(adb_key_file, 'r') as f:
+            adb_public_keys = f.readlines()
+          adb_keys_set.update(adb_public_keys)
+        except IOError:
+          logging.warning('Unable to find adb keys file %s.' % adb_key_file)
+      _WriteAdbKeysFile(device, '\n'.join(adb_keys_set))
+  except device_errors.CommandFailedError:
+    logging.exception('Possible failure while wiping the device. '
+                      'Attempting to continue.')
+
+
+def _WriteAdbKeysFile(device, adb_keys_string):
+  dir_path = posixpath.dirname(constants.ADB_KEYS_FILE)
+  device.RunShellCommand(['mkdir', '-p', dir_path],
+                         as_root=True, check_return=True)
+  device.RunShellCommand(['restorecon', dir_path],
+                         as_root=True, check_return=True)
+  device.WriteFile(constants.ADB_KEYS_FILE, adb_keys_string, as_root=True)
+  device.RunShellCommand(['restorecon', constants.ADB_KEYS_FILE],
+                         as_root=True, check_return=True)
+
+
+def SetProperties(device, options):
+  try:
+    device.EnableRoot()
+  except device_errors.CommandFailedError as e:
+    logging.warning(str(e))
+
+  _ConfigureLocalProperties(device, options.enable_java_debug)
+  device_settings.ConfigureContentSettings(
+      device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)
+  if options.disable_location:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.DISABLE_LOCATION_SETTINGS)
+  else:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.ENABLE_LOCATION_SETTINGS)
+
+  if options.disable_mock_location:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.DISABLE_MOCK_LOCATION_SETTINGS)
+  else:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS)
+
+  device_settings.SetLockScreenSettings(device)
+  if options.disable_network:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.NETWORK_DISABLED_SETTINGS)
+
+def _ConfigureLocalProperties(device, java_debug=True):
+  """Set standard readonly testing device properties prior to reboot."""
+  local_props = [
+      'persist.sys.usb.config=adb',
+      'ro.monkey=1',
+      'ro.test_harness=1',
+      'ro.audio.silent=1',
+      'ro.setupwizard.mode=DISABLED',
+      ]
+  if java_debug:
+    local_props.append(
+        '%s=all' % device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY)
+    local_props.append('debug.checkjni=1')
+  try:
+    device.WriteFile(
+        constants.DEVICE_LOCAL_PROPERTIES_PATH,
+        '\n'.join(local_props), as_root=True)
+    # Android will not respect the local props file if it is world writable.
+    device.RunShellCommand(
+        ['chmod', '644', constants.DEVICE_LOCAL_PROPERTIES_PATH],
+        as_root=True, check_return=True)
+  except device_errors.CommandFailedError:
+    logging.exception('Failed to configure local properties.')
+
+
+def FinishProvisioning(device, options):
+  if options.min_battery_level is not None:
+    try:
+      battery = battery_utils.BatteryUtils(device)
+      battery.ChargeDeviceToLevel(options.min_battery_level)
+    except device_errors.CommandFailedError:
+      logging.exception('Unable to charge device to specified level.')
+
+  if options.max_battery_temp is not None:
+    try:
+      battery = battery_utils.BatteryUtils(device)
+      battery.LetBatteryCoolToTemperature(options.max_battery_temp)
+    except device_errors.CommandFailedError:
+      logging.exception('Unable to let battery cool to specified temperature.')
+
+  device.RunShellCommand(
+      ['date', '-s', time.strftime('%Y%m%d.%H%M%S', time.gmtime())],
+      as_root=True, check_return=True)
+  props = device.RunShellCommand('getprop', check_return=True)
+  for prop in props:
+    logging.info('  %s' % prop)
+  if options.auto_reconnect:
+    _PushAndLaunchAdbReboot(device, options.target)
+
+
+def _PushAndLaunchAdbReboot(device, target):
+  """Pushes and launches the adb_reboot binary on the device.
+
+  Args:
+    device: The DeviceUtils instance for the device to which the adb_reboot
+            binary should be pushed.
+    target: The build target (e.g. Debug or Release), used to locate the
+            adb_reboot binary.
+  """
+  logging.info('Will push and launch adb_reboot on %s' % str(device))
+  # Kill if adb_reboot is already running.
+  device.KillAll('adb_reboot', blocking=True, timeout=2, quiet=True)
+  # Push adb_reboot
+  logging.info('  Pushing adb_reboot ...')
+  adb_reboot = os.path.join(constants.DIR_SOURCE_ROOT,
+                            'out/%s/adb_reboot' % target)
+  device.PushChangedFiles([(adb_reboot, '/data/local/tmp/')])
+  # Launch adb_reboot
+  logging.info('  Launching adb_reboot ...')
+  device.RunShellCommand(
+      ['/data/local/tmp/adb_reboot'],
+      check_return=True)
+
+
+def _LaunchHostHeartbeat():
+  # Kill if existing host_heartbeat
+  KillHostHeartbeat()
+  # Launch a new host_heartbeat
+  logging.info('Spawning host heartbeat...')
+  subprocess.Popen([os.path.join(constants.DIR_SOURCE_ROOT,
+                                 'build/android/host_heartbeat.py')])
+
+
+def KillHostHeartbeat():
+  ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
+  stdout, _ = ps.communicate()
+  matches = re.findall(r'\n.*host_heartbeat.*', stdout)
+  for match in matches:
+    logging.info('An instance of host heartbeat is running... will kill it.')
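+    # In 'ps aux' output the PID is the second whitespace-delimited field.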
+    pid = re.findall(r'(\S+)', match)[1]
+    subprocess.call(['kill', str(pid)])
+
+
+def main():
+  # Recommended options on perf bots:
+  # --disable-network
+  #     TODO(tonyg): We eventually want network on. However, currently radios
+  #     can cause perfbots to drain faster than they charge.
+  # --min-battery-level 95
+  #     Some perf bots run benchmarks with USB charging disabled which leads
+  #     to gradual draining of the battery. We must wait for a full charge
+  #     before starting a run in order to keep the devices online.
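+  #
+  # A hypothetical invocation combining these on a perf bot (sketch only;
+  # the flags are the ones defined below):
+  #   provision_devices.py --disable-network --min-battery-level 95 -t Release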
+
+  parser = argparse.ArgumentParser(
+      description='Provision Android devices with settings required for bots.')
+  parser.add_argument('-d', '--device', metavar='SERIAL',
+                      help='the serial number of the device to be provisioned'
+                      ' (the default is to provision all devices attached)')
+  parser.add_argument('--phase', action='append', choices=_PHASES.ALL,
+                      dest='phases',
+                      help='Phases of provisioning to run. '
+                           '(If omitted, all phases will be run.)')
+  parser.add_argument('--skip-wipe', action='store_true', default=False,
+                      help="don't wipe device data during provisioning")
+  parser.add_argument('--reboot-timeout', metavar='SECS', type=int,
+                      help='when wiping the device, max number of seconds to'
+                      ' wait after each reboot '
+                      '(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT)
+  parser.add_argument('--min-battery-level', type=int, metavar='NUM',
+                      help='wait for the device to reach this minimum battery'
+                      ' level before trying to continue')
+  parser.add_argument('--disable-location', action='store_true',
+                      help='disable Google location services on devices')
+  parser.add_argument('--disable-mock-location', action='store_true',
+                      default=False, help='Set ALLOW_MOCK_LOCATION to false')
+  parser.add_argument('--disable-network', action='store_true',
+                      help='disable network access on devices')
+  parser.add_argument('--disable-java-debug', action='store_false',
+                      dest='enable_java_debug', default=True,
+                      help='disable Java property asserts and JNI checking')
+  parser.add_argument('-t', '--target', default='Debug',
+                      help='the build target (default: %(default)s)')
+  parser.add_argument('-r', '--auto-reconnect', action='store_true',
+                      help='push binary which will reboot the device on adb'
+                      ' disconnections')
+  parser.add_argument('--adb-key-files', type=str, nargs='+',
+                      help='list of adb keys to push to device')
+  parser.add_argument('-v', '--verbose', action='count', default=1,
+                      help='Log more information.')
+  parser.add_argument('--max-battery-temp', type=int, metavar='NUM',
+                      help='Wait for the battery to have this temp or lower.')
+  parser.add_argument('--output-device-blacklist',
+                      help='JSON file to which the device blacklist will be'
+                           ' written.')
+  args = parser.parse_args()
+  constants.SetBuildType(args.target)
+
+  run_tests_helper.SetLogLevel(args.verbose)
+
+  return ProvisionDevices(args)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/push_libraries.gypi b/build/android/push_libraries.gypi
new file mode 100644
index 0000000..773c44f
--- /dev/null
+++ b/build/android/push_libraries.gypi
@@ -0,0 +1,49 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# pushes stripped shared libraries to the attached Android device. This should
+# only be used with the gyp_managed_install flag set.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'actions': [
+#      'variables': {
+#        'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#        'strip_stamp': 'stamp from strip action to block on',
+#        'libraries_source_dir': 'location where stripped libraries are stored',
+#        'device_library_dir': 'location on the device where to put pushed libraries',
+#        'push_stamp': 'file to touch when the action is complete',
+#        'configuration_name': 'The build CONFIGURATION_NAME',
+#      },
+#      'includes': [ '../../build/android/push_libraries.gypi' ],
+#    ],
+#  },
+#
+
+{
+  'action_name': 'push_libraries_<(_target_name)',
+  'message': 'Pushing libraries to device for <(_target_name)',
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/util/md5_check.py',
+    '<(DEPTH)/build/android/gyp/push_libraries.py',
+    '<(strip_stamp)',
+    '<(strip_additional_stamp)',
+    '<(build_device_config_path)',
+    '<(pack_relocations_stamp)',
+  ],
+  'outputs': [
+    '<(push_stamp)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/push_libraries.py',
+    '--build-device-configuration=<(build_device_config_path)',
+    '--libraries-dir=<(libraries_source_dir)',
+    '--device-dir=<(device_library_dir)',
+    '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+    '--stamp=<(push_stamp)',
+    '--configuration-name=<(configuration_name)',
+  ],
+}
diff --git a/build/android/pylib/OWNERS b/build/android/pylib/OWNERS
new file mode 100644
index 0000000..dbbbba7
--- /dev/null
+++ b/build/android/pylib/OWNERS
@@ -0,0 +1,4 @@
+jbudorick@chromium.org
+klundberg@chromium.org
+navabi@chromium.org
+skyostil@chromium.org
diff --git a/build/android/pylib/__init__.py b/build/android/pylib/__init__.py
new file mode 100644
index 0000000..96196cf
--- /dev/null
+++ b/build/android/pylib/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/android_commands.py b/build/android/pylib/android_commands.py
new file mode 100644
index 0000000..f7191f7
--- /dev/null
+++ b/build/android/pylib/android_commands.py
@@ -0,0 +1,1976 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to communicate with the device via the adb command.
+
+Assumes the adb binary is on the system path.
+
+Note that this module is deprecated.
+"""
+# TODO(jbudorick): Delete this file once no clients use it.
+
+# pylint: skip-file
+
+import collections
+import datetime
+import inspect
+import logging
+import os
+import random
+import re
+import shlex
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+
+import cmd_helper
+import constants
+import system_properties
+from utils import host_utils
+
+try:
+  from pylib import pexpect
+except ImportError:
+  pexpect = None
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'android_testrunner'))
+import adb_interface
+import am_instrument_parser
+import errors
+
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+
+# Pattern to search for the next whole line of pexpect output and capture it
+# into a match group. We can't use ^ and $ for line start and end with
+# pexpect; see http://www.noah.org/python/pexpect/#doc for an explanation why.
+PEXPECT_LINE_RE = re.compile('\n([^\r]*)\r')
+
+# Set the adb shell prompt to be a unique marker that will [hopefully] not
+# appear at the start of any line of a command's output.
+SHELL_PROMPT = '~+~PQ\x17RS~+~'
+
+# Java properties file
+LOCAL_PROPERTIES_PATH = constants.DEVICE_LOCAL_PROPERTIES_PATH
+
+# Property in /data/local.prop that controls Java assertions.
+JAVA_ASSERT_PROPERTY = 'dalvik.vm.enableassertions'
+
+# Keycode "enum" suitable for passing to AndroidCommands.SendKey().
+KEYCODE_HOME = 3
+KEYCODE_BACK = 4
+KEYCODE_DPAD_UP = 19
+KEYCODE_DPAD_DOWN = 20
+KEYCODE_DPAD_RIGHT = 22
+KEYCODE_ENTER = 66
+KEYCODE_MENU = 82
+
+MD5SUM_DEVICE_FOLDER = constants.TEST_EXECUTABLE_DIR + '/md5sum/'
+MD5SUM_DEVICE_PATH = MD5SUM_DEVICE_FOLDER + 'md5sum_bin'
+
+PIE_WRAPPER_PATH = constants.TEST_EXECUTABLE_DIR + '/run_pie'
+
+CONTROL_USB_CHARGING_COMMANDS = [
+  {
+    # Nexus 4
+    'witness_file': '/sys/module/pm8921_charger/parameters/disabled',
+    'enable_command': 'echo 0 > /sys/module/pm8921_charger/parameters/disabled',
+    'disable_command':
+        'echo 1 > /sys/module/pm8921_charger/parameters/disabled',
+  },
+  {
+    # Nexus 5
+    # Setting the HIZ bit of the bq24192 causes the charger to actually ignore
+    # energy coming from USB. Setting the power_supply offline just updates the
+    # Android system to reflect that.
+    'witness_file': '/sys/kernel/debug/bq24192/INPUT_SRC_CONT',
+    'enable_command': (
+        'echo 0x4A > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'echo 1 > /sys/class/power_supply/usb/online'),
+    'disable_command': (
+        'echo 0xCA > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'chmod 644 /sys/class/power_supply/usb/online && '
+        'echo 0 > /sys/class/power_supply/usb/online'),
+  },
+]
+
+class DeviceTempFile(object):
+  def __init__(self, android_commands, prefix='temp_file', suffix=''):
+    """Find an unused temporary file path in the devices external directory.
+
+    When this object is closed, the file will be deleted on the device.
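+
+    Example (illustrative only):
+      with DeviceTempFile(android_commands, suffix='.sh') as f:
+        android_commands.SetFileContents(f.name, 'echo hi')
+      # f.name is deleted from the device on exiting the 'with' block.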
+    """
+    self.android_commands = android_commands
+    while True:
+      # TODO(cjhopman): This could actually return the same file in multiple
+      # calls if the caller doesn't write to the files immediately. This is
+      # expected to never happen.
+      i = random.randint(0, 1000000)
+      self.name = '%s/%s-%d-%010d%s' % (
+          android_commands.GetExternalStorage(),
+          prefix, int(time.time()), i, suffix)
+      if not android_commands.FileExistsOnDevice(self.name):
+        break
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, type, value, traceback):
+    self.close()
+
+  def close(self):
+    self.android_commands.RunShellCommand('rm ' + self.name)
+
+
+def GetAVDs():
+  """Returns a list of AVDs."""
+  re_avd = re.compile('^[ ]+Name: ([a-zA-Z0-9_:.-]+)', re.MULTILINE)
+  avds = re_avd.findall(cmd_helper.GetCmdOutput(['android', 'list', 'avd']))
+  return avds
+
+def ResetBadDevices():
+  """Removes the blacklist that keeps track of bad devices for a current
+     build.
+  """
+  device_blacklist.ResetBlacklist()
+
+def ExtendBadDevices(devices):
+  """Adds devices to the blacklist that keeps track of bad devices for a
+     current build.
+
+  The devices listed in the bad devices file will not be returned by
+  GetAttachedDevices.
+
+  Args:
+    devices: list of bad devices to be added to the bad devices file.
+  """
+  device_blacklist.ExtendBlacklist(devices)
+
+
+def GetAttachedDevices(hardware=True, emulator=True, offline=False):
+  """Returns a list of attached, android devices and emulators.
+
+  If a preferred device has been set with ANDROID_SERIAL, it will be first in
+  the returned list. The arguments specify what devices to include in the list.
+
+  Example output:
+
+    * daemon not running. starting it now on port 5037 *
+    * daemon started successfully *
+    List of devices attached
+    027c10494100b4d7        device
+    emulator-5554   offline
+
+  Args:
+    hardware: Include attached actual devices that are online.
+    emulator: Include emulators (i.e. AVDs) currently on the host.
+    offline: Include devices and emulators that are offline.
+
+  Returns: List of devices.
+  """
+  adb_devices_output = cmd_helper.GetCmdOutput([constants.GetAdbPath(),
+                                                'devices'])
+
+  re_device = re.compile('^([a-zA-Z0-9_:.-]+)\tdevice$', re.MULTILINE)
+  online_devices = re_device.findall(adb_devices_output)
+
+  re_device = re.compile('^(emulator-[0-9]+)\tdevice', re.MULTILINE)
+  emulator_devices = re_device.findall(adb_devices_output)
+
+  re_device = re.compile('^([a-zA-Z0-9_:.-]+)\t(?:offline|unauthorized)$',
+                         re.MULTILINE)
+  offline_devices = re_device.findall(adb_devices_output)
+
+  devices = []
+  # First determine list of online devices (e.g. hardware and/or emulator).
+  if hardware and emulator:
+    devices = online_devices
+  elif hardware:
+    devices = [device for device in online_devices
+               if device not in emulator_devices]
+  elif emulator:
+    devices = emulator_devices
+
+  # Now add offline devices if offline is true
+  if offline:
+    devices = devices + offline_devices
+
+  # Remove any devices in the blacklist.
+  blacklist = device_blacklist.ReadBlacklist()
+  if blacklist:
+    logging.info('Avoiding bad devices %s', ' '.join(blacklist))
+    devices = [device for device in devices if device not in blacklist]
+
+  preferred_device = os.environ.get('ANDROID_SERIAL')
+  if preferred_device in devices:
+    devices.remove(preferred_device)
+    devices.insert(0, preferred_device)
+  return devices
+
+
+def IsDeviceAttached(device):
+  """Return true if the device is attached and online."""
+  return device in GetAttachedDevices()
+
+
+def _GetFilesFromRecursiveLsOutput(path, ls_output, re_file, utc_offset=None):
+  """Gets a list of files from `ls` command output.
+
+  Python's os.walk isn't used because it doesn't work over adb shell.
+
+  Args:
+    path: The path to list.
+    ls_output: A list of lines returned by an `ls -lR` command.
+    re_file: A compiled regular expression which parses a line into named groups
+        consisting of at minimum "filename", "date", "time", "size" and
+        optionally "timezone".
+    utc_offset: A 5-character string of the form +HHMM or -HHMM, where HH is a
+        2-digit string giving the number of UTC offset hours, and MM is a
+        2-digit string giving the number of UTC offset minutes. If the input
+        utc_offset is None, will try to look for the value of "timezone" if it
+        is specified in re_file.
+
+  Returns:
+    A dict of {"name": (size, lastmod), ...} where:
+      name: The file name relative to |path|'s directory.
+      size: The file size in bytes (0 for directories).
+      lastmod: The file last modification date in UTC.
+  """
+  re_directory = re.compile('^%s/(?P<dir>[^:]+):$' % re.escape(path))
+  path_dir = os.path.dirname(path)
+
+  current_dir = ''
+  files = {}
+  for line in ls_output:
+    directory_match = re_directory.match(line)
+    if directory_match:
+      current_dir = directory_match.group('dir')
+      continue
+    file_match = re_file.match(line)
+    if file_match:
+      filename = os.path.join(current_dir, file_match.group('filename'))
+      if filename.startswith(path_dir):
+        filename = filename[len(path_dir) + 1:]
+      lastmod = datetime.datetime.strptime(
+          file_match.group('date') + ' ' + file_match.group('time')[:5],
+          '%Y-%m-%d %H:%M')
+      if not utc_offset and 'timezone' in re_file.groupindex:
+        utc_offset = file_match.group('timezone')
+      if isinstance(utc_offset, str) and len(utc_offset) == 5:
+        utc_delta = datetime.timedelta(hours=int(utc_offset[1:3]),
+                                       minutes=int(utc_offset[3:5]))
+        if utc_offset[0:1] == '-':
+          utc_delta = -utc_delta
+        lastmod -= utc_delta
+      files[filename] = (int(file_match.group('size')), lastmod)
+  return files
+
+
+def _ParseMd5SumOutput(md5sum_output):
+  """Returns a list of tuples from the provided md5sum output.
+
+  Args:
+    md5sum_output: output directly from md5sum binary.
+
+  Returns:
+    List of namedtuples with attributes |hash| and |path|, where |path| is the
+    absolute path to the file with an Md5Sum of |hash|.
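+
+  Example (hypothetical hash and path):
+    ['0123456789abcdef0123456789abcdef  /sdcard/f'] ->
+      [HashAndPath(hash='0123456789abcdef0123456789abcdef', path='/sdcard/f')]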
+  """
+  HashAndPath = collections.namedtuple('HashAndPath', ['hash', 'path'])
+  split_lines = [line.split('  ') for line in md5sum_output]
+  return [HashAndPath._make(s) for s in split_lines if len(s) == 2]
+
+
+def _HasAdbPushSucceeded(command_output):
+  """Returns whether adb push has succeeded from the provided output."""
+  # TODO(frankf): We should look at the return code instead of the command
+  # output for many of the commands in this file.
+  if not command_output:
+    return True
+  # Success looks like this: "3035 KB/s (12512056 bytes in 4.025s)"
+  # Errors look like this: "failed to copy  ... "
+  if not re.search('^[0-9]', command_output.splitlines()[-1]):
+    logging.critical('PUSH FAILED: ' + command_output)
+    return False
+  return True
+
+
+def GetLogTimestamp(log_line, year):
+  """Returns the timestamp of the given |log_line| in the given year."""
+  try:
+    return datetime.datetime.strptime('%s-%s' % (year, log_line[:18]),
+                                      '%Y-%m-%d %H:%M:%S.%f')
+  except (ValueError, IndexError):
+    logging.critical('Error reading timestamp from ' + log_line)
+    return None
+
+
+class AndroidCommands(object):
+  """Helper class for communicating with Android device via adb."""
+
+  def __init__(self, device=None):
+    """Constructor.
+
+    Args:
+      device: If given, adb commands are sent only to the device with this ID.
+          Otherwise commands are sent to all attached devices.
+    """
+    self._adb = adb_interface.AdbInterface(constants.GetAdbPath())
+    if device:
+      self._adb.SetTargetSerial(device)
+    self._device = device
+    self._logcat = None
+    self.logcat_process = None
+    self._logcat_tmpoutfile = None
+    self._pushed_files = []
+    self._device_utc_offset = None
+    self._potential_push_size = 0
+    self._actual_push_size = 0
+    self._external_storage = ''
+    self._util_wrapper = ''
+    self._system_properties = system_properties.SystemProperties(self.Adb())
+    self._push_if_needed_cache = {}
+    self._control_usb_charging_command = {
+        'command': None,
+        'cached': False,
+    }
+    self._protected_file_access_method_initialized = None
+    self._privileged_command_runner = None
+    self._pie_wrapper = None
+
+  @property
+  def system_properties(self):
+    return self._system_properties
+
+  def _LogShell(self, cmd):
+    """Logs the adb shell command."""
+    if self._device:
+      device_repr = self._device[-4:]
+    else:
+      device_repr = '????'
+    logging.info('[%s]> %s', device_repr, cmd)
+
+  def Adb(self):
+    """Returns our AdbInterface to avoid us wrapping all its methods."""
+    # TODO(tonyg): Goal should be to get rid of this method by making this API
+    # complete and alleviating the need.
+    return self._adb
+
+  def GetDevice(self):
+    """Returns the device serial."""
+    return self._device
+
+  def IsOnline(self):
+    """Checks whether the device is online.
+
+    Returns:
+      True if device is in 'device' mode, False otherwise.
+    """
+    # TODO(aurimas): revert to using adb get-state when android L adb is fixed.
+    #out = self._adb.SendCommand('get-state')
+    #return out.strip() == 'device'
+
+    out = self._adb.SendCommand('devices')
+    for line in out.split('\n'):
+      if self._device in line and 'device' in line:
+        return True
+    return False
+
+  def IsRootEnabled(self):
+    """Checks if root is enabled on the device."""
+    root_test_output = self.RunShellCommand('ls /root') or ['']
+    return 'Permission denied' not in root_test_output[0]
+
+  def EnableAdbRoot(self):
+    """Enables adb root on the device.
+
+    Returns:
+      True: if output from executing adb root was as expected.
+      False: otherwise.
+    """
+    if self.GetBuildType() == 'user':
+      logging.warning("Can't enable root in production builds with type user")
+      return False
+    else:
+      return_value = self._adb.EnableAdbRoot()
+      # EnableAdbRoot inserts a call for wait-for-device only when adb logcat
+      # output matches what is expected. Just to be safe add a call to
+      # wait-for-device.
+      self._adb.SendCommand('wait-for-device')
+      return return_value
+
+  def GetDeviceYear(self):
+    """Returns the year information of the date on device."""
+    return self.RunShellCommand('date +%Y')[0]
+
+  def GetExternalStorage(self):
+    if not self._external_storage:
+      self._external_storage = self.RunShellCommand('echo $EXTERNAL_STORAGE')[0]
+      if not self._external_storage:
+        raise device_errors.CommandFailedError(
+            ['shell', "'echo $EXTERNAL_STORAGE'"],
+            'Unable to find $EXTERNAL_STORAGE')
+    return self._external_storage
+
+  def WaitForDevicePm(self, timeout=120):
+    """Blocks until the device's package manager is available.
+
+    To work around http://b/5201039, we restart the shell and retry if the
+    package manager isn't back after 120 seconds.
+
+    Raises:
+      errors.WaitForResponseTimedOutError after max retries reached.
+    """
+    last_err = None
+    retries = 3
+    while retries:
+      try:
+        self._adb.WaitForDevicePm(wait_time=timeout)
+        return  # Success
+      except errors.WaitForResponseTimedOutError as e:
+        last_err = e
+        logging.warning('Restarting and retrying after timeout: %s', e)
+        retries -= 1
+        self.RestartShell()
+    raise last_err  # Only reached after max retries; re-raise the last error.
+
+  def RestartShell(self):
+    """Restarts the shell on the device. Does not block for it to return."""
+    self.RunShellCommand('stop')
+    self.RunShellCommand('start')
+
+  def Reboot(self, full_reboot=True):
+    """Reboots the device and waits for the package manager to return.
+
+    Args:
+      full_reboot: Whether to fully reboot the device or just restart the shell.
+    """
+    # TODO(torne): hive can't reboot the device either way without breaking the
+    # connection; work out if we can handle this better
+    if os.environ.get('USING_HIVE'):
+      logging.warning('Ignoring reboot request as we are on hive')
+      return
+    if full_reboot or not self.IsRootEnabled():
+      self._adb.SendCommand('reboot')
+      self._system_properties = system_properties.SystemProperties(self.Adb())
+      timeout = 300
+      retries = 1
+      # Wait for the device to disappear.
+      while retries < 10 and self.IsOnline():
+        time.sleep(1)
+        retries += 1
+    else:
+      self.RestartShell()
+      timeout = 120
+    # To run tests we need at least the package manager and the sd card (or
+    # other external storage) to be ready.
+    self.WaitForDevicePm(timeout)
+    self.WaitForSdCardReady(timeout)
+
+  def Shutdown(self):
+    """Shuts down the device."""
+    self._adb.SendCommand('reboot -p')
+    self._system_properties = system_properties.SystemProperties(self.Adb())
+
+  def Uninstall(self, package):
+    """Uninstalls the specified package from the device.
+
+    Args:
+      package: Name of the package to remove.
+
+    Returns:
+      A status string returned by adb uninstall
+    """
+    uninstall_command = 'uninstall %s' % package
+
+    self._LogShell(uninstall_command)
+    return self._adb.SendCommand(uninstall_command, timeout_time=60)
+
+  def Install(self, package_file_path, reinstall=False):
+    """Installs the specified package to the device.
+
+    Args:
+      package_file_path: Path to .apk file to install.
+      reinstall: Reinstall an existing apk, keeping the data.
+
+    Returns:
+      A status string returned by adb install
+    """
+    assert os.path.isfile(package_file_path), ('<%s> is not a file' %
+                                               package_file_path)
+
+    install_cmd = ['install']
+
+    if reinstall:
+      install_cmd.append('-r')
+
+    install_cmd.append(package_file_path)
+    install_cmd = ' '.join(install_cmd)
+
+    self._LogShell(install_cmd)
+    return self._adb.SendCommand(install_cmd,
+                                 timeout_time=2 * 60,
+                                 retry_count=0)
+
+  def ManagedInstall(self, apk_path, keep_data=False, package_name=None,
+                     reboots_on_timeout=2):
+    """Installs specified package and reboots device on timeouts.
+
+    If package_name is supplied, checks if the package is already installed and
+    doesn't reinstall if the apk md5sums match.
+
+    Args:
+      apk_path: Path to .apk file to install.
+      keep_data: Reinstalls instead of uninstalling first, preserving the
+        application data.
+      package_name: Package name (only needed if keep_data=False).
+      reboots_on_timeout: number of times to reboot if the package manager
+        freezes.
+    """
+    # Check if package is already installed and up to date.
+    if package_name:
+      installed_apk_path = self.GetApplicationPath(package_name)
+      if (installed_apk_path and
+          not self.GetFilesChanged(apk_path, installed_apk_path,
+                                   ignore_filenames=True)):
+        logging.info('Skipped install: identical %s APK already installed' %
+            package_name)
+        return
+    # Install.
+    reboots_left = reboots_on_timeout
+    while True:
+      try:
+        if not keep_data:
+          assert package_name
+          self.Uninstall(package_name)
+        install_status = self.Install(apk_path, reinstall=keep_data)
+        if 'Success' in install_status:
+          return
+        else:
+          raise Exception('Install failure: %s' % install_status)
+      except errors.WaitForResponseTimedOutError:
+        print '@@@STEP_WARNINGS@@@'
+        logging.info('Timeout on installing %s on device %s', apk_path,
+                     self._device)
+
+        if reboots_left <= 0:
+          raise Exception('Install timed out')
+
+        # Force a hard reboot on last attempt
+        self.Reboot(full_reboot=(reboots_left == 1))
+        reboots_left -= 1
+
+  def MakeSystemFolderWritable(self):
+    """Remounts the /system folder rw."""
+    out = self._adb.SendCommand('remount')
+    if out.strip() != 'remount succeeded':
+      raise errors.MsgException('Remount failed: %s' % out)
+
+  def RestartAdbdOnDevice(self):
+    logging.info('Restarting adbd on the device...')
+    with DeviceTempFile(self, suffix=".sh") as temp_script_file:
+      host_script_path = os.path.join(constants.DIR_SOURCE_ROOT,
+                                      'build',
+                                      'android',
+                                      'pylib',
+                                      'restart_adbd.sh')
+      self._adb.Push(host_script_path, temp_script_file.name)
+      self.RunShellCommand('. %s' % temp_script_file.name)
+      self._adb.SendCommand('wait-for-device')
+
+  def RestartAdbServer(self):
+    """Restart the adb server."""
+    ret = self.KillAdbServer()
+    if ret != 0:
+      raise errors.MsgException('KillAdbServer: %d' % ret)
+
+    ret = self.StartAdbServer()
+    if ret != 0:
+      raise errors.MsgException('StartAdbServer: %d' % ret)
+
+  @staticmethod
+  def KillAdbServer():
+    """Kill adb server."""
+    adb_cmd = [constants.GetAdbPath(), 'kill-server']
+    ret = cmd_helper.RunCmd(adb_cmd)
+    retry = 0
+    while retry < 3:
+      ret, _ = cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb'])
+      if ret != 0:
+        # pgrep didn't find adb, kill-server succeeded.
+        return 0
+      retry += 1
+      time.sleep(retry)
+    return ret
+
+  def StartAdbServer(self):
+    """Start adb server."""
+    adb_cmd = ['taskset', '-c', '0', constants.GetAdbPath(), 'start-server']
+    ret, _ = cmd_helper.GetCmdStatusAndOutput(adb_cmd)
+    retry = 0
+    while retry < 3:
+      ret, _ = cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb'])
+      if ret == 0:
+        # pgrep found adb, start-server succeeded.
+        # Waiting for device to reconnect before returning success.
+        self._adb.SendCommand('wait-for-device')
+        return 0
+      retry += 1
+      time.sleep(retry)
+    return ret
+
+  def WaitForSystemBootCompleted(self, wait_time):
+    """Waits for targeted system's boot_completed flag to be set.
+
+    Args:
+      wait_time: time in seconds to wait
+
+    Raises:
+      WaitForResponseTimedOutError if wait_time elapses and flag still not
+      set.
+    """
+    logging.info('Waiting for system boot completed...')
+    self._adb.SendCommand('wait-for-device')
+    # The device is now visible, but the system may not have finished booting.
+    # Poll the sys.boot_completed property until it is set.
+    boot_completed = False
+    attempts = 0
+    wait_period = 5
+    while not boot_completed and (attempts * wait_period) < wait_time:
+      output = self.system_properties['sys.boot_completed']
+      output = output.strip()
+      if output == '1':
+        boot_completed = True
+      else:
+        # If 'error: xxx' returned when querying the flag, it means
+        # adb server lost the connection to the emulator, so restart the adb
+        # server.
+        if 'error:' in output:
+          self.RestartAdbServer()
+        time.sleep(wait_period)
+        attempts += 1
+    if not boot_completed:
+      raise errors.WaitForResponseTimedOutError(
+          'sys.boot_completed flag was not set after %s seconds' % wait_time)
+
+  def WaitForSdCardReady(self, timeout_time):
+    """Wait for the SD card ready before pushing data into it."""
+    logging.info('Waiting for SD card ready...')
+    sdcard_ready = False
+    attempts = 0
+    wait_period = 5
+    external_storage = self.GetExternalStorage()
+    while not sdcard_ready and attempts * wait_period < timeout_time:
+      output = self.RunShellCommand('ls ' + external_storage)
+      if output:
+        sdcard_ready = True
+      else:
+        time.sleep(wait_period)
+        attempts += 1
+    if not sdcard_ready:
+      raise errors.WaitForResponseTimedOutError(
+          'SD card not ready after %s seconds' % timeout_time)
+
+  def GetAndroidToolStatusAndOutput(self, command, lib_path=None, *args, **kw):
+    """Runs a native Android binary, wrapping the command as necessary.
+
+    This is a specialization of GetShellCommandStatusAndOutput, meant for
+    running tools/android/ binaries. It properly handles: (1) setting the
+    lib path (for component=shared_library), (2) using the PIE wrapper on ICS.
+    See crbug.com/373219 for more context.
+
+    Args:
+      command: String containing the command to send.
+      lib_path: (optional) path to the folder containing the dependent libs.
+      Other arguments are the same as for GetCmdStatusAndOutput.
+    """
+    # The first time this command is run the device is inspected to check
+    # whether a wrapper for running PIE executables is needed (Android ICS
+    # only). The result is cached, so the wrapper is pushed at most once.
+    if self._pie_wrapper is None:
+      # None: did not check; '': did check and not needed; '/path': use /path.
+      self._pie_wrapper = ''
+      if self.GetBuildId().startswith('I'):  # Ixxxx = Android ICS.
+        run_pie_dist_path = os.path.join(constants.GetOutDirectory(), 'run_pie')
+        assert os.path.exists(run_pie_dist_path), 'Please build run_pie'
+        # The PIE loader must be pushed manually (i.e. no PushIfNeeded) because
+        # PushIfNeeded requires md5sum and md5sum requires the wrapper as well.
+        adb_command = 'push %s %s' % (run_pie_dist_path, PIE_WRAPPER_PATH)
+        assert _HasAdbPushSucceeded(self._adb.SendCommand(adb_command))
+        self._pie_wrapper = PIE_WRAPPER_PATH
+
+    if self._pie_wrapper:
+      command = '%s %s' % (self._pie_wrapper, command)
+    if lib_path:
+      command = 'LD_LIBRARY_PATH=%s %s' % (lib_path, command)
+    return self.GetShellCommandStatusAndOutput(command, *args, **kw)
+
+  # It is tempting to turn this function into a generator; however, this is not
+  # possible without using a private (local) adb_shell instance (to ensure no
+  # other command interleaves usage of it), which would defeat the main aim of
+  # being able to reuse the adb shell instance across commands.
+  def RunShellCommand(self, command, timeout_time=20, log_result=False):
+    """Send a command to the adb shell and return the result.
+
+    Args:
+      command: String containing the shell command to send.
+      timeout_time: Number of seconds to wait for command to respond before
+        retrying, used by AdbInterface.SendShellCommand.
+      log_result: Boolean to indicate whether we should log the result of the
+                  shell command.
+
+    Returns:
+      list containing the lines of output received from running the command
+    """
+    self._LogShell(command)
+    if "'" in command:
+      command = command.replace('\'', '\'\\\'\'')
+    result = self._adb.SendShellCommand(
+        "'%s'" % command, timeout_time).splitlines()
+    # TODO(b.kelemen): we should really be able to drop the stderr of the
+    # command or raise an exception based on what the caller wants.
+    result = [l for l in result if not l.startswith('WARNING')]
+    if ['error: device not found'] == result:
+      raise errors.DeviceUnresponsiveError('device not found')
+    if log_result:
+      self._LogShell('\n'.join(result))
+    return result
+
+  def GetShellCommandStatusAndOutput(self, command, timeout_time=20,
+                                     log_result=False):
+    """See RunShellCommand() above.
+
+    Returns:
+      The tuple (exit code, list of output lines).
+    """
+    lines = self.RunShellCommand(
+        command + '; echo %$?', timeout_time, log_result)
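+    # The trailing 'echo %$?' appends a '%'-prefixed exit status (e.g. '%0')
+    # as the last line of output; it is parsed back off below.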
+    last_line = lines[-1]
+    status_pos = last_line.rfind('%')
+    assert status_pos >= 0
+    status = int(last_line[status_pos + 1:])
+    if status_pos == 0:
+      lines = lines[:-1]
+    else:
+      lines = lines[:-1] + [last_line[:status_pos]]
+    return (status, lines)
+
+  def KillAll(self, process, signum=9, with_su=False):
+    """Android version of killall, connected via adb.
+
+    Args:
+      process: name of the process to kill off.
+      signum: signal to use, 9 (SIGKILL) by default.
+      with_su: whether or not to use su to kill the processes.
+
+    Returns:
+      the number of processes killed
+    """
+    pids = self.ExtractPid(process)
+    if pids:
+      cmd = 'kill -%d %s' % (signum, ' '.join(pids))
+      if with_su:
+        self.RunShellCommandWithSU(cmd)
+      else:
+        self.RunShellCommand(cmd)
+    return len(pids)
+
+  def KillAllBlocking(self, process, timeout_sec, signum=9, with_su=False):
+    """Blocking version of killall, connected via adb.
+
+    This waits until no process matching the corresponding name appears in ps'
+    output anymore.
+
+    Args:
+      process: name of the process to kill off
+      timeout_sec: the timeout in seconds
+      signum: same as |KillAll|
+      with_su: same as |KillAll|
+    Returns:
+      the number of processes killed
+    """
+    processes_killed = self.KillAll(process, signum=signum, with_su=with_su)
+    if processes_killed:
+      elapsed = 0
+      wait_period = 0.1
+      # Note that this doesn't take into account the time spent in ExtractPid().
+      while self.ExtractPid(process) and elapsed < timeout_sec:
+        time.sleep(wait_period)
+        elapsed += wait_period
+      if elapsed >= timeout_sec:
+        return processes_killed - len(self.ExtractPid(process))
+    return processes_killed
+
+  @staticmethod
+  def _GetActivityCommand(package, activity, wait_for_completion, action,
+                          category, data, extras, trace_file_name, force_stop,
+                          flags):
+    """Creates command to start |package|'s activity on the device.
+
+    Args - as for StartActivity
+
+    Returns:
+      the command to run on the target to start the activity
+    """
+    cmd = 'am start -a %s' % action
+    if force_stop:
+      cmd += ' -S'
+    if wait_for_completion:
+      cmd += ' -W'
+    if category:
+      cmd += ' -c %s' % category
+    if package and activity:
+      cmd += ' -n %s/%s' % (package, activity)
+    if data:
+      cmd += ' -d "%s"' % data
+    if extras:
+      for key in extras:
+        value = extras[key]
+        if isinstance(value, str):
+          cmd += ' --es'
+        elif isinstance(value, bool):
+          cmd += ' --ez'
+        elif isinstance(value, int):
+          cmd += ' --ei'
+        else:
+          raise NotImplementedError(
+              'Need to teach StartActivity how to pass %s extras' % type(value))
+        cmd += ' %s %s' % (key, value)
+    if trace_file_name:
+      cmd += ' --start-profiler ' + trace_file_name
+    if flags:
+      cmd += ' -f %s' % flags
+    return cmd
+
+  def StartActivity(self, package, activity, wait_for_completion=False,
+                    action='android.intent.action.VIEW',
+                    category=None, data=None,
+                    extras=None, trace_file_name=None,
+                    force_stop=False, flags=None):
+    """Starts |package|'s activity on the device.
+
+    Args:
+      package: Name of package to start (e.g. 'com.google.android.apps.chrome').
+      activity: Name of activity (e.g. '.Main' or
+        'com.google.android.apps.chrome.Main').
+      wait_for_completion: wait for the activity to finish launching (-W flag).
+      action: string (e.g. "android.intent.action.MAIN"). Default is VIEW.
+      category: string (e.g. "android.intent.category.HOME")
+      data: Data string to pass to activity (e.g. 'http://www.example.com/').
+      extras: Dict of extras to pass to the activity. Value types determine
+        the extra flag used (--es for str, --ez for bool, --ei for int).
+      trace_file_name: If used, turns on and saves the trace to this file name.
+      force_stop: force stop the target app before starting the activity (-S
+        flag).
+      flags: Intent flags to pass to the activity (-f flag).
+    Returns:
+      The output of the underlying command as a list of lines.
+    """
+    cmd = self._GetActivityCommand(package, activity, wait_for_completion,
+                                   action, category, data, extras,
+                                   trace_file_name, force_stop, flags)
+    return self.RunShellCommand(cmd)
+
+  def StartActivityTimed(self, package, activity, wait_for_completion=False,
+                         action='android.intent.action.VIEW',
+                         category=None, data=None,
+                         extras=None, trace_file_name=None,
+                         force_stop=False, flags=None):
+    """Starts |package|'s activity on the device, returning the start time
+
+    Args - as for StartActivity
+
+    Returns:
+      A tuple containing:
+        - the output of the underlying command as a list of lines, and
+        - a timestamp string for the time at which the activity started
+    """
+    cmd = self._GetActivityCommand(package, activity, wait_for_completion,
+                                   action, category, data, extras,
+                                   trace_file_name, force_stop, flags)
+    self.StartMonitoringLogcat()
+    out = self.RunShellCommand('log starting activity; ' + cmd)
+    activity_started_re = re.compile('.*starting activity.*')
+    m = self.WaitForLogMatch(activity_started_re, None)
+    assert m
+    start_line = m.group(0)
+    return (out, GetLogTimestamp(start_line, self.GetDeviceYear()))
+
+  def StartCrashUploadService(self, package):
+    # TODO(frankf): We really need a python wrapper around Intent
+    # to be shared with StartActivity/BroadcastIntent.
+    cmd = (
+      'am startservice -a %s.crash.ACTION_FIND_ALL -n '
+      '%s/%s.crash.MinidumpUploadService' %
+      (constants.PACKAGE_INFO['chrome'].package,
+       package,
+       constants.PACKAGE_INFO['chrome'].package))
+    am_output = self.RunShellCommandWithSU(cmd)
+    assert am_output and 'Starting' in am_output[-1], (
+        'Service failed to start: %s' % am_output)
+    time.sleep(15)
+
+  def BroadcastIntent(self, package, intent, *args):
+    """Send a broadcast intent.
+
+    Args:
+      package: Name of package containing the intent.
+      intent: Name of the intent.
+      args: Optional extra arguments for the intent.
+    """
+    cmd = 'am broadcast -a %s.%s %s' % (package, intent, ' '.join(args))
+    self.RunShellCommand(cmd)
+
+  def GoHome(self):
+    """Tell the device to return to the home screen. Blocks until completion."""
+    self.RunShellCommand('am start -W '
+        '-a android.intent.action.MAIN -c android.intent.category.HOME')
+
+  def CloseApplication(self, package):
+    """Attempt to close down the application, using increasing violence.
+
+    Args:
+      package: Name of the process to kill off, e.g.
+      com.google.android.apps.chrome
+    """
+    self.RunShellCommand('am force-stop ' + package)
+
+  def GetApplicationPath(self, package):
+    """Get the installed apk path on the device for the given package.
+
+    Args:
+      package: Name of the package.
+
+    Returns:
+      Path to the apk on the device if it exists, None otherwise.
+    """
+    pm_path_output = self.RunShellCommand('pm path ' + package)
+    # The path output is non-empty if and only if the package exists.
+    if pm_path_output:
+      # pm_path_output is of the form: "package:/path/to/foo.apk"
+      return pm_path_output[0].split(':')[1]
+    else:
+      return None
+
+  def ClearApplicationState(self, package):
+    """Closes and clears all state for the given |package|."""
+    # Check that the package exists before clearing it. Necessary because
+    # calling pm clear on a package that doesn't exist may never return.
+    pm_path_output = self.RunShellCommand('pm path ' + package)
+    # The path output is non-empty if and only if the package exists.
+    if pm_path_output:
+      self.RunShellCommand('pm clear ' + package)
+
+  def SendKeyEvent(self, keycode):
+    """Sends keycode to the device.
+
+    Args:
+      keycode: Numeric keycode to send (see "enum" at top of file).
+    """
+    self.RunShellCommand('input keyevent %d' % keycode)
+
+  def _RunMd5Sum(self, host_path, device_path):
+    """Gets the md5sum of a host path and device path.
+
+    Args:
+      host_path: Path (file or directory) on the host.
+      device_path: Path on the device.
+
+    Returns:
+      A tuple containing lists of the host and device md5sum results as
+      created by _ParseMd5SumOutput().
+    """
+    md5sum_dist_path = os.path.join(constants.GetOutDirectory(),
+                                    'md5sum_dist')
+    assert os.path.exists(md5sum_dist_path), 'Please build md5sum.'
+    md5sum_dist_mtime = os.stat(md5sum_dist_path).st_mtime
+    if (md5sum_dist_path not in self._push_if_needed_cache or
+        self._push_if_needed_cache[md5sum_dist_path] != md5sum_dist_mtime):
+      command = 'push %s %s' % (md5sum_dist_path, MD5SUM_DEVICE_FOLDER)
+      assert _HasAdbPushSucceeded(self._adb.SendCommand(command))
+      self._push_if_needed_cache[md5sum_dist_path] = md5sum_dist_mtime
+
+    (_, md5_device_output) = self.GetAndroidToolStatusAndOutput(
+        self._util_wrapper + ' ' + MD5SUM_DEVICE_PATH + ' ' + device_path,
+        lib_path=MD5SUM_DEVICE_FOLDER,
+        timeout_time=2 * 60)
+    device_hash_tuples = _ParseMd5SumOutput(md5_device_output)
+    assert os.path.exists(host_path), 'Local path not found %s' % host_path
+    md5sum_output = cmd_helper.GetCmdOutput(
+        [os.path.join(constants.GetOutDirectory(), 'md5sum_bin_host'),
+         host_path])
+    host_hash_tuples = _ParseMd5SumOutput(md5sum_output.splitlines())
+    return (host_hash_tuples, device_hash_tuples)
+
+  def GetFilesChanged(self, host_path, device_path, ignore_filenames=False):
+    """Compares the md5sum of a host path against a device path.
+
+    Note: Ignores extra files on the device.
+
+    Args:
+      host_path: Path (file or directory) on the host.
+      device_path: Path on the device.
+      ignore_filenames: If True only the file contents are considered when
+          checking whether a file has changed, otherwise the relative path
+          must also match.
+
+    Returns:
+      A list of tuples of the form (host_path, device_path) for files whose
+      md5sums do not match.
+    """
+
+    # Md5Sum resolves symbolic links in path names so the calculation of
+    # relative path names from its output will need the real path names of the
+    # base directories. Having calculated these they are used throughout the
+    # function since this makes us less subject to any future changes to Md5Sum.
+    real_host_path = os.path.realpath(host_path)
+    real_device_path = self.RunShellCommand('realpath "%s"' % device_path)[0]
+
+    host_hash_tuples, device_hash_tuples = self._RunMd5Sum(
+        real_host_path, real_device_path)
+
+    if len(host_hash_tuples) > len(device_hash_tuples):
+      logging.info('%s files do not exist on the device' %
+                   (len(host_hash_tuples) - len(device_hash_tuples)))
+
+    host_rel = [(os.path.relpath(os.path.normpath(t.path), real_host_path),
+                 t.hash)
+                for t in host_hash_tuples]
+
+    if os.path.isdir(real_host_path):
+      def RelToRealPaths(rel_path):
+        return (os.path.join(real_host_path, rel_path),
+                os.path.join(real_device_path, rel_path))
+    else:
+      assert len(host_rel) == 1
+      def RelToRealPaths(_):
+        return (real_host_path, real_device_path)
+
+    if ignore_filenames:
+      # If we are ignoring file names, then we want to push any file for which
+      # a file with an equivalent MD5 sum does not exist on the device.
+      device_hashes = set([h.hash for h in device_hash_tuples])
+      ShouldPush = lambda p, h: h not in device_hashes
+    else:
+      # Otherwise, we want to push any file on the host for which a file with
+      # an equivalent MD5 sum does not exist at the same relative path on the
+      # device.
+      device_rel = dict([(os.path.relpath(os.path.normpath(t.path),
+                                          real_device_path),
+                          t.hash)
+                         for t in device_hash_tuples])
+      ShouldPush = lambda p, h: p not in device_rel or h != device_rel[p]
+
+    return [RelToRealPaths(path) for path, host_hash in host_rel
+            if ShouldPush(path, host_hash)]
+
+  def PushIfNeeded(self, host_path, device_path):
+    """Pushes |host_path| to |device_path|.
+
+    Works for files and directories. This method skips copying any files
+    under |host_path| that already exist on the device with the same hash.
+
+    All pushed files can be removed by calling RemovePushedFiles().
+    """
+    MAX_INDIVIDUAL_PUSHES = 50
+    if not os.path.exists(host_path):
+      raise device_errors.CommandFailedError(
+          'Local path not found %s' % host_path, device=str(self))
+
+    # See if the file on the host changed since the last push (if any) and
+    # return early if it didn't. Note that this shortcut assumes that the tests
+    # on the device don't modify the files.
+    if not os.path.isdir(host_path):
+      if host_path in self._push_if_needed_cache:
+        host_path_mtime = self._push_if_needed_cache[host_path]
+        if host_path_mtime == os.stat(host_path).st_mtime:
+          return
+
+    size = host_utils.GetRecursiveDiskUsage(host_path)
+    self._pushed_files.append(device_path)
+    self._potential_push_size += size
+
+    if os.path.isdir(host_path):
+      self.RunShellCommand('mkdir -p "%s"' % device_path)
+
+    changed_files = self.GetFilesChanged(host_path, device_path)
+    logging.info('Found %d files that need to be pushed to %s',
+        len(changed_files), device_path)
+    if not changed_files:
+      return
+
+    def Push(host, device):
+      # NOTE: We can't use adb_interface.Push() because it hardcodes a timeout
+      # of 60 seconds which isn't sufficient for a lot of users of this method.
+      push_command = 'push %s %s' % (host, device)
+      self._LogShell(push_command)
+
+      # Retry push with increasing backoff if the device is busy.
+      retry = 0
+      while True:
+        output = self._adb.SendCommand(push_command, timeout_time=30 * 60)
+        if _HasAdbPushSucceeded(output):
+          if not os.path.isdir(host_path):
+            self._push_if_needed_cache[host] = os.stat(host).st_mtime
+          return
+        if retry < 3:
+          retry += 1
+          wait_time = 5 * retry
+          logging.error('Push failed, retrying in %d seconds: %s' %
+                        (wait_time, output))
+          time.sleep(wait_time)
+        else:
+          raise Exception('Push failed: %s' % output)
+
+    diff_size = 0
+    if len(changed_files) <= MAX_INDIVIDUAL_PUSHES:
+      diff_size = sum(host_utils.GetRecursiveDiskUsage(f[0])
+                      for f in changed_files)
+
+    # TODO(craigdh): Replace this educated guess with a heuristic that
+    # approximates the push time for each method.
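+    # Heuristic: push the whole tree in one 'adb push' when many files changed
+    # or the changed subset exceeds half the total size; otherwise push only
+    # the changed files individually.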
+    if len(changed_files) > MAX_INDIVIDUAL_PUSHES or diff_size > 0.5 * size:
+      self._actual_push_size += size
+      Push(host_path, device_path)
+    else:
+      for f in changed_files:
+        Push(f[0], f[1])
+      self._actual_push_size += diff_size
+
+  def GetPushSizeInfo(self):
+    """Get total size of pushes to the device done via PushIfNeeded()
+
+    Returns:
+      A tuple:
+        1. Total size of push requests to PushIfNeeded (MB)
+        2. Total size that was actually pushed (MB)
+    """
+    return (self._potential_push_size, self._actual_push_size)
+
+  def GetFileContents(self, filename, log_result=False):
+    """Gets contents from the file specified by |filename|."""
+    return self.RunShellCommand('cat "%s" 2>/dev/null' % filename,
+                                log_result=log_result)
+
+  def SetFileContents(self, filename, contents):
+    """Writes |contents| to the file specified by |filename|."""
+    with tempfile.NamedTemporaryFile() as f:
+      f.write(contents)
+      f.flush()
+      self._adb.Push(f.name, filename)
+
+  def RunShellCommandWithSU(self, command, timeout_time=20, log_result=False):
+    return self.RunShellCommand('su -c %s' % command, timeout_time, log_result)
+
+  def CanAccessProtectedFileContents(self):
+    """Returns True if Get/SetProtectedFileContents would work via "su" or adb
+    shell running as root.
+
+    Devices running user builds don't have adb root, but may provide "su" which
+    can be used for accessing protected files.
+    """
+    return self._GetProtectedFileCommandRunner() is not None
+
+  def _GetProtectedFileCommandRunner(self):
+    """Finds the best method to access protected files on the device.
+
+    Returns:
+      1. None when privileged files cannot be accessed on the device.
+      2. Otherwise: A function taking a single parameter: a string with command
+         line arguments. Running that function executes the command with
+         the appropriate method.
+    """
+    if self._protected_file_access_method_initialized:
+      return self._privileged_command_runner
+
+    self._privileged_command_runner = None
+    self._protected_file_access_method_initialized = True
+
+    for cmd in [self.RunShellCommand, self.RunShellCommandWithSU]:
+      # Get contents of the auxv vector for the init(8) process from a small
+      # binary file that always exists on Linux and is always read-protected.
+      contents = cmd('cat /proc/1/auxv')
+      # The leading 4 or 8-bytes of auxv vector is a_type. There are not many
+      # reserved a_type values, hence byte 2 must always be '\0' for a realistic
+      # auxv. See /usr/include/elf.h.
+      if contents and contents[0][2] == '\0':
+        self._privileged_command_runner = cmd
+        break
+    return self._privileged_command_runner
+
+  def GetProtectedFileContents(self, filename):
+    """Gets contents from the protected file specified by |filename|.
+
+    This is potentially less efficient than GetFileContents.
+    """
+    command = 'cat "%s" 2> /dev/null' % filename
+    command_runner = self._GetProtectedFileCommandRunner()
+    if command_runner:
+      return command_runner(command)
+    else:
+      logging.warning('Could not access protected file: %s' % filename)
+      return []
+
+  def SetProtectedFileContents(self, filename, contents):
+    """Writes |contents| to the protected file specified by |filename|.
+
+    This is less efficient than SetFileContents.
+    """
+    with DeviceTempFile(self) as temp_file:
+      with DeviceTempFile(self, suffix=".sh") as temp_script:
+        # Put the contents in a temporary file
+        self.SetFileContents(temp_file.name, contents)
+        # Create a script to copy the file contents to its final destination
+        self.SetFileContents(temp_script.name,
+                             'cat %s > %s' % (temp_file.name, filename))
+
+        command = 'sh %s' % temp_script.name
+        command_runner = self._GetProtectedFileCommandRunner()
+        if command_runner:
+          return command_runner(command)
+        else:
+          logging.warning(
+              'Could not set contents of protected file: %s' % filename)
+
+  def RemovePushedFiles(self):
+    """Removes all files pushed with PushIfNeeded() from the device."""
+    for p in self._pushed_files:
+      self.RunShellCommand('rm -r %s' % p, timeout_time=2 * 60)
+
+  def ListPathContents(self, path):
+    """Lists files in all subdirectories of |path|.
+
+    Args:
+      path: The path to list.
+
+    Returns:
+      A dict of {"name": (size, lastmod), ...}.
+    """
+    # Example output:
+    # /foo/bar:
+    # -rw-r----- user group   102 2011-05-12 12:29:54.131623387 +0100 baz.txt
+    re_file = re.compile('^-(?P<perms>[^\s]+)\s+'
+                         '(?P<user>[^\s]+)\s+'
+                         '(?P<group>[^\s]+)\s+'
+                         '(?P<size>[^\s]+)\s+'
+                         '(?P<date>[^\s]+)\s+'
+                         '(?P<time>[^\s]+)\s+'
+                         '(?P<filename>[^\s]+)$')
+    return _GetFilesFromRecursiveLsOutput(
+        path, self.RunShellCommand('ls -lR %s' % path), re_file,
+        self.GetUtcOffset())
+
+  def GetUtcOffset(self):
+    if not self._device_utc_offset:
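+      # 'date +%z' prints the UTC offset in +HHMM/-HHMM form, e.g. '-0700'.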
+      self._device_utc_offset = self.RunShellCommand('date +%z')[0]
+    return self._device_utc_offset
+
+  def SetJavaAssertsEnabled(self, enable):
+    """Sets or removes the device java assertions property.
+
+    Args:
+      enable: If True the property will be set.
+
+    Returns:
+      True if the file was modified (reboot is required for it to take effect).
+    """
+    # First ensure the desired property is persisted.
+    temp_props_file = tempfile.NamedTemporaryFile()
+    properties = ''
+    if self._adb.Pull(LOCAL_PROPERTIES_PATH, temp_props_file.name):
+      with open(temp_props_file.name) as f:
+        properties = f.read()
+    re_search = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+                           r'\s*=\s*all\s*$', re.MULTILINE)
+    if enable != bool(re.search(re_search, properties)):
+      re_replace = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+                              r'\s*=\s*\w+\s*$', re.MULTILINE)
+      properties = re.sub(re_replace, '', properties)
+      if enable:
+        properties += '\n%s=all\n' % JAVA_ASSERT_PROPERTY
+
+      # Use a context manager so the handle is flushed and closed before the
+      # push below (rather than the legacy file() builtin, which relied on
+      # refcounting to close the handle).
+      with open(temp_props_file.name, 'w') as f:
+        f.write(properties)
+      self._adb.Push(temp_props_file.name, LOCAL_PROPERTIES_PATH)
+
+    # Next, check the current runtime value is what we need, and
+    # if not, set it and report that a reboot is required.
+    was_set = 'all' in self.system_properties[JAVA_ASSERT_PROPERTY]
+    if was_set == enable:
+      return False
+    self.system_properties[JAVA_ASSERT_PROPERTY] = 'all' if enable else ''
+    return True
+
+  def GetBuildId(self):
+    """Returns the build ID of the system (e.g. JRM79C)."""
+    build_id = self.system_properties['ro.build.id']
+    assert build_id
+    return build_id
+
+  def GetBuildType(self):
+    """Returns the build type of the system (e.g. eng)."""
+    build_type = self.system_properties['ro.build.type']
+    assert build_type
+    return build_type
+
+  def GetBuildProduct(self):
+    """Returns the build product of the device (e.g. maguro)."""
+    build_product = self.system_properties['ro.build.product']
+    assert build_product
+    return build_product
+
+  def GetProductName(self):
+    """Returns the product name of the device (e.g. takju)."""
+    name = self.system_properties['ro.product.name']
+    assert name
+    return name
+
+  def GetBuildFingerprint(self):
+    """Returns the build fingerprint of the device."""
+    build_fingerprint = self.system_properties['ro.build.fingerprint']
+    assert build_fingerprint
+    return build_fingerprint
+
+  def GetDescription(self):
+    """Returns the description of the system.
+
+    For example, "yakju-userdebug 4.1 JRN54F 364167 dev-keys".
+    """
+    description = self.system_properties['ro.build.description']
+    assert description
+    return description
+
+  def GetProductModel(self):
+    """Returns the name of the product model (e.g. "Galaxy Nexus") """
+    model = self.system_properties['ro.product.model']
+    assert model
+    return model
+
+  def GetWifiIP(self):
+    """Returns the wifi IP on the device."""
+    wifi_ip = self.system_properties['dhcp.wlan0.ipaddress']
+    # Do not assert here. Devices (e.g. emulators) may not have a WifiIP.
+    return wifi_ip
+
+  def GetSubscriberInfo(self):
+    """Returns the device subscriber info (e.g. GSM and device ID) as string."""
+    iphone_sub = self.RunShellCommand('dumpsys iphonesubinfo')
+    # Do not assert here. Devices (e.g. Nakasi on K) may not have iphonesubinfo.
+    return '\n'.join(iphone_sub)
+
+  def GetBatteryInfo(self):
+    """Returns a {str: str} dict of battery info (e.g. status, level, etc)."""
+    battery = self.RunShellCommand('dumpsys battery')
+    assert battery
+    battery_info = {}
+    for line in battery[1:]:
+      k, _, v = line.partition(': ')
+      battery_info[k.strip()] = v.strip()
+    return battery_info
+
+  def GetSetupWizardStatus(self):
+    """Returns the status of the device setup wizard (e.g. DISABLED)."""
+    status = self.system_properties['ro.setupwizard.mode']
+    # On some devices, the status is empty if not otherwise set. In such cases
+    # the caller should expect an empty string to be returned.
+    return status
+
+  def StartMonitoringLogcat(self, clear=True, logfile=None, filters=None):
+    """Starts monitoring the output of logcat, for use with WaitForLogMatch.
+
+    Args:
+      clear: If True, the existing logcat output will be cleared, to avoid
+             matching historical output lurking in the log.
+      logfile: Optional file-like object that the logcat output is also
+               written to.
+      filters: A list of logcat filters to be used.
+    """
+    if clear:
+      self.RunShellCommand('logcat -c')
+    args = []
+    if self._adb._target_arg:
+      args += shlex.split(self._adb._target_arg)
+    args += ['logcat', '-v', 'threadtime']
+    if filters:
+      args.extend(filters)
+    else:
+      args.append('*:v')
+
+    if logfile:
+      logfile = NewLineNormalizer(logfile)
+
+    # Spawn logcat and synchronize with it.
+    for _ in range(4):
+      self._logcat = pexpect.spawn(constants.GetAdbPath(), args, timeout=10,
+                                   logfile=logfile)
+      if not clear or self.SyncLogCat():
+        break
+      self._logcat.close(force=True)
+    else:
+      logging.critical('Error reading from logcat: ' + str(self._logcat.match))
+      sys.exit(1)
+
+  def SyncLogCat(self):
+    """Synchronize with logcat.
+
+    Synchronize with the monitored logcat so that WaitForLogMatch will only
+    consider new messages received after this point in time.
+
+    Returns:
+      True if the synchronization succeeded.
+    """
+    assert self._logcat
+    tag = 'logcat_sync_%s' % time.time()
+    self.RunShellCommand('log ' + tag)
+    return self._logcat.expect([tag, pexpect.EOF, pexpect.TIMEOUT]) == 0
+
+  def GetMonitoredLogCat(self):
+    """Returns an "adb logcat" command as created by pexpected.spawn."""
+    if not self._logcat:
+      self.StartMonitoringLogcat(clear=False)
+    return self._logcat
+
+  def WaitForLogMatch(self, success_re, error_re, clear=False, timeout=10):
+    """Blocks until a matching line is logged or a timeout occurs.
+
+    Args:
+      success_re: A compiled re to search each line for.
+      error_re: A compiled re which, if found, terminates the search for
+          |success_re|. If None is given, no error condition will be detected.
+      clear: If True, the existing logcat output will be cleared. Defaults to
+          False.
+      timeout: Timeout in seconds to wait for a log match.
+
+    Raises:
+      pexpect.TIMEOUT after |timeout| seconds without a match for |success_re|
+      or |error_re|.
+
+    Returns:
+      The re match object if |success_re| is matched first or None if |error_re|
+      is matched first.
+    """
+    logging.info('<<< Waiting for logcat:' + str(success_re.pattern))
+    t0 = time.time()
+    while True:
+      if not self._logcat:
+        self.StartMonitoringLogcat(clear)
+      try:
+        while True:
+          # Note this will block for up to the timeout _per log line_, so we
+          # need to calculate the overall timeout remaining since t0.
+          time_remaining = t0 + timeout - time.time()
+          if time_remaining < 0:
+            raise pexpect.TIMEOUT(self._logcat)
+          self._logcat.expect(PEXPECT_LINE_RE, timeout=time_remaining)
+          line = self._logcat.match.group(1)
+          if error_re:
+            error_match = error_re.search(line)
+            if error_match:
+              return None
+          success_match = success_re.search(line)
+          if success_match:
+            return success_match
+          logging.info('<<< Skipped Logcat Line:' + str(line))
+      except pexpect.TIMEOUT:
+        raise pexpect.TIMEOUT(
+            'Timeout (%ds) exceeded waiting for pattern "%s" (tip: use -vv '
+            'to debug)' %
+            (timeout, success_re.pattern))
+      except pexpect.EOF:
+        # Logcat sometimes ends unexpectedly; this has been observed during
+        # Chrome startup after a reboot followed by a cache clean. The cause
+        # is unknown, so handle the EOF by restarting logcat.
+        logging.critical('Found EOF in adb logcat. Restarting...')
+        # Rerun spawn with original arguments. Note that self._logcat.args[0] is
+        # the path of adb, so we don't want it in the arguments.
+        self._logcat = pexpect.spawn(constants.GetAdbPath(),
+                                     self._logcat.args[1:],
+                                     timeout=self._logcat.timeout,
+                                     logfile=self._logcat.logfile)
+
+  def StartRecordingLogcat(self, clear=True, filters=None):
+    """Starts recording logcat output to eventually be saved as a string.
+
+    This call should come before some series of tests are run, with either
+    StopRecordingLogcat or SearchLogcatRecord following the tests.
+
+    Args:
+      clear: True if existing log output should be cleared.
+      filters: A list of logcat filters to be used.
+    """
+    if not filters:
+      filters = ['*:v']
+    if clear:
+      self._adb.SendCommand('logcat -c')
+    logcat_command = 'adb %s logcat -v threadtime %s' % (self._adb._target_arg,
+                                                         ' '.join(filters))
+    self._logcat_tmpoutfile = tempfile.NamedTemporaryFile(bufsize=0)
+    self.logcat_process = subprocess.Popen(logcat_command, shell=True,
+                                           stdout=self._logcat_tmpoutfile)
+
+  def GetCurrentRecordedLogcat(self):
+    """Return the current content of the logcat being recorded.
+       Call this after StartRecordingLogcat() and before StopRecordingLogcat().
+       This can be useful to perform timed polling/parsing.
+    Returns:
+       Current logcat output as a single string, or None if
+       StopRecordingLogcat() was already called.
+    """
+    if not self._logcat_tmpoutfile:
+      return None
+
+    with open(self._logcat_tmpoutfile.name) as f:
+      return f.read()
+
+  def StopRecordingLogcat(self):
+    """Stops an existing logcat recording subprocess and returns output.
+
+    Returns:
+      The logcat output as a string or an empty string if logcat was not
+      being recorded at the time.
+    """
+    if not self.logcat_process:
+      return ''
+    # poll() returns None while the process is running; 0 is a valid exit
+    # code, so compare against None explicitly. Read the output via the temp
+    # file rather than communicate(), which may return incomplete output if
+    # the pipe breaks when the process is killed.
+    if self.logcat_process.poll() is None:
+      self.logcat_process.kill()
+    self.logcat_process.wait()
+    self.logcat_process = None
+    self._logcat_tmpoutfile.seek(0)
+    output = self._logcat_tmpoutfile.read()
+    self._logcat_tmpoutfile.close()
+    self._logcat_tmpoutfile = None
+    return output
+
+  @staticmethod
+  def SearchLogcatRecord(record, message, thread_id=None, proc_id=None,
+                         log_level=None, component=None):
+    """Searches the specified logcat output and returns results.
+
+    This method searches through the logcat output specified by record for a
+    certain message, narrowing results by matching them against any other
+    specified criteria.  It returns all matching lines as described below.
+
+    Args:
+      record: A string generated by Start/StopRecordingLogcat to search.
+      message: An output string to search for.
+      thread_id: The thread id that is the origin of the message.
+      proc_id: The process that is the origin of the message.
+      log_level: The log level of the message.
+      component: The name of the component that would create the message.
+
+    Returns:
+      A list of dictionaries representing matching entries, each containing
+      keys thread_id, proc_id, log_level, component, and message.
+    """
+    if thread_id:
+      thread_id = str(thread_id)
+    if proc_id:
+      proc_id = str(proc_id)
+    results = []
+    reg = re.compile(r'(\d+)\s+(\d+)\s+([A-Z])\s+([A-Za-z]+)\s*:(.*)$',
+                     re.MULTILINE)
+    log_list = reg.findall(record)
+    for (tid, pid, log_lev, comp, msg) in log_list:
+      if ((not thread_id or thread_id == tid) and
+          (not proc_id or proc_id == pid) and
+          (not log_level or log_level == log_lev) and
+          (not component or component == comp) and msg.find(message) > -1):
+        match = {'thread_id': tid, 'proc_id': pid,
+                 'log_level': log_lev, 'component': comp,
+                 'message': msg}
+        results.append(match)
+    return results
+
+  def ExtractPid(self, process_name):
+    """Extracts Process Ids for a given process name from Android Shell.
+
+    Args:
+      process_name: name of the process on the device.
+
+    Returns:
+      List of all the process ids (as strings) that match the given name.
+      If the name of a process exactly matches the given name, the pid of
+      that process will be inserted to the front of the pid list.
+    """
+    pids = []
+    for line in self.RunShellCommand('ps', log_result=False):
+      data = line.split()
+      try:
+        if process_name in data[-1]:  # name is in the last column
+          if process_name == data[-1]:
+            pids.insert(0, data[1])  # PID is in the second column
+          else:
+            pids.append(data[1])
+      except IndexError:
+        pass
+    return pids
+
+  def GetIoStats(self):
+    """Gets cumulative disk IO stats since boot (for all processes).
+
+    Returns:
+      Dict of {num_reads, num_writes, read_ms, write_ms} or None if there
+      was an error.
+    """
+    IoStats = collections.namedtuple(
+        'IoStats',
+        ['device',
+         'num_reads_issued',
+         'num_reads_merged',
+         'num_sectors_read',
+         'ms_spent_reading',
+         'num_writes_completed',
+         'num_writes_merged',
+         'num_sectors_written',
+         'ms_spent_writing',
+         'num_ios_in_progress',
+         'ms_spent_doing_io',
+         'ms_spent_doing_io_weighted',
+        ])
+
+    for line in self.GetFileContents('/proc/diskstats', log_result=False):
+      fields = line.split()
+      stats = IoStats._make([fields[2]] + [int(f) for f in fields[3:]])
+      if stats.device == 'mmcblk0':
+        return {
+            'num_reads': stats.num_reads_issued,
+            'num_writes': stats.num_writes_completed,
+            'read_ms': stats.ms_spent_reading,
+            'write_ms': stats.ms_spent_writing,
+        }
+    logging.warning('Could not find disk IO stats.')
+    return None
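+
+  # A sketch of the parsing above on one hypothetical /proc/diskstats line
+  # (the first two fields are the major/minor device numbers, which are
+  # skipped; the remaining numeric fields are elided here):
+  #
+  #   fields = '179 0 mmcblk0 1497 1145 14028 2228 ...'.split()
+  #   stats = IoStats._make([fields[2]] + [int(f) for f in fields[3:]])
+  #   stats.device            # 'mmcblk0'
+  #   stats.ms_spent_reading  # 2228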
+
+  def GetMemoryUsageForPid(self, pid):
+    """Returns the memory usage for given pid.
+
+    Args:
+      pid: The pid number of the specific process running on device.
+
+    Returns:
+      Dict of {metric:usage_kb}, for the process which has specified pid.
+      The metric keys which may be included are: Size, Rss, Pss, Shared_Clean,
+      Shared_Dirty, Private_Clean, Private_Dirty, VmHWM.
+    """
+    showmap = self.RunShellCommand('showmap %d' % pid)
+    if not showmap or not showmap[-1].endswith('TOTAL'):
+      logging.warning('Invalid output for showmap %s', str(showmap))
+      return {}
+    items = showmap[-1].split()
+    if len(items) != 9:
+      logging.warning('Invalid TOTAL for showmap %s', str(items))
+      return {}
+    usage_dict = collections.defaultdict(int)
+    usage_dict.update({
+        'Size': int(items[0].strip()),
+        'Rss': int(items[1].strip()),
+        'Pss': int(items[2].strip()),
+        'Shared_Clean': int(items[3].strip()),
+        'Shared_Dirty': int(items[4].strip()),
+        'Private_Clean': int(items[5].strip()),
+        'Private_Dirty': int(items[6].strip()),
+    })
+    peak_value_kb = 0
+    for line in self.GetProtectedFileContents('/proc/%s/status' % pid):
+      if not line.startswith('VmHWM:'):  # Format: 'VmHWM: +[0-9]+ kB'
+        continue
+      peak_value_kb = int(line.split(':')[1].strip().split(' ')[0])
+      break
+    usage_dict['VmHWM'] = peak_value_kb
+    if not peak_value_kb:
+      logging.warning('Could not find memory peak value for pid ' + str(pid))
+
+    return usage_dict
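+
+  # A sketch of the TOTAL-line parsing above (hypothetical showmap output;
+  # the last line must split into exactly nine columns, eight numbers plus
+  # the literal 'TOTAL'):
+  #
+  #   items = '12345 6789 4321 100 200 300 400 50 TOTAL'.split()
+  #   int(items[0])  # Size in kB -> 12345; items[1] is Rss, items[2] is Pss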
+
+  def ProcessesUsingDevicePort(self, device_port):
+    """Lists processes using the specified device port on loopback interface.
+
+    Args:
+      device_port: Port on device we want to check.
+
+    Returns:
+      A list of (pid, process_name) tuples using the specified port.
+    """
+    tcp_results = self.RunShellCommand('cat /proc/net/tcp', log_result=False)
+    tcp_address = '0100007F:%04X' % device_port
+    pids = []
+    for single_connect in tcp_results:
+      connect_results = single_connect.split()
+      # Column 1 is the TCP port, and Column 9 is the inode of the socket
+      if connect_results[1] == tcp_address:
+        socket_inode = connect_results[9]
+        socket_name = 'socket:[%s]' % socket_inode
+        lsof_results = self.RunShellCommand('lsof', log_result=False)
+        for single_process in lsof_results:
+          process_results = single_process.split()
+          # Ignore the line if it has fewer than nine columns, which may
+          # be the case when a process stops while lsof is executing.
+          if len(process_results) <= 8:
+            continue
+          # Column 0 is the executable name
+          # Column 1 is the pid
+          # Column 8 is the Inode in use
+          if process_results[8] == socket_name:
+            pids.append((int(process_results[1]), process_results[0]))
+        break
+    logging.info('PidsUsingDevicePort: %s', pids)
+    return pids
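+
+  # A sketch of the address formatting above: /proc/net/tcp records the
+  # loopback address as little-endian hex, so port 8001 on 127.0.0.1 is
+  # matched by:
+  #
+  #   '0100007F:%04X' % 8001  # -> '0100007F:1F41'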
+
+  def FileExistsOnDevice(self, file_name):
+    """Checks whether the given file exists on the device.
+
+    Args:
+      file_name: Full path of file to check.
+
+    Returns:
+      True if the file exists, False otherwise.
+    """
+    assert '"' not in file_name, 'file_name cannot contain double quotes'
+    try:
+      status = self._adb.SendShellCommand(
+          '\'test -e "%s"; echo $?\'' % (file_name))
+      if 'test: not found' not in status:
+        return int(status) == 0
+
+      status = self._adb.SendShellCommand(
+          '\'ls "%s" >/dev/null 2>&1; echo $?\'' % (file_name))
+      return int(status) == 0
+    except ValueError:
+      if IsDeviceAttached(self._device):
+        raise errors.DeviceUnresponsiveError('Device may be offline.')
+
+      return False
+
+  def IsFileWritableOnDevice(self, file_name):
+    """Checks whether the given file (or directory) is writable on the device.
+
+    Args:
+      file_name: Full path of file/directory to check.
+
+    Returns:
+      True if writable, False otherwise.
+    """
+    assert '"' not in file_name, 'file_name cannot contain double quotes'
+    try:
+      status = self._adb.SendShellCommand(
+          '\'test -w "%s"; echo $?\'' % (file_name))
+      if 'test: not found' not in status:
+        return int(status) == 0
+      raise errors.AbortError('"test" binary not found. OS too old.')
+
+    except ValueError:
+      if IsDeviceAttached(self._device):
+        raise errors.DeviceUnresponsiveError('Device may be offline.')
+
+      return False
+
+  @staticmethod
+  def GetTimestamp():
+    return time.strftime('%Y-%m-%d-%H%M%S', time.localtime())
+
+  @staticmethod
+  def EnsureHostDirectory(host_file):
+    host_dir = os.path.dirname(os.path.abspath(host_file))
+    if not os.path.exists(host_dir):
+      os.makedirs(host_dir)
+
+  def TakeScreenshot(self, host_file=None):
+    """Saves a screenshot image to |host_file| on the host.
+
+    Args:
+      host_file: Absolute path to the image file to store on the host or None to
+                 use an autogenerated file name.
+
+    Returns:
+      Resulting host file name of the screenshot.
+    """
+    host_file = os.path.abspath(host_file or
+                                'screenshot-%s.png' % self.GetTimestamp())
+    self.EnsureHostDirectory(host_file)
+    device_file = '%s/screenshot.png' % self.GetExternalStorage()
+    self.RunShellCommand(
+        '/system/bin/screencap -p %s' % device_file)
+    self.PullFileFromDevice(device_file, host_file)
+    self.RunShellCommand('rm -f "%s"' % device_file)
+    return host_file
+
+  def PullFileFromDevice(self, device_file, host_file):
+    """Download |device_file| on the device from to |host_file| on the host.
+
+    Args:
+      device_file: Absolute path to the file to retrieve from the device.
+      host_file: Absolute path to the file to store on the host.
+    """
+    if not self._adb.Pull(device_file, host_file):
+      raise device_errors.AdbCommandFailedError(
+          ['pull', device_file, host_file], 'Failed to pull file from device.')
+    assert os.path.exists(host_file)
+
+  def SetUtilWrapper(self, util_wrapper):
+    """Sets a wrapper prefix to be used when running a locally-built
+    binary on the device (ex.: md5sum_bin).
+    """
+    self._util_wrapper = util_wrapper
+
+  def RunUIAutomatorTest(self, test, test_package, timeout):
+    """Runs a single uiautomator test.
+
+    Args:
+      test: Test class/method.
+      test_package: Name of the test jar.
+      timeout: Timeout time in seconds.
+
+    Returns:
+      An instance of am_instrument_parser.TestResult object.
+    """
+    cmd = 'uiautomator runtest %s -e class %s' % (test_package, test)
+    self._LogShell(cmd)
+    output = self._adb.SendShellCommand(cmd, timeout_time=timeout)
+    # uiautomator doesn't fully conform to the instrumentation test runner
+    # convention and doesn't terminate with INSTRUMENTATION_CODE.
+    # Just assume the first result is valid.
+    (test_results, _) = am_instrument_parser.ParseAmInstrumentOutput(output)
+    if not test_results:
+      raise errors.InstrumentationError(
+          'no test results... device setup correctly?')
+    return test_results[0]
+
+  def DismissCrashDialogIfNeeded(self):
+    """Dismiss the error/ANR dialog if present.
+
+    Returns:
+      Name of the crashed package if a dialog is focused, None otherwise.
+    """
+    re_focus = re.compile(
+        r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
+
+    def _FindFocusedWindow():
+      match = None
+      for line in self.RunShellCommand('dumpsys window windows'):
+        match = re.match(re_focus, line)
+        if match:
+          break
+      return match
+
+    match = _FindFocusedWindow()
+    if not match:
+      return
+    package = match.group(2)
+    logging.warning('Trying to dismiss %s dialog for %s' % match.groups())
+    self.SendKeyEvent(KEYCODE_DPAD_RIGHT)
+    self.SendKeyEvent(KEYCODE_DPAD_RIGHT)
+    self.SendKeyEvent(KEYCODE_ENTER)
+    match = _FindFocusedWindow()
+    if match:
+      logging.error('Still showing a %s dialog for %s' % match.groups())
+    return package
+
+  def EfficientDeviceDirectoryCopy(self, source, dest):
+    """ Copy a directory efficiently on the device
+
+    Uses a shell script running on the target to copy new and changed files the
+    source directory to the destination directory and remove added files. This
+    is in some cases much faster than cp -r.
+
+    Args:
+      source: absolute path of source directory
+      dest: absolute path of destination directory
+    """
+    logging.info('In EfficientDeviceDirectoryCopy %s %s', source, dest)
+    with DeviceTempFile(self, suffix=".sh") as temp_script_file:
+      host_script_path = os.path.join(constants.DIR_SOURCE_ROOT,
+                                      'build',
+                                      'android',
+                                      'pylib',
+                                      'efficient_android_directory_copy.sh')
+      self._adb.Push(host_script_path, temp_script_file.name)
+      out = self.RunShellCommand(
+          'sh %s %s %s' % (temp_script_file.name, source, dest),
+          timeout_time=120)
+      if self._device:
+        device_repr = self._device[-4:]
+      else:
+        device_repr = '????'
+      for line in out:
+        logging.info('[%s]> %s', device_repr, line)
+
+  def _GetControlUsbChargingCommand(self):
+    if self._control_usb_charging_command['cached']:
+      return self._control_usb_charging_command['command']
+    self._control_usb_charging_command['cached'] = True
+    if not self.IsRootEnabled():
+      return None
+    for command in CONTROL_USB_CHARGING_COMMANDS:
+      # Assert command is valid.
+      assert 'disable_command' in command
+      assert 'enable_command' in command
+      assert 'witness_file' in command
+      witness_file = command['witness_file']
+      if self.FileExistsOnDevice(witness_file):
+        self._control_usb_charging_command['command'] = command
+        return command
+    return None
+
+  def CanControlUsbCharging(self):
+    return self._GetControlUsbChargingCommand() is not None
+
+  def DisableUsbCharging(self, timeout=10):
+    command = self._GetControlUsbChargingCommand()
+    if not command:
+      raise Exception('Unable to act on usb charging.')
+    disable_command = command['disable_command']
+    t0 = time.time()
+    # Do not loop directly on self.IsDeviceCharging to cut the number of calls
+    # to the device.
+    while True:
+      if t0 + timeout - time.time() < 0:
+        raise pexpect.TIMEOUT('Unable to disable USB charging in time: %s' % (
+            self.GetBatteryInfo()))
+      self.RunShellCommand(disable_command)
+      if not self.IsDeviceCharging():
+        break
+
+  def EnableUsbCharging(self, timeout=10):
+    command = self._GetControlUsbChargingCommand()
+    if not command:
+      raise Exception('Unable to act on usb charging.')
+    enable_command = command['enable_command']
+    t0 = time.time()
+    # Do not loop directly on self.IsDeviceCharging to cut the number of calls
+    # to the device.
+    while True:
+      if t0 + timeout - time.time() < 0:
+        raise pexpect.TIMEOUT('Unable to enable USB charging in time.')
+      self.RunShellCommand(enable_command)
+      if self.IsDeviceCharging():
+        break
+
+  def IsDeviceCharging(self):
+    for line in self.RunShellCommand('dumpsys battery'):
+      if 'powered: ' in line:
+        if line.split('powered: ')[1] == 'true':
+          return True
+    return False
+
+
+class NewLineNormalizer(object):
+  """A file-like object to normalize EOLs to '\n'.
+
+  Pexpect runs adb within a pseudo-tty device (see
+  http://www.noah.org/wiki/pexpect), so any '\n' printed by adb is written
+  as '\r\n' to the logfile. Since adb already uses '\r\n' to terminate
+  lines, the log ends up having '\r\r\n' at the end of each line. This
+  filter replaces the above with a single '\n' in the data stream.
+  """
+  def __init__(self, output):
+    self._output = output
+
+  def write(self, data):
+    data = data.replace('\r\r\n', '\n')
+    self._output.write(data)
+
+  def flush(self):
+    self._output.flush()
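
The commands above are typically driven through an AndroidCommands instance,
as the unit tests below do. A minimal sketch (assumes at least one attached
device; the printed values are illustrative):

  from pylib import android_commands

  devices = android_commands.GetAttachedDevices()
  ac = android_commands.AndroidCommands(device=devices[0])
  print ac.GetBuildId()       # e.g. 'JRM79C'
  print ac.GetProductModel()  # e.g. 'Galaxy Nexus'
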
diff --git a/build/android/pylib/android_commands_unittest.py b/build/android/pylib/android_commands_unittest.py
new file mode 100644
index 0000000..21c34f9
--- /dev/null
+++ b/build/android/pylib/android_commands_unittest.py
@@ -0,0 +1,191 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+import sys
+import unittest
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+
+from pylib import android_commands
+
+# pylint: disable=W0212,W0702
+
+class TestDeviceTempFile(unittest.TestCase):
+  def setUp(self):
+    if not os.getenv('BUILDTYPE'):
+      os.environ['BUILDTYPE'] = 'Debug'
+
+    devices = android_commands.GetAttachedDevices()
+    self.assertGreater(len(devices), 0, 'No device attached!')
+    self.ac = android_commands.AndroidCommands(device=devices[0])
+
+  def testTempFileDeleted(self):
+    """Tests that DeviceTempFile deletes files when closed."""
+    temp_file = android_commands.DeviceTempFile(self.ac)
+    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
+    self.ac.SetFileContents(temp_file.name, "contents")
+    self.assertTrue(self.ac.FileExistsOnDevice(temp_file.name))
+    temp_file.close()
+    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
+
+    with android_commands.DeviceTempFile(self.ac) as with_temp_file:
+      self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
+      self.ac.SetFileContents(with_temp_file.name, "contents")
+      self.assertTrue(self.ac.FileExistsOnDevice(with_temp_file.name))
+
+    self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
+
+  def testTempFileNotWritten(self):
+    """Tests that device temp files work successfully even if not written to."""
+    temp_file = android_commands.DeviceTempFile(self.ac)
+    temp_file.close()
+    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
+
+    with android_commands.DeviceTempFile(self.ac) as with_temp_file:
+      pass
+    self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
+
+  def testNaming(self):
+    """Tests that returned filenames are as requested."""
+    temp_file = android_commands.DeviceTempFile(self.ac, prefix="cat")
+    self.assertTrue(os.path.basename(temp_file.name).startswith("cat"))
+
+    temp_file = android_commands.DeviceTempFile(self.ac, suffix="dog")
+    self.assertTrue(temp_file.name.endswith("dog"))
+
+    temp_file = android_commands.DeviceTempFile(
+        self.ac, prefix="cat", suffix="dog")
+    self.assertTrue(os.path.basename(temp_file.name).startswith("cat"))
+    self.assertTrue(temp_file.name.endswith("dog"))
+
+
+class TestGetFilesChanged(unittest.TestCase):
+
+  def setUp(self):
+    if not os.getenv('BUILDTYPE'):
+      os.environ['BUILDTYPE'] = 'Debug'
+
+    devices = android_commands.GetAttachedDevices()
+    self.assertGreater(len(devices), 0, 'No device attached!')
+    self.ac = android_commands.AndroidCommands(device=devices[0])
+    self.host_data_dir = os.path.realpath('test_push_data')
+    self.device_data_dir = '%s/test_push_data' % (
+        self.ac.RunShellCommand('realpath %s' %
+            self.ac.GetExternalStorage())[0])
+
+    os.mkdir(self.host_data_dir)
+    for i in xrange(1, 10):
+      with open('%s/%d.txt' % (self.host_data_dir, i), 'w') as f:
+        f.write('file #%d' % i)
+
+    self.ac.RunShellCommand('mkdir %s' % self.device_data_dir)
+
+  def testGetFilesChangedAllNeeded(self):
+    """ Tests GetFilesChanged when none of the files are on the device.
+    """
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(1, 10)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedSomeIdentical(self):
+    """ Tests GetFilesChanged when some of the files are on the device.
+    """
+    for i in xrange(1, 5):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(5, 10)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedAllIdentical(self):
+    """ Tests GetFilesChanged when all of the files are on the device.
+    """
+    for i in xrange(1, 10):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    expected = []
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedRename(self):
+    """ Tests GetFilesChanged when one of the files has been renamed.
+
+        This tests both with and without the ignore_filenames flag set.
+    """
+    for i in xrange(5, 10):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    os.rename('%s/5.txt' % (self.host_data_dir),
+              '%s/99.txt' % (self.host_data_dir))
+
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(1, 5)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir,
+                                     ignore_filenames=True)
+    self.assertSequenceEqual(expected, actual)
+
+    expected.append(('%s/99.txt' % self.host_data_dir,
+                     '%s/99.txt' % self.device_data_dir))
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedCopy(self):
+    """ Tests GetFilesChanged when one of the files has been copied.
+
+        This tests both with and without the ignore_filenames flag set.
+    """
+    for i in xrange(5, 10):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    shutil.copy('%s/5.txt' % self.host_data_dir,
+                '%s/99.txt' % self.host_data_dir)
+
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(1, 5)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir,
+                                     ignore_filenames=True)
+    self.assertSequenceEqual(expected, actual)
+
+    expected.append(('%s/99.txt' % self.host_data_dir,
+                     '%s/99.txt' % self.device_data_dir))
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedIndividual(self):
+    """ Tests GetFilesChanged when provided one file.
+    """
+    expected = [('%s/1.txt' % self.host_data_dir,
+                 '%s/1.txt' % self.device_data_dir)]
+    actual = self.ac.GetFilesChanged('%s/1.txt' % self.host_data_dir,
+                                     '%s/1.txt' % self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedFileToDirectory(self):
+    """ Tests GetFilesChanged when provided a file from the host and a
+        directory on the device.
+    """
+    expected = [('%s/1.txt' % self.host_data_dir,
+                 '%s' % self.device_data_dir)]
+    actual = self.ac.GetFilesChanged('%s/1.txt' % self.host_data_dir,
+                                     '%s' % self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def tearDown(self):
+    try:
+      shutil.rmtree(self.host_data_dir)
+      self.ac.RunShellCommand('rm -rf %s' % self.device_data_dir)
+    except:
+      pass
+
+if __name__ == '__main__':
+  unittest.main()
+
diff --git a/build/android/pylib/base/__init__.py b/build/android/pylib/base/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/base/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/base/base_setup.py b/build/android/pylib/base/base_setup.py
new file mode 100644
index 0000000..a416380
--- /dev/null
+++ b/build/android/pylib/base/base_setup.py
@@ -0,0 +1,63 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base script for doing test setup."""
+
+import logging
+import os
+
+from pylib import constants
+from pylib import valgrind_tools
+from pylib.utils import isolator
+
+
+def GenerateDepsDirUsingIsolate(suite_name, isolate_file_path,
+                                isolate_file_paths, deps_exclusion_list):
+  """Generate the dependency dir for the test suite using isolate.
+
+  Args:
+    suite_name: Name of the test suite (e.g. base_unittests).
+    isolate_file_path: .isolate file path to use. If there is a default .isolate
+                       file path for the suite_name, this will override it.
+    isolate_file_paths: Dictionary with the default .isolate file paths for
+                        the test suites.
+    deps_exclusion_list: A list of files that are listed as dependencies in the
+                         .isolate files but should not be pushed to the device.
+  Returns:
+    The Isolator instance used to remap the dependencies, or None.
+  """
+  if isolate_file_path:
+    if os.path.isabs(isolate_file_path):
+      isolate_abs_path = isolate_file_path
+    else:
+      isolate_abs_path = os.path.join(constants.DIR_SOURCE_ROOT,
+                                      isolate_file_path)
+  else:
+    isolate_rel_path = isolate_file_paths.get(suite_name)
+    if not isolate_rel_path:
+      logging.info('Did not find an isolate file for the test suite.')
+      return
+    isolate_abs_path = os.path.join(constants.DIR_SOURCE_ROOT, isolate_rel_path)
+
+  isolated_abs_path = os.path.join(
+      constants.GetOutDirectory(), '%s.isolated' % suite_name)
+  assert os.path.exists(isolate_abs_path), 'Cannot find %s' % isolate_abs_path
+
+  i = isolator.Isolator(constants.ISOLATE_DEPS_DIR)
+  i.Clear()
+  i.Remap(isolate_abs_path, isolated_abs_path)
+  # The remap command preserves timestamps (files are hardlinked). If it did
+  # not, all of the data would be pushed to the device on every run once we
+  # move from md5sum comparison to timestamp diffs. Sanity-check the
+  # hardlinks here.
+  i.VerifyHardlinks()
+  i.PurgeExcluded(deps_exclusion_list)
+  i.MoveOutputDeps()
+  return i
+
+
+def PushDataDeps(device, device_dir, test_options):
+  valgrind_tools.PushFilesForTool(test_options.tool, device)
+  if os.path.exists(constants.ISOLATE_DEPS_DIR):
+    device.PushChangedFiles([(constants.ISOLATE_DEPS_DIR, device_dir)],
+                            delete_device_stale=test_options.delete_stale_data)
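
A hypothetical call of the setup helper above (the suite name and paths are
illustrative; a real caller passes the suite's option objects):

  from pylib.base import base_setup

  i = base_setup.GenerateDepsDirUsingIsolate(
      'base_unittests',
      None,  # no override; fall back to the defaults dictionary
      {'base_unittests': 'base/base_unittests.isolate'},
      ['README.md'])  # listed as a dep, but not wanted on the device
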
diff --git a/build/android/pylib/base/base_test_result.py b/build/android/pylib/base/base_test_result.py
new file mode 100644
index 0000000..58200f6
--- /dev/null
+++ b/build/android/pylib/base/base_test_result.py
@@ -0,0 +1,216 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing base test results classes."""
+
+class ResultType(object):
+  """Class enumerating test types."""
+  PASS = 'PASS'
+  SKIP = 'SKIP'
+  FAIL = 'FAIL'
+  CRASH = 'CRASH'
+  TIMEOUT = 'TIMEOUT'
+  UNKNOWN = 'UNKNOWN'
+
+  @staticmethod
+  def GetTypes():
+    """Get a list of all test types."""
+    return [ResultType.PASS, ResultType.SKIP, ResultType.FAIL,
+            ResultType.CRASH, ResultType.TIMEOUT, ResultType.UNKNOWN]
+
+
+class BaseTestResult(object):
+  """Base class for a single test result."""
+
+  def __init__(self, name, test_type, duration=0, log=''):
+    """Construct a BaseTestResult.
+
+    Args:
+      name: Name of the test which defines uniqueness.
+      test_type: Type of the test result as defined in ResultType.
+      duration: Time it took for the test to run in milliseconds.
+      log: An optional string listing any errors.
+    """
+    assert name
+    assert test_type in ResultType.GetTypes()
+    self._name = name
+    self._test_type = test_type
+    self._duration = duration
+    self._log = log
+
+  def __str__(self):
+    return self._name
+
+  def __repr__(self):
+    return self._name
+
+  def __cmp__(self, other):
+    # pylint: disable=W0212
+    return cmp(self._name, other._name)
+
+  def __hash__(self):
+    return hash(self._name)
+
+  def SetName(self, name):
+    """Set the test name.
+
+    Because we're putting this into a set, this should only be used if moving
+    this test result into another set.
+    """
+    self._name = name
+
+  def GetName(self):
+    """Get the test name."""
+    return self._name
+
+  def SetType(self, test_type):
+    """Set the test result type."""
+    assert test_type in ResultType.GetTypes()
+    self._test_type = test_type
+
+  def GetType(self):
+    """Get the test result type."""
+    return self._test_type
+
+  def GetDuration(self):
+    """Get the test duration."""
+    return self._duration
+
+  def SetLog(self, log):
+    """Set the test log."""
+    self._log = log
+
+  def GetLog(self):
+    """Get the test log."""
+    return self._log
+
+
+class TestRunResults(object):
+  """Set of results for a test run."""
+
+  def __init__(self):
+    self._results = set()
+
+  def GetLogs(self):
+    """Get the string representation of all test logs."""
+    s = []
+    for test_type in ResultType.GetTypes():
+      if test_type != ResultType.PASS:
+        for t in sorted(self._GetType(test_type)):
+          log = t.GetLog()
+          if log:
+            s.append('[%s] %s:' % (test_type, t))
+            s.append(log)
+    return '\n'.join(s)
+
+  def GetGtestForm(self):
+    """Get the gtest string representation of this object."""
+    s = []
+    plural = lambda n, s, p: '%d %s' % (n, p if n != 1 else s)
+    tests = lambda n: plural(n, 'test', 'tests')
+
+    s.append('[==========] %s ran.' % (tests(len(self.GetAll()))))
+    s.append('[  PASSED  ] %s.' % (tests(len(self.GetPass()))))
+
+    skipped = self.GetSkip()
+    if skipped:
+      s.append('[  SKIPPED ] Skipped %s, listed below:' % tests(len(skipped)))
+      for t in sorted(skipped):
+        s.append('[  SKIPPED ] %s' % str(t))
+
+    all_failures = self.GetFail().union(self.GetCrash(), self.GetTimeout(),
+        self.GetUnknown())
+    if all_failures:
+      s.append('[  FAILED  ] %s, listed below:' % tests(len(all_failures)))
+      for t in sorted(self.GetFail()):
+        s.append('[  FAILED  ] %s' % str(t))
+      for t in sorted(self.GetCrash()):
+        s.append('[  FAILED  ] %s (CRASHED)' % str(t))
+      for t in sorted(self.GetTimeout()):
+        s.append('[  FAILED  ] %s (TIMEOUT)' % str(t))
+      for t in sorted(self.GetUnknown()):
+        s.append('[  FAILED  ] %s (UNKNOWN)' % str(t))
+      s.append('')
+      s.append(plural(len(all_failures), 'FAILED TEST', 'FAILED TESTS'))
+    return '\n'.join(s)
+
+  def GetShortForm(self):
+    """Get the short string representation of this object."""
+    s = []
+    s.append('ALL: %d' % len(self._results))
+    for test_type in ResultType.GetTypes():
+      s.append('%s: %d' % (test_type, len(self._GetType(test_type))))
+    return ''.join([x.ljust(15) for x in s])
+
+  def __str__(self):
+    # No GetLongForm() is defined on this class; use the gtest-style
+    # representation as the long form.
+    return self.GetGtestForm()
+
+  def AddResult(self, result):
+    """Add |result| to the set.
+
+    Args:
+      result: An instance of BaseTestResult.
+    """
+    assert isinstance(result, BaseTestResult)
+    self._results.add(result)
+
+  def AddResults(self, results):
+    """Add |results| to the set.
+
+    Args:
+      results: An iterable of BaseTestResult objects.
+    """
+    for t in results:
+      self.AddResult(t)
+
+  def AddTestRunResults(self, results):
+    """Add the set of test results from |results|.
+
+    Args:
+      results: An instance of TestRunResults.
+    """
+    assert isinstance(results, TestRunResults)
+    # pylint: disable=W0212
+    self._results.update(results._results)
+
+  def GetAll(self):
+    """Get the set of all test results."""
+    return self._results.copy()
+
+  def _GetType(self, test_type):
+    """Get the set of test results with the given test type."""
+    return set(t for t in self._results if t.GetType() == test_type)
+
+  def GetPass(self):
+    """Get the set of all passed test results."""
+    return self._GetType(ResultType.PASS)
+
+  def GetSkip(self):
+    """Get the set of all skipped test results."""
+    return self._GetType(ResultType.SKIP)
+
+  def GetFail(self):
+    """Get the set of all failed test results."""
+    return self._GetType(ResultType.FAIL)
+
+  def GetCrash(self):
+    """Get the set of all crashed test results."""
+    return self._GetType(ResultType.CRASH)
+
+  def GetTimeout(self):
+    """Get the set of all timed out test results."""
+    return self._GetType(ResultType.TIMEOUT)
+
+  def GetUnknown(self):
+    """Get the set of all unknown test results."""
+    return self._GetType(ResultType.UNKNOWN)
+
+  def GetNotPass(self):
+    """Get the set of all non-passed test results."""
+    return self.GetAll() - self.GetPass()
+
+  def DidRunPass(self):
+    """Return whether the test run was successful."""
+    return not self.GetNotPass() - self.GetSkip()
+
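
A short sketch of the result-aggregation API above:

  from pylib.base import base_test_result

  results = base_test_result.TestRunResults()
  results.AddResult(base_test_result.BaseTestResult(
      'SampleTest', base_test_result.ResultType.PASS, duration=42))
  results.AddResult(base_test_result.BaseTestResult(
      'FlakyTest', base_test_result.ResultType.FAIL, log='assertion failed'))
  print results.GetShortForm()  # per-type counts on one line
  print results.DidRunPass()    # False, since one test failed
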
diff --git a/build/android/pylib/base/base_test_result_unittest.py b/build/android/pylib/base/base_test_result_unittest.py
new file mode 100644
index 0000000..6f0cba7
--- /dev/null
+++ b/build/android/pylib/base/base_test_result_unittest.py
@@ -0,0 +1,82 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for TestRunResults."""
+
+import unittest
+
+from pylib.base.base_test_result import BaseTestResult
+from pylib.base.base_test_result import TestRunResults
+from pylib.base.base_test_result import ResultType
+
+
+class TestTestRunResults(unittest.TestCase):
+  def setUp(self):
+    self.p1 = BaseTestResult('p1', ResultType.PASS, log='pass1')
+    other_p1 = BaseTestResult('p1', ResultType.PASS)
+    self.p2 = BaseTestResult('p2', ResultType.PASS)
+    self.f1 = BaseTestResult('f1', ResultType.FAIL, log='failure1')
+    self.c1 = BaseTestResult('c1', ResultType.CRASH, log='crash1')
+    self.u1 = BaseTestResult('u1', ResultType.UNKNOWN)
+    self.tr = TestRunResults()
+    self.tr.AddResult(self.p1)
+    self.tr.AddResult(other_p1)
+    self.tr.AddResult(self.p2)
+    self.tr.AddResults(set([self.f1, self.c1, self.u1]))
+
+  def testGetAll(self):
+    self.assertFalse(
+        self.tr.GetAll().symmetric_difference(
+            [self.p1, self.p2, self.f1, self.c1, self.u1]))
+
+  def testGetPass(self):
+    self.assertFalse(self.tr.GetPass().symmetric_difference(
+        [self.p1, self.p2]))
+
+  def testGetNotPass(self):
+    self.assertFalse(self.tr.GetNotPass().symmetric_difference(
+        [self.f1, self.c1, self.u1]))
+
+  def testGetAddTestRunResults(self):
+    tr2 = TestRunResults()
+    other_p1 = BaseTestResult('p1', ResultType.PASS)
+    f2 = BaseTestResult('f2', ResultType.FAIL)
+    tr2.AddResult(other_p1)
+    tr2.AddResult(f2)
+    tr2.AddTestRunResults(self.tr)
+    self.assertFalse(
+        tr2.GetAll().symmetric_difference(
+            [self.p1, self.p2, self.f1, self.c1, self.u1, f2]))
+
+  def testGetLogs(self):
+    log_print = ('[FAIL] f1:\n'
+                 'failure1\n'
+                 '[CRASH] c1:\n'
+                 'crash1')
+    self.assertEqual(self.tr.GetLogs(), log_print)
+
+  def testGetShortForm(self):
+    short_print = ('ALL: 5         PASS: 2        FAIL: 1        '
+                   'CRASH: 1       TIMEOUT: 0     UNKNOWN: 1     ')
+    self.assertEqual(self.tr.GetShortForm(), short_print)
+
+  def testGetGtestForm(self):
+    gtest_print = ('[==========] 5 tests ran.\n'
+                   '[  PASSED  ] 2 tests.\n'
+                   '[  FAILED  ] 3 tests, listed below:\n'
+                   '[  FAILED  ] f1\n'
+                   '[  FAILED  ] c1 (CRASHED)\n'
+                   '[  FAILED  ] u1 (UNKNOWN)\n'
+                   '\n'
+                   '3 FAILED TESTS')
+    self.assertEqual(gtest_print, self.tr.GetGtestForm())
+
+  def testRunPassed(self):
+    self.assertFalse(self.tr.DidRunPass())
+    tr2 = TestRunResults()
+    self.assertTrue(tr2.DidRunPass())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/base/base_test_runner.py b/build/android/pylib/base/base_test_runner.py
new file mode 100644
index 0000000..2a7fdd3
--- /dev/null
+++ b/build/android/pylib/base/base_test_runner.py
@@ -0,0 +1,138 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for running tests on a single device."""
+
+# TODO(jbudorick) Deprecate and remove this class and all subclasses after
+# any relevant parts have been ported to the new environment + test instance
+# model.
+
+import logging
+
+from pylib import ports
+from pylib.device import device_utils
+from pylib.forwarder import Forwarder
+from pylib.valgrind_tools import CreateTool
+# TODO(frankf): Move this to pylib/utils
+import lighttpd_server
+
+
+# A file on device to store ports of net test server. The format of the file is
+# test-spawner-server-port:test-server-port
+NET_TEST_SERVER_PORT_INFO_FILE = 'net-test-server-ports'
+
+
+class BaseTestRunner(object):
+  """Base class for running tests on a single device."""
+
+  def __init__(self, device, tool):
+    """
+      Args:
+        device: An instance of DeviceUtils that the tests will run on.
+        tool: Name of the Valgrind tool.
+    """
+    assert isinstance(device, device_utils.DeviceUtils)
+    self.device = device
+    self.device_serial = self.device.adb.GetDeviceSerial()
+    self.tool = CreateTool(tool, self.device)
+    self._http_server = None
+    self._forwarder_device_port = 8000
+    self.forwarder_base_url = ('http://localhost:%d' %
+        self._forwarder_device_port)
+    # We will allocate port for test server spawner when calling method
+    # LaunchChromeTestServerSpawner and allocate port for test server when
+    # starting it in TestServerThread.
+    self.test_server_spawner_port = 0
+    self.test_server_port = 0
+
+  def _PushTestServerPortInfoToDevice(self):
+    """Pushes the latest port information to device."""
+    self.device.WriteFile(
+        self.device.GetExternalStoragePath() + '/' +
+            NET_TEST_SERVER_PORT_INFO_FILE,
+        '%d:%d' % (self.test_server_spawner_port, self.test_server_port))
+
+  def RunTest(self, test):
+    """Runs a test. Needs to be overridden.
+
+    Args:
+      test: A test to run.
+
+    Returns:
+      Tuple containing:
+        (base_test_result.TestRunResults, tests to rerun or None)
+    """
+    raise NotImplementedError
+
+  def InstallTestPackage(self):
+    """Installs the test package once before all tests are run."""
+    pass
+
+  def SetUp(self):
+    """Run once before all tests are run."""
+    self.InstallTestPackage()
+
+  def TearDown(self):
+    """Run once after all tests are run."""
+    self.ShutdownHelperToolsForTestSuite()
+
+  def LaunchTestHttpServer(self, document_root, port=None,
+                           extra_config_contents=None):
+    """Launches an HTTP server to serve HTTP tests.
+
+    Args:
+      document_root: Document root of the HTTP server.
+      port: Port on which the http server should bind.
+      extra_config_contents: Extra config contents for the HTTP server.
+    """
+    self._http_server = lighttpd_server.LighttpdServer(
+        document_root, port=port, extra_config_contents=extra_config_contents)
+    if self._http_server.StartupHttpServer():
+      logging.info('http server started: http://localhost:%s',
+                   self._http_server.port)
+    else:
+      logging.critical('Failed to start http server')
+    self._ForwardPortsForHttpServer()
+    return (self._forwarder_device_port, self._http_server.port)
+
+  def _ForwardPorts(self, port_pairs):
+    """Forwards a port."""
+    Forwarder.Map(port_pairs, self.device, self.tool)
+
+  def _UnmapPorts(self, port_pairs):
+    """Unmap previously forwarded ports."""
+    for (device_port, _) in port_pairs:
+      Forwarder.UnmapDevicePort(device_port, self.device)
+
+  # Deprecated: Use _ForwardPorts instead.
+  def StartForwarder(self, port_pairs):
+    """Starts TCP traffic forwarding for the given |port_pairs|.
+
+    Args:
+      port_pairs: A list of (device_port, local_port) tuples to forward.
+    """
+    self._ForwardPorts(port_pairs)
+
+  def _ForwardPortsForHttpServer(self):
+    """Starts a forwarder for the HTTP server.
+
+    The forwarder forwards HTTP requests and responses between host and device.
+    """
+    self._ForwardPorts([(self._forwarder_device_port, self._http_server.port)])
+
+  def _RestartHttpServerForwarderIfNecessary(self):
+    """Restarts the forwarder if it's not open."""
+    # Checks to see if the http server port is being used.  If not forwards the
+    # request.
+    # TODO(dtrainor): This is not always reliable because sometimes the port
+    # will be left open even after the forwarder has been killed.
+    if not ports.IsDevicePortUsed(self.device, self._forwarder_device_port):
+      self._ForwardPortsForHttpServer()
+
+  def ShutdownHelperToolsForTestSuite(self):
+    """Shuts down the server and the forwarder."""
+    if self._http_server:
+      self._UnmapPorts([(self._forwarder_device_port, self._http_server.port)])
+      self._http_server.ShutdownHttpServer()
+
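
RunTest is the abstract hook of the class above; a minimal subclass sketch
(a hypothetical runner that marks every test as passing; a real runner would
drive the device):

  from pylib.base import base_test_result
  from pylib.base import base_test_runner

  class NoopTestRunner(base_test_runner.BaseTestRunner):
    def RunTest(self, test):
      results = base_test_result.TestRunResults()
      results.AddResult(base_test_result.BaseTestResult(
          test, base_test_result.ResultType.PASS))
      return (results, None)  # nothing to rerun
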
diff --git a/build/android/pylib/base/environment.py b/build/android/pylib/base/environment.py
new file mode 100644
index 0000000..3f49f41
--- /dev/null
+++ b/build/android/pylib/base/environment.py
@@ -0,0 +1,34 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class Environment(object):
+  """An environment in which tests can be run.
+
+  This is expected to handle all logic that is applicable to an entire specific
+  environment but is independent of the test type.
+
+  Examples include:
+    - The local device environment, for running tests on devices attached to
+      the local machine.
+    - The local machine environment, for running tests directly on the local
+      machine.
+  """
+
+  def __init__(self):
+    pass
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, _exc_type, _exc_val, _exc_tb):
+    self.TearDown()
+
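
Since SetUp and TearDown are wired into __enter__ and __exit__, any concrete
environment can be used in a with statement. A sketch with a hypothetical
subclass:

  from pylib.base import environment

  class LoggingEnvironment(environment.Environment):
    def SetUp(self):
      print 'environment up'

    def TearDown(self):
      print 'environment down'

  with LoggingEnvironment():
    pass  # run tests here; TearDown runs even if this block raises
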
diff --git a/build/android/pylib/base/environment_factory.py b/build/android/pylib/base/environment_factory.py
new file mode 100644
index 0000000..31b4952
--- /dev/null
+++ b/build/android/pylib/base/environment_factory.py
@@ -0,0 +1,18 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+from pylib.local.device import local_device_environment
+from pylib.remote.device import remote_device_environment
+
+def CreateEnvironment(args, error_func):
+  if args.environment == 'local':
+    if args.command not in constants.LOCAL_MACHINE_TESTS:
+      return local_device_environment.LocalDeviceEnvironment(args, error_func)
+    # TODO(jbudorick) Add local machine environment.
+  if args.environment == 'remote_device':
+    return remote_device_environment.RemoteDeviceEnvironment(args,
+                                                             error_func)
+  error_func('Unable to create %s environment.' % args.environment)
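
A sketch of a typical call site (hypothetical wiring; assumes the parser
defines --environment, the command, and the other fields read above):

  import argparse
  from pylib.base import environment_factory

  parser = argparse.ArgumentParser()
  # ... add --environment, command and related arguments here ...
  args = parser.parse_args()
  env = environment_factory.CreateEnvironment(args, parser.error)
  with env:
    pass  # run tests inside the prepared environment
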
diff --git a/build/android/pylib/base/test_collection.py b/build/android/pylib/base/test_collection.py
new file mode 100644
index 0000000..de51027
--- /dev/null
+++ b/build/android/pylib/base/test_collection.py
@@ -0,0 +1,80 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import threading
+
+class TestCollection(object):
+  """A threadsafe collection of tests.
+
+  Args:
+    tests: List of tests to put in the collection.
+  """
+
+  def __init__(self, tests=None):
+    if not tests:
+      tests = []
+    self._lock = threading.Lock()
+    self._tests = []
+    self._tests_in_progress = 0
+    # Used to signal that an item is available or all items have been handled.
+    self._item_available_or_all_done = threading.Event()
+    for t in tests:
+      self.add(t)
+
+  def _pop(self):
+    """Pop a test from the collection.
+
+    Waits until a test is available or all tests have been handled.
+
+    Returns:
+      A test or None if all tests have been handled.
+    """
+    while True:
+      # Wait for a test to be available or all tests to have been handled.
+      self._item_available_or_all_done.wait()
+      with self._lock:
+        # Check which of the two conditions triggered the signal.
+        if self._tests_in_progress == 0:
+          return None
+        try:
+          return self._tests.pop(0)
+        except IndexError:
+          # Another thread beat us to the available test, wait again.
+          self._item_available_or_all_done.clear()
+
+  def add(self, test):
+    """Add a test to the collection.
+
+    Args:
+      test: A test to add.
+    """
+    with self._lock:
+      self._tests.append(test)
+      self._item_available_or_all_done.set()
+      self._tests_in_progress += 1
+
+  def test_completed(self):
+    """Indicate that a test has been fully handled."""
+    with self._lock:
+      self._tests_in_progress -= 1
+      if self._tests_in_progress == 0:
+        # All tests have been handled, signal all waiting threads.
+        self._item_available_or_all_done.set()
+
+  def __iter__(self):
+    """Iterate through tests in the collection until all have been handled."""
+    while True:
+      r = self._pop()
+      if r is None:
+        break
+      yield r
+
+  def __len__(self):
+    """Return the number of tests currently in the collection."""
+    return len(self._tests)
+
+  def test_names(self):
+    """Return a list of the names of the tests currently in the collection."""
+    with self._lock:
+      return list(t.test for t in self._tests)
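+
+# Illustrative usage sketch (not part of this change): worker threads iterate
+# over the collection while other threads may add() more tests; every popped
+# test must be matched by a test_completed() call or iteration never finishes.
+#
+#   collection = TestCollection(['test_a', 'test_b'])
+#   for test in collection:
+#     try:
+#       RunSomehow(test)  # hypothetical runner
+#     finally:
+#       collection.test_completed()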
diff --git a/build/android/pylib/base/test_dispatcher.py b/build/android/pylib/base/test_dispatcher.py
new file mode 100644
index 0000000..f919965
--- /dev/null
+++ b/build/android/pylib/base/test_dispatcher.py
@@ -0,0 +1,332 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dispatches tests, either sharding or replicating them.
+
+Performs the following steps:
+* Create a test collection factory, using the given tests
+  - If sharding: the test collection factory returns the same shared test
+    collection to all test runners.
+  - If replicating: the test collection factory returns a unique test
+    collection to each test runner, with the same set of tests in each.
+* Create a test runner for each device.
+* Run each test runner in its own thread, grabbing tests from the test
+  collection until there are no tests left.
+"""
+
+# TODO(jbudorick) Deprecate and remove this module after any relevant parts
+# have been ported to the new environment / test instance model.
+
+import logging
+import threading
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_collection
+from pylib.device import device_errors
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+
+DEFAULT_TIMEOUT = 7 * 60  # seven minutes
+
+
+class _ThreadSafeCounter(object):
+  """A threadsafe counter."""
+
+  def __init__(self):
+    self._lock = threading.Lock()
+    self._value = 0
+
+  def GetAndIncrement(self):
+    """Get the current value and increment it atomically.
+
+    Returns:
+      The value before incrementing.
+    """
+    with self._lock:
+      pre_increment = self._value
+      self._value += 1
+      return pre_increment
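+
+# Illustrative example (not part of this change): each caller observes a
+# unique, monotonically increasing value, even across threads.
+#
+#   counter = _ThreadSafeCounter()
+#   counter.GetAndIncrement()  # -> 0
+#   counter.GetAndIncrement()  # -> 1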
+
+
+class _Test(object):
+  """Holds a test with additional metadata."""
+
+  def __init__(self, test, tries=0):
+    """Initializes the _Test object.
+
+    Args:
+      test: The test.
+      tries: Number of tries so far.
+    """
+    self.test = test
+    self.tries = tries
+
+
+def _RunTestsFromQueue(runner, collection, out_results, watcher,
+                       num_retries, tag_results_with_device=False):
+  """Runs tests from the collection until empty using the given runner.
+
+  Adds TestRunResults objects to the out_results list and may add tests back
+  to the collection for retrying.
+
+  Args:
+    runner: A TestRunner object used to run the tests.
+    collection: A TestCollection from which to get _Test objects to run.
+    out_results: A list to add TestRunResults to.
+    watcher: A watchdog_timer.WatchdogTimer object, used as a shared timeout.
+    num_retries: Number of retries for a test.
+    tag_results_with_device: If True, appends the name of the device on which
+        the test was run to the test name. Used when replicating to identify
+        which device ran each copy of the test, and to ensure each copy of the
+        test is recorded separately.
+  """
+
+  def TagTestRunResults(test_run_results):
+    """Tags all results with the last 4 digits of the device id.
+
+    Used when replicating tests to distinguish the same tests run on different
+    devices. We use a set to store test results, so the hash (generated from
+    name and tag) must be unique to be considered different results.
+    """
+    new_test_run_results = base_test_result.TestRunResults()
+    for test_result in test_run_results.GetAll():
+      test_result.SetName('%s_%s' % (runner.device_serial[-4:],
+                                     test_result.GetName()))
+      new_test_run_results.AddResult(test_result)
+    return new_test_run_results
+
+  for test in collection:
+    watcher.Reset()
+    try:
+      if not runner.device.IsOnline():
+        # Device is unresponsive, stop handling tests on this device.
+        msg = 'Device %s is unresponsive.' % runner.device_serial
+        logging.warning(msg)
+        raise device_errors.DeviceUnreachableError(msg)
+      result, retry = runner.RunTest(test.test)
+      if tag_results_with_device:
+        result = TagTestRunResults(result)
+      test.tries += 1
+      if retry and test.tries <= num_retries:
+        # Retry non-passing results, only record passing results.
+        pass_results = base_test_result.TestRunResults()
+        pass_results.AddResults(result.GetPass())
+        out_results.append(pass_results)
+        logging.warning('Will retry test %s, try #%s.', retry, test.tries)
+        collection.add(_Test(test=retry, tries=test.tries))
+      else:
+        # All tests passed or retry limit reached. Either way, record results.
+        out_results.append(result)
+    except:
+      # An unhandleable exception, ensure tests get run by another device and
+      # reraise this exception on the main thread.
+      collection.add(test)
+      raise
+    finally:
+      # Retries count as separate tasks so always mark the popped test as done.
+      collection.test_completed()
+
+
+def _SetUp(runner_factory, device, out_runners, threadsafe_counter):
+  """Creates a test runner for each device and calls SetUp() in parallel.
+
+  Note: if a device is unresponsive the corresponding TestRunner will not be
+    added to out_runners.
+
+  Args:
+    runner_factory: Callable that takes a device and index and returns a
+      TestRunner object.
+    device: The device serial number to set up.
+    out_runners: List to which the successfully set-up TestRunner object is
+      added.
+    threadsafe_counter: A _ThreadSafeCounter object used to get shard indices.
+  """
+  try:
+    index = threadsafe_counter.GetAndIncrement()
+    logging.warning('Creating shard %s for device %s.', index, device)
+    runner = runner_factory(device, index)
+    runner.SetUp()
+    out_runners.append(runner)
+  except device_errors.DeviceUnreachableError as e:
+    logging.warning('Failed to create shard for %s: [%s]', device, e)
+
+
+def _RunAllTests(runners, test_collection_factory, num_retries, timeout=None,
+                 tag_results_with_device=False):
+  """Run all tests using the given TestRunners.
+
+  Args:
+    runners: A list of TestRunner objects.
+    test_collection_factory: A callable to generate a TestCollection object for
+        each test runner.
+    num_retries: Number of retries for a test.
+    timeout: Watchdog timeout in seconds.
+    tag_results_with_device: If True, appends the name of the device on which
+        the test was run to the test name. Used when replicating to identify
+        which device ran each copy of the test, and to ensure each copy of the
+        test is recorded separately.
+
+  Returns:
+    A tuple of (TestRunResults object, exit code)
+  """
+  logging.warning('Running tests with %s test runners.', len(runners))
+  results = []
+  exit_code = 0
+  run_results = base_test_result.TestRunResults()
+  watcher = watchdog_timer.WatchdogTimer(timeout)
+  test_collections = [test_collection_factory() for _ in runners]
+
+  threads = [
+      reraiser_thread.ReraiserThread(
+          _RunTestsFromQueue,
+          [r, tc, results, watcher, num_retries, tag_results_with_device],
+          name=r.device_serial[-4:])
+      for r, tc in zip(runners, test_collections)]
+
+  workers = reraiser_thread.ReraiserThreadGroup(threads)
+  workers.StartAll()
+
+  # Catch DeviceUnreachableErrors and log them; the exit code is derived
+  # from the collected results below.
+  try:
+    workers.JoinAll(watcher)
+  except device_errors.DeviceUnreachableError as e:
+    logging.error(e)
+
+  if not all((len(tc) == 0 for tc in test_collections)):
+    logging.error('Only ran %d tests (all devices are likely offline).',
+                  len(results))
+    for tc in test_collections:
+      run_results.AddResults(base_test_result.BaseTestResult(
+          t, base_test_result.ResultType.UNKNOWN) for t in tc.test_names())
+
+  for r in results:
+    run_results.AddTestRunResults(r)
+  if not run_results.DidRunPass():
+    exit_code = constants.ERROR_EXIT_CODE
+  return (run_results, exit_code)
+
+
+def _CreateRunners(runner_factory, devices, timeout=None):
+  """Creates a test runner for each device and calls SetUp() in parallel.
+
+  Note: if a device is unresponsive the corresponding TestRunner will not be
+    included in the returned list.
+
+  Args:
+    runner_factory: Callable that takes a device and index and returns a
+      TestRunner object.
+    devices: List of device serial numbers as strings.
+    timeout: Watchdog timeout in seconds, defaults to the default timeout.
+
+  Returns:
+    A list of TestRunner objects.
+  """
+  logging.warning('Creating %s test runners.', len(devices))
+  runners = []
+  counter = _ThreadSafeCounter()
+  threads = reraiser_thread.ReraiserThreadGroup(
+      [reraiser_thread.ReraiserThread(_SetUp,
+                                      [runner_factory, d, runners, counter],
+                                      name=str(d)[-4:])
+       for d in devices])
+  threads.StartAll()
+  threads.JoinAll(watchdog_timer.WatchdogTimer(timeout))
+  return runners
+
+
+def _TearDownRunners(runners, timeout=None):
+  """Calls TearDown() for each test runner in parallel.
+
+  Args:
+    runners: A list of TestRunner objects.
+    timeout: Watchdog timeout in seconds, defaults to the default timeout.
+  """
+  threads = reraiser_thread.ReraiserThreadGroup(
+      [reraiser_thread.ReraiserThread(r.TearDown, name=r.device_serial[-4:])
+       for r in runners])
+  threads.StartAll()
+  threads.JoinAll(watchdog_timer.WatchdogTimer(timeout))
+
+
+def ApplyMaxPerRun(tests, max_per_run):
+  """Rearrange the tests so that no group contains more than max_per_run tests.
+
+  Args:
+    tests: A list of tests. String entries are treated as colon-delimited
+        groups of tests and may be split; non-string entries are left intact.
+    max_per_run: The maximum number of tests allowed in any one group.
+
+  Returns:
+    A list of tests with no more than max_per_run per run.
+  """
+  tests_expanded = []
+  for test_group in tests:
+    if not isinstance(test_group, str):
+      # Do not split test objects which are not strings.
+      tests_expanded.append(test_group)
+    else:
+      test_split = test_group.split(':')
+      for i in range(0, len(test_split), max_per_run):
+        tests_expanded.append(':'.join(test_split[i:i+max_per_run]))
+  return tests_expanded
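+
+# For example (mirroring this module's unit tests):
+#   ApplyMaxPerRun(['A:B', 'C:D:E', 'F:G:H:I'], 2)
+#     -> ['A:B', 'C:D', 'E', 'F:G', 'H:I']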
+
+
+def RunTests(tests, runner_factory, devices, shard=True,
+             test_timeout=DEFAULT_TIMEOUT, setup_timeout=DEFAULT_TIMEOUT,
+             num_retries=2, max_per_run=256):
+  """Run all tests on attached devices, retrying tests that don't pass.
+
+  Args:
+    tests: List of tests to run.
+    runner_factory: Callable that takes a device and index and returns a
+        TestRunner object.
+    devices: List of attached devices.
+    shard: True if we should shard, False if we should replicate tests.
+      - Sharding tests will distribute tests across all test runners through a
+        shared test collection.
+      - Replicating tests will copy all tests to each test runner through a
+        unique test collection for each test runner.
+    test_timeout: Watchdog timeout in seconds for running tests.
+    setup_timeout: Watchdog timeout in seconds for creating and cleaning up
+        test runners.
+    num_retries: Number of retries for a test.
+    max_per_run: Maximum number of tests to run in any group.
+
+  Returns:
+    A tuple of (base_test_result.TestRunResults object, exit code).
+  """
+  if not tests:
+    logging.critical('No tests to run.')
+    return (base_test_result.TestRunResults(), constants.ERROR_EXIT_CODE)
+
+  tests_expanded = ApplyMaxPerRun(tests, max_per_run)
+  if shard:
+    # Generate a shared TestCollection object for all test runners, so they
+    # draw from a common pool of tests.
+    shared_test_collection = test_collection.TestCollection(
+        [_Test(t) for t in tests_expanded])
+    test_collection_factory = lambda: shared_test_collection
+    tag_results_with_device = False
+    log_string = 'sharded across devices'
+  else:
+    # Generate a unique TestCollection object for each test runner, but use
+    # the same set of tests.
+    test_collection_factory = lambda: test_collection.TestCollection(
+        [_Test(t) for t in tests_expanded])
+    tag_results_with_device = True
+    log_string = 'replicated on each device'
+
+  logging.info('Will run %d tests (%s): %s',
+               len(tests_expanded), log_string, str(tests_expanded))
+  runners = _CreateRunners(runner_factory, devices, setup_timeout)
+  try:
+    return _RunAllTests(runners, test_collection_factory,
+                        num_retries, test_timeout, tag_results_with_device)
+  finally:
+    try:
+      _TearDownRunners(runners, setup_timeout)
+    except device_errors.DeviceUnreachableError as e:
+      logging.warning('Device unresponsive during TearDown: [%s]', e)
+    except Exception as e:
+      logging.error('Unexpected exception caught during TearDown: %s', e)
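+
+# Illustrative usage sketch (not part of this change): a typical caller
+# supplies a runner factory and the serials of attached devices.
+#
+#   results, exit_code = RunTests(
+#       ['FooTest.*', 'BarTest.*'],  # hypothetical test filters
+#       runner_factory,              # callable taking (device, shard_index)
+#       devices,                     # attached device serials
+#       shard=True)                  # one shared pool across all devices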
diff --git a/build/android/pylib/base/test_dispatcher_unittest.py b/build/android/pylib/base/test_dispatcher_unittest.py
new file mode 100755
index 0000000..cace9a6
--- /dev/null
+++ b/build/android/pylib/base/test_dispatcher_unittest.py
@@ -0,0 +1,241 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for test_dispatcher.py."""
+# pylint: disable=R0201
+# pylint: disable=W0212
+
+import os
+import sys
+import unittest
+
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_collection
+from pylib.base import test_dispatcher
+from pylib.device import adb_wrapper
+from pylib.device import device_utils
+from pylib.utils import watchdog_timer
+
+sys.path.append(
+    os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock
+
+
+class TestException(Exception):
+  pass
+
+
+def _MockDevice(serial):
+  d = mock.MagicMock(spec=device_utils.DeviceUtils)
+  d.__str__.return_value = serial
+  d.adb = mock.MagicMock(spec=adb_wrapper.AdbWrapper)
+  d.adb.GetDeviceSerial = mock.MagicMock(return_value=serial)
+  d.IsOnline = mock.MagicMock(return_value=True)
+  return d
+
+
+class MockRunner(object):
+  """A mock TestRunner."""
+  def __init__(self, device=None, shard_index=0):
+    self.device = device or _MockDevice('0')
+    self.device_serial = self.device.adb.GetDeviceSerial()
+    self.shard_index = shard_index
+    self.setups = 0
+    self.teardowns = 0
+
+  def RunTest(self, test):
+    results = base_test_result.TestRunResults()
+    results.AddResult(
+        base_test_result.BaseTestResult(test, base_test_result.ResultType.PASS))
+    return (results, None)
+
+  def SetUp(self):
+    self.setups += 1
+
+  def TearDown(self):
+    self.teardowns += 1
+
+
+class MockRunnerFail(MockRunner):
+  def RunTest(self, test):
+    results = base_test_result.TestRunResults()
+    results.AddResult(
+        base_test_result.BaseTestResult(test, base_test_result.ResultType.FAIL))
+    return (results, test)
+
+
+class MockRunnerFailTwice(MockRunner):
+  def __init__(self, device=None, shard_index=0):
+    super(MockRunnerFailTwice, self).__init__(device, shard_index)
+    self._fails = 0
+
+  def RunTest(self, test):
+    self._fails += 1
+    results = base_test_result.TestRunResults()
+    if self._fails <= 2:
+      results.AddResult(base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.FAIL))
+      return (results, test)
+    else:
+      results.AddResult(base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.PASS))
+      return (results, None)
+
+
+class MockRunnerException(MockRunner):
+  def RunTest(self, test):
+    raise TestException
+
+
+class TestFunctions(unittest.TestCase):
+  """Tests test_dispatcher._RunTestsFromQueue."""
+  @staticmethod
+  def _RunTests(mock_runner, tests):
+    results = []
+    tests = test_collection.TestCollection(
+        [test_dispatcher._Test(t) for t in tests])
+    test_dispatcher._RunTestsFromQueue(mock_runner, tests, results,
+                                       watchdog_timer.WatchdogTimer(None), 2)
+    run_results = base_test_result.TestRunResults()
+    for r in results:
+      run_results.AddTestRunResults(r)
+    return run_results
+
+  def testRunTestsFromQueue(self):
+    results = TestFunctions._RunTests(MockRunner(), ['a', 'b'])
+    self.assertEqual(len(results.GetPass()), 2)
+    self.assertEqual(len(results.GetNotPass()), 0)
+
+  def testRunTestsFromQueueRetry(self):
+    results = TestFunctions._RunTests(MockRunnerFail(), ['a', 'b'])
+    self.assertEqual(len(results.GetPass()), 0)
+    self.assertEqual(len(results.GetFail()), 2)
+
+  def testRunTestsFromQueueFailTwice(self):
+    results = TestFunctions._RunTests(MockRunnerFailTwice(), ['a', 'b'])
+    self.assertEqual(len(results.GetPass()), 2)
+    self.assertEqual(len(results.GetNotPass()), 0)
+
+  def testSetUp(self):
+    runners = []
+    counter = test_dispatcher._ThreadSafeCounter()
+    test_dispatcher._SetUp(MockRunner, _MockDevice('0'), runners, counter)
+    self.assertEqual(len(runners), 1)
+    self.assertEqual(runners[0].setups, 1)
+
+  def testThreadSafeCounter(self):
+    counter = test_dispatcher._ThreadSafeCounter()
+    for i in xrange(5):
+      self.assertEqual(counter.GetAndIncrement(), i)
+
+  def testApplyMaxPerRun(self):
+    self.assertEqual(
+        ['A:B', 'C:D', 'E', 'F:G', 'H:I'],
+        test_dispatcher.ApplyMaxPerRun(['A:B', 'C:D:E', 'F:G:H:I'], 2))
+
+
+class TestThreadGroupFunctions(unittest.TestCase):
+  """Tests test_dispatcher._RunAllTests and test_dispatcher._CreateRunners."""
+  def setUp(self):
+    self.tests = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+    shared_test_collection = test_collection.TestCollection(
+        [test_dispatcher._Test(t) for t in self.tests])
+    self.test_collection_factory = lambda: shared_test_collection
+
+  def testCreate(self):
+    runners = test_dispatcher._CreateRunners(
+        MockRunner, [_MockDevice('0'), _MockDevice('1')])
+    for runner in runners:
+      self.assertEqual(runner.setups, 1)
+    self.assertEqual(set([r.device_serial for r in runners]),
+                     set(['0', '1']))
+    self.assertEqual(set([r.shard_index for r in runners]),
+                     set([0, 1]))
+
+  def testRun(self):
+    runners = [MockRunner(_MockDevice('0')), MockRunner(_MockDevice('1'))]
+    results, exit_code = test_dispatcher._RunAllTests(
+        runners, self.test_collection_factory, 0)
+    self.assertEqual(len(results.GetPass()), len(self.tests))
+    self.assertEqual(exit_code, 0)
+
+  def testTearDown(self):
+    runners = [MockRunner(_MockDevice('0')), MockRunner(_MockDevice('1'))]
+    test_dispatcher._TearDownRunners(runners)
+    for runner in runners:
+      self.assertEqual(runner.teardowns, 1)
+
+  def testRetry(self):
+    runners = test_dispatcher._CreateRunners(
+        MockRunnerFail, [_MockDevice('0'), _MockDevice('1')])
+    results, exit_code = test_dispatcher._RunAllTests(
+        runners, self.test_collection_factory, 0)
+    self.assertEqual(len(results.GetFail()), len(self.tests))
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+  def testReraise(self):
+    runners = test_dispatcher._CreateRunners(
+        MockRunnerException, [_MockDevice('0'), _MockDevice('1')])
+    with self.assertRaises(TestException):
+      test_dispatcher._RunAllTests(runners, self.test_collection_factory, 0)
+
+
+class TestShard(unittest.TestCase):
+  """Tests test_dispatcher.RunTests with sharding."""
+  @staticmethod
+  def _RunShard(runner_factory):
+    return test_dispatcher.RunTests(
+        ['a', 'b', 'c'], runner_factory, [_MockDevice('0'), _MockDevice('1')],
+        shard=True)
+
+  def testShard(self):
+    results, exit_code = TestShard._RunShard(MockRunner)
+    self.assertEqual(len(results.GetPass()), 3)
+    self.assertEqual(exit_code, 0)
+
+  def testFailing(self):
+    results, exit_code = TestShard._RunShard(MockRunnerFail)
+    self.assertEqual(len(results.GetPass()), 0)
+    self.assertEqual(len(results.GetFail()), 3)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+  def testNoTests(self):
+    results, exit_code = test_dispatcher.RunTests(
+        [], MockRunner, [_MockDevice('0'), _MockDevice('1')], shard=True)
+    self.assertEqual(len(results.GetAll()), 0)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+
+class TestReplicate(unittest.TestCase):
+  """Tests test_dispatcher.RunTests with replication."""
+  @staticmethod
+  def _RunReplicate(runner_factory):
+    return test_dispatcher.RunTests(
+        ['a', 'b', 'c'], runner_factory, [_MockDevice('0'), _MockDevice('1')],
+        shard=False)
+
+  def testReplicate(self):
+    results, exit_code = TestReplicate._RunReplicate(MockRunner)
+    # We expect 6 results since each test should have been run on every device.
+    self.assertEqual(len(results.GetPass()), 6)
+    self.assertEqual(exit_code, 0)
+
+  def testFailing(self):
+    results, exit_code = TestReplicate._RunReplicate(MockRunnerFail)
+    self.assertEqual(len(results.GetPass()), 0)
+    self.assertEqual(len(results.GetFail()), 6)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+  def testNoTests(self):
+    results, exit_code = test_dispatcher.RunTests(
+        [], MockRunner, [_MockDevice('0'), _MockDevice('1')], shard=False)
+    self.assertEqual(len(results.GetAll()), 0)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/base/test_instance.py b/build/android/pylib/base/test_instance.py
new file mode 100644
index 0000000..cdf678f
--- /dev/null
+++ b/build/android/pylib/base/test_instance.py
@@ -0,0 +1,35 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestInstance(object):
+  """A type of test.
+
+  This is expected to handle all logic that is test-type specific but
+  independent of the environment or device.
+
+  Examples include:
+    - gtests
+    - instrumentation tests
+  """
+
+  def __init__(self):
+    pass
+
+  def TestType(self):
+    raise NotImplementedError
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, _exc_type, _exc_val, _exc_tb):
+    self.TearDown()
+
diff --git a/build/android/pylib/base/test_instance_factory.py b/build/android/pylib/base/test_instance_factory.py
new file mode 100644
index 0000000..7e7cb0c
--- /dev/null
+++ b/build/android/pylib/base/test_instance_factory.py
@@ -0,0 +1,24 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+from pylib.gtest import gtest_test_instance
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.utils import isolator
+from pylib.uirobot import uirobot_test_instance
+
+
+def CreateTestInstance(args, error_func):
+
+  if args.command == 'gtest':
+    return gtest_test_instance.GtestTestInstance(
+        args, isolator.Isolator(constants.ISOLATE_DEPS_DIR), error_func)
+  elif args.command == 'instrumentation':
+    return instrumentation_test_instance.InstrumentationTestInstance(
+        args, isolator.Isolator(constants.ISOLATE_DEPS_DIR), error_func)
+  elif args.command == 'uirobot':
+    return uirobot_test_instance.UirobotTestInstance(args, error_func)
+
+  error_func('Unable to create %s test instance.' % args.command)
diff --git a/build/android/pylib/base/test_run.py b/build/android/pylib/base/test_run.py
new file mode 100644
index 0000000..7380e78
--- /dev/null
+++ b/build/android/pylib/base/test_run.py
@@ -0,0 +1,39 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestRun(object):
+  """An execution of a particular test on a particular device.
+
+  This is expected to handle all logic that is specific to the combination of
+  environment and test type.
+
+  Examples include:
+    - local gtests
+    - local instrumentation tests
+  """
+
+  def __init__(self, env, test_instance):
+    self._env = env
+    self._test_instance = test_instance
+
+  def TestPackage(self):
+    raise NotImplementedError
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def RunTests(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    self.TearDown()
+
diff --git a/build/android/pylib/base/test_run_factory.py b/build/android/pylib/base/test_run_factory.py
new file mode 100644
index 0000000..8c71ebbd
--- /dev/null
+++ b/build/android/pylib/base/test_run_factory.py
@@ -0,0 +1,41 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.gtest import gtest_test_instance
+from pylib.gtest import local_device_gtest_run
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_instrumentation_test_run
+from pylib.remote.device import remote_device_environment
+from pylib.remote.device import remote_device_gtest_run
+from pylib.remote.device import remote_device_instrumentation_test_run
+from pylib.remote.device import remote_device_uirobot_test_run
+from pylib.uirobot import uirobot_test_instance
+
+
+def CreateTestRun(_args, env, test_instance, error_func):
+  if isinstance(env, local_device_environment.LocalDeviceEnvironment):
+    if isinstance(test_instance, gtest_test_instance.GtestTestInstance):
+      return local_device_gtest_run.LocalDeviceGtestRun(env, test_instance)
+    if isinstance(test_instance,
+                  instrumentation_test_instance.InstrumentationTestInstance):
+      return (local_device_instrumentation_test_run
+              .LocalDeviceInstrumentationTestRun(env, test_instance))
+
+  if isinstance(env, remote_device_environment.RemoteDeviceEnvironment):
+    if isinstance(test_instance, gtest_test_instance.GtestTestInstance):
+      return remote_device_gtest_run.RemoteDeviceGtestTestRun(
+          env, test_instance)
+    if isinstance(test_instance,
+                  instrumentation_test_instance.InstrumentationTestInstance):
+      return (remote_device_instrumentation_test_run
+              .RemoteDeviceInstrumentationTestRun(env, test_instance))
+    if isinstance(test_instance, uirobot_test_instance.UirobotTestInstance):
+      return remote_device_uirobot_test_run.RemoteDeviceUirobotTestRun(
+          env, test_instance)
+
+  error_func('Unable to create test run for %s tests in %s environment'
+             % (str(test_instance), str(env)))
+
diff --git a/build/android/pylib/base/test_server.py b/build/android/pylib/base/test_server.py
new file mode 100644
index 0000000..085a51e
--- /dev/null
+++ b/build/android/pylib/base/test_server.py
@@ -0,0 +1,19 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+class TestServer(object):
+  """Base class for any server that needs to be set up for the tests."""
+
+  def __init__(self, *args, **kwargs):
+    pass
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def Reset(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
diff --git a/build/android/pylib/chrome_test_server_spawner.py b/build/android/pylib/chrome_test_server_spawner.py
new file mode 100644
index 0000000..052c2fd
--- /dev/null
+++ b/build/android/pylib/chrome_test_server_spawner.py
@@ -0,0 +1,422 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
+
+It's used to accept requests from the device to spawn and kill instances of the
+chrome test server on the host.
+"""
+# pylint: disable=W0702
+
+import BaseHTTPServer
+import json
+import logging
+import os
+import select
+import struct
+import subprocess
+import sys
+import threading
+import time
+import urlparse
+
+from pylib import constants
+from pylib import ports
+
+from pylib.forwarder import Forwarder
+
+
+# Paths needed to import necessary modules when launching a testserver.
+os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s:%s:%s'
+    % (os.path.join(constants.DIR_SOURCE_ROOT, 'third_party'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'tlslite'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'pyftpdlib',
+                    'src'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'net', 'tools', 'testserver'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'sync', 'tools', 'testserver')))
+
+
+SERVER_TYPES = {
+    'http': '',
+    'ftp': '-f',
+    'sync': '',  # Sync uses its own script, and doesn't take a server type arg.
+    'tcpecho': '--tcp-echo',
+    'udpecho': '--udp-echo',
+}
+
+
+# The timeout (in seconds) for starting up the Python test server.
+TEST_SERVER_STARTUP_TIMEOUT = 10
+
+def _WaitUntil(predicate, max_attempts=5):
+  """Blocks until the provided predicate (function) is true.
+
+  Returns:
+    Whether the provided predicate was satisfied once (before the timeout).
+  """
+  sleep_time_sec = 0.025
+  for _ in xrange(1, max_attempts):
+    if predicate():
+      return True
+    time.sleep(sleep_time_sec)
+    sleep_time_sec = min(1, sleep_time_sec * 2)  # Don't wait more than 1 sec.
+  return False
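+
+# Illustrative example (not part of this change), as used by the helpers
+# below:
+#   _WaitUntil(lambda: ports.IsHostPortAvailable(port))
+# polls the predicate up to four times, sleeping 0.025s, 0.05s, 0.1s and
+# 0.2s after failed checks, so it gives up after roughly 0.4 seconds.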
+
+
+def _CheckPortAvailable(port):
+  """Returns True if |port| is available."""
+  return _WaitUntil(lambda: ports.IsHostPortAvailable(port))
+
+
+def _CheckPortNotAvailable(port):
+  """Returns True if |port| is not available."""
+  return _WaitUntil(lambda: not ports.IsHostPortAvailable(port))
+
+
+def _CheckDevicePortStatus(device, port):
+  """Returns whether the provided port is used."""
+  return _WaitUntil(lambda: ports.IsDevicePortUsed(device, port))
+
+
+def _GetServerTypeCommandLine(server_type):
+  """Returns the command-line by the given server type.
+
+  Args:
+    server_type: the server type to be used (e.g. 'http').
+
+  Returns:
+    A string containing the command-line argument.
+  """
+  if server_type not in SERVER_TYPES:
+    raise NotImplementedError('Unknown server type: %s' % server_type)
+  if server_type == 'udpecho':
+    raise Exception('Please do not run UDP echo tests because we do not have '
+                    'a UDP forwarder tool.')
+  return SERVER_TYPES[server_type]
+
+
+class TestServerThread(threading.Thread):
+  """A thread to run the test server in a separate process."""
+
+  def __init__(self, ready_event, arguments, device, tool):
+    """Initialize TestServerThread with the following argument.
+
+    Args:
+      ready_event: event which will be set when the test server is ready.
+      arguments: dictionary of arguments to run the test server.
+      device: An instance of DeviceUtils.
+      tool: instance of runtime error detection tool.
+    """
+    threading.Thread.__init__(self)
+    self.wait_event = threading.Event()
+    self.stop_flag = False
+    self.ready_event = ready_event
+    self.ready_event.clear()
+    self.arguments = arguments
+    self.device = device
+    self.tool = tool
+    self.test_server_process = None
+    self.is_ready = False
+    self.host_port = self.arguments['port']
+    assert isinstance(self.host_port, int)
+    # The forwarder device port now is dynamically allocated.
+    self.forwarder_device_port = 0
+    # Anonymous pipe in order to get port info from test server.
+    self.pipe_in = None
+    self.pipe_out = None
+    self.process = None
+    self.command_line = []
+
+  def _WaitToStartAndGetPortFromTestServer(self):
+    """Waits for the Python test server to start and gets the port it is using.
+
+    The port information is passed by the Python test server over the pipe
+    given by self.pipe_out, and the result is written to |self.host_port|.
+
+    Returns:
+      Whether the port used by the test server was successfully fetched.
+    """
+    assert self.host_port == 0 and self.pipe_out and self.pipe_in
+    (in_fds, _, _) = select.select([self.pipe_in, ], [], [],
+                                   TEST_SERVER_STARTUP_TIMEOUT)
+    if len(in_fds) == 0:
+      logging.error('Timed out waiting for the Python test server to start.')
+      return False
+    # First read the data length as an unsigned 4-byte value.  This
+    # is _not_ using network byte ordering since the Python test server packs
+    # size as native byte order and all Chromium platforms so far are
+    # configured to use little-endian.
+    # TODO(jnd): Change the Python test server and local_test_server_*.cc to
+    # use a unified byte order (either big-endian or little-endian).
+    data_length = os.read(self.pipe_in, struct.calcsize('=L'))
+    if data_length:
+      (data_length,) = struct.unpack('=L', data_length)
+      assert data_length
+    if not data_length:
+      logging.error('Failed to get length of server data.')
+      return False
+    port_json = os.read(self.pipe_in, data_length)
+    if not port_json:
+      logging.error('Failed to get server data.')
+      return False
+    logging.info('Got port json data: %s', port_json)
+    port_json = json.loads(port_json)
+    if 'port' in port_json and isinstance(port_json['port'], int):
+      self.host_port = port_json['port']
+      return _CheckPortNotAvailable(self.host_port)
+    logging.error('Failed to get port information from the server data.')
+    return False
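+
+  # Illustrative sketch (not part of this change) of the handshake read above:
+  # the test server writes to the fd passed via --startup-pipe
+  #     struct.pack('=L', len(payload)) + payload
+  # where payload is JSON such as '{"port": 51234}' (port value made up),
+  # with the length packed in native byte order.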
+
+  def _GenerateCommandLineArguments(self):
+    """Generates the command line to run the test server.
+
+    Note that all options are processed by following the definitions in
+    testserver.py.
+    """
+    if self.command_line:
+      return
+
+    args_copy = dict(self.arguments)
+
+    # Translate the server type.
+    type_cmd = _GetServerTypeCommandLine(args_copy.pop('server-type'))
+    if type_cmd:
+      self.command_line.append(type_cmd)
+
+    # Use a pipe to get the port given by the instance of Python test server
+    # if the test does not specify the port.
+    assert self.host_port == args_copy['port']
+    if self.host_port == 0:
+      (self.pipe_in, self.pipe_out) = os.pipe()
+      self.command_line.append('--startup-pipe=%d' % self.pipe_out)
+
+    # Pass the remaining arguments as-is.
+    for key, values in args_copy.iteritems():
+      if not isinstance(values, list):
+        values = [values]
+      for value in values:
+        if value is None:
+          self.command_line.append('--%s' % key)
+        else:
+          self.command_line.append('--%s=%s' % (key, value))
+
+  def _CloseUnnecessaryFDsForTestServerProcess(self):
+    # This is required to avoid subtle deadlocks that could be caused by the
+    # test server child process inheriting undesirable file descriptors such as
+    # file lock file descriptors.
+    for fd in xrange(0, 1024):
+      if fd != self.pipe_out:
+        try:
+          os.close(fd)
+        except:
+          pass
+
+  def run(self):
+    logging.info('Start running the thread!')
+    self.wait_event.clear()
+    self._GenerateCommandLineArguments()
+    command = constants.DIR_SOURCE_ROOT
+    if self.arguments['server-type'] == 'sync':
+      command = [os.path.join(command, 'sync', 'tools', 'testserver',
+                              'sync_testserver.py')] + self.command_line
+    else:
+      command = [os.path.join(command, 'net', 'tools', 'testserver',
+                              'testserver.py')] + self.command_line
+    logging.info('Running: %s', command)
+    # Pass DIR_SOURCE_ROOT as the child's working directory so that relative
+    # paths in the arguments are resolved correctly.
+    self.process = subprocess.Popen(
+        command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess,
+        cwd=constants.DIR_SOURCE_ROOT)
+    if self.process:
+      if self.pipe_out:
+        self.is_ready = self._WaitToStartAndGetPortFromTestServer()
+      else:
+        self.is_ready = _CheckPortNotAvailable(self.host_port)
+    if self.is_ready:
+      Forwarder.Map([(0, self.host_port)], self.device, self.tool)
+      # Check whether the forwarder is ready on the device.
+      self.is_ready = False
+      device_port = Forwarder.DevicePortForHostPort(self.host_port)
+      if device_port and _CheckDevicePortStatus(self.device, device_port):
+        self.is_ready = True
+        self.forwarder_device_port = device_port
+    # Wake up the request handler thread.
+    self.ready_event.set()
+    # Keep thread running until Stop() gets called.
+    _WaitUntil(lambda: self.stop_flag, max_attempts=sys.maxint)
+    if self.process.poll() is None:
+      self.process.kill()
+    Forwarder.UnmapDevicePort(self.forwarder_device_port, self.device)
+    self.process = None
+    self.is_ready = False
+    if self.pipe_out:
+      os.close(self.pipe_in)
+      os.close(self.pipe_out)
+      self.pipe_in = None
+      self.pipe_out = None
+    logging.info('Test-server has died.')
+    self.wait_event.set()
+
+  def Stop(self):
+    """Blocks until the loop has finished.
+
+    Note that this must be called in another thread.
+    """
+    if not self.process:
+      return
+    self.stop_flag = True
+    self.wait_event.wait()
+
+
+class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """A handler used to process http GET/POST request."""
+
+  def _SendResponse(self, response_code, response_reason, additional_headers,
+                    contents):
+    """Generates a response sent to the client from the provided parameters.
+
+    Args:
+      response_code: number of the response status.
+      response_reason: string of reason description of the response.
+      additional_headers: dict of additional headers. Each key is the name of
+                          the header, each value is the content of the header.
+      contents: string of the contents we want to send to client.
+    """
+    self.send_response(response_code, response_reason)
+    self.send_header('Content-Type', 'text/html')
+    # Specify the content-length; without it the http(s) response will not
+    # complete properly (the browser keeps expecting data).
+    self.send_header('Content-Length', len(contents))
+    for header_name in additional_headers:
+      self.send_header(header_name, additional_headers[header_name])
+    self.end_headers()
+    self.wfile.write(contents)
+    self.wfile.flush()
+
+  def _StartTestServer(self):
+    """Starts the test server thread."""
+    logging.info('Handling request to spawn a test server.')
+    content_type = self.headers.getheader('content-type')
+    if content_type != 'application/json':
+      raise Exception('Bad content-type for start request.')
+    content_length = self.headers.getheader('content-length')
+    if not content_length:
+      content_length = 0
+    try:
+      content_length = int(content_length)
+    except:
+      raise Exception('Bad content-length for start request.')
+    logging.info(content_length)
+    test_server_argument_json = self.rfile.read(content_length)
+    logging.info(test_server_argument_json)
+    assert not self.server.test_server_instance
+    ready_event = threading.Event()
+    self.server.test_server_instance = TestServerThread(
+        ready_event,
+        json.loads(test_server_argument_json),
+        self.server.device,
+        self.server.tool)
+    self.server.test_server_instance.setDaemon(True)
+    self.server.test_server_instance.start()
+    ready_event.wait()
+    if self.server.test_server_instance.is_ready:
+      self._SendResponse(200, 'OK', {}, json.dumps(
+          {'port': self.server.test_server_instance.forwarder_device_port,
+           'message': 'started'}))
+      logging.info('Test server is running on port: %d.',
+                   self.server.test_server_instance.host_port)
+    else:
+      self.server.test_server_instance.Stop()
+      self.server.test_server_instance = None
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      logging.info('Encountered a problem while starting the test server.')
+
+  def _KillTestServer(self):
+    """Stops the test server instance."""
+    # There should only ever be one test server at a time. This may do the
+    # wrong thing if we try to start multiple test servers.
+    if not self.server.test_server_instance:
+      return
+    port = self.server.test_server_instance.host_port
+    logging.info('Handling request to kill a test server on port: %d.', port)
+    self.server.test_server_instance.Stop()
+    # Make sure the status of test server is correct before sending response.
+    if _CheckPortAvailable(port):
+      self._SendResponse(200, 'OK', {}, 'killed')
+      logging.info('Test server on port %d is killed.', port)
+    else:
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      logging.info('Encountered a problem while killing the test server.')
+    self.server.test_server_instance = None
+
+  def do_POST(self):
+    parsed_path = urlparse.urlparse(self.path)
+    action = parsed_path.path
+    logging.info('Action for POST method is: %s.', action)
+    if action == '/start':
+      self._StartTestServer()
+    else:
+      self._SendResponse(400, 'Unknown request.', {}, '')
+      logging.info('Encountered unknown request: %s.', action)
+
+  def do_GET(self):
+    parsed_path = urlparse.urlparse(self.path)
+    action = parsed_path.path
+    params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
+    logging.info('Action for GET method is: %s.', action)
+    for param in params:
+      logging.info('%s=%s', param, params[param][0])
+    if action == '/kill':
+      self._KillTestServer()
+    elif action == '/ping':
+      # The ping handler is used to check whether the spawner server is ready
+      # to serve the requests. We don't need to test the status of the test
+      # server when handling ping request.
+      self._SendResponse(200, 'OK', {}, 'ready')
+      logging.info('Handled ping request and sent response.')
+    else:
+      self._SendResponse(400, 'Unknown request.', {}, '')
+      logging.info('Encountered unknown request: %s.', action)
+
+
+class SpawningServer(object):
+  """The class used to start/stop a http server."""
+
+  def __init__(self, test_server_spawner_port, device, tool):
+    logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
+    self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
+                                            SpawningServerRequestHandler)
+    self.server.device = device
+    self.server.tool = tool
+    self.server.test_server_instance = None
+    self.server.build_type = constants.GetBuildType()
+
+  def _Listen(self):
+    logging.info('Starting test server spawner')
+    self.server.serve_forever()
+
+  def Start(self):
+    """Starts the test server spawner."""
+    listener_thread = threading.Thread(target=self._Listen)
+    listener_thread.setDaemon(True)
+    listener_thread.start()
+
+  def Stop(self):
+    """Stops the test server spawner.
+
+    Also cleans the server state.
+    """
+    self.CleanupState()
+    self.server.shutdown()
+
+  def CleanupState(self):
+    """Cleans up the spawning server state.
+
+    This should be called if the test server spawner is reused,
+    to avoid sharing the test server instance.
+    """
+    if self.server.test_server_instance:
+      self.server.test_server_instance.Stop()
+      self.server.test_server_instance = None
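+
+# Illustrative sketch (not part of this change) of the HTTP interface the
+# handler above implements:
+#   POST /start  body: JSON dict of testserver arguments
+#                -> 200 {"port": <forwarded device port>, "message": "started"}
+#   GET  /kill   -> 200 'killed'
+#   GET  /ping   -> 200 'ready'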
diff --git a/build/android/pylib/cmd_helper.py b/build/android/pylib/cmd_helper.py
new file mode 100644
index 0000000..f881553
--- /dev/null
+++ b/build/android/pylib/cmd_helper.py
@@ -0,0 +1,261 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A wrapper for subprocess to make calling shell commands easier."""
+
+import logging
+import os
+import pipes
+import select
+import signal
+import string
+import StringIO
+import subprocess
+import time
+
+# fcntl is not available on Windows.
+try:
+  import fcntl
+except ImportError:
+  fcntl = None
+
+_SafeShellChars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./')
+
+def SingleQuote(s):
+  """Return an shell-escaped version of the string using single quotes.
+
+  Reliably quote a string which may contain unsafe characters (e.g. space,
+  quote, or other special characters such as '$').
+
+  The returned value can be used in a shell command line as one token that gets
+  to be interpreted literally.
+
+  Args:
+    s: The string to quote.
+
+  Returns:
+    The string quoted using single quotes.
+  """
+  return pipes.quote(s)
+
+def DoubleQuote(s):
+  """Return an shell-escaped version of the string using double quotes.
+
+  Reliably quote a string which may contain unsafe characters (e.g. space
+  or quote characters), while retaining some shell features such as variable
+  interpolation.
+
+  The returned value can be used in a shell command line as one token that gets
+  to be further interpreted by the shell.
+
+  The set of characters that retain their special meaning may depend on the
+  shell implementation. This set usually includes: '$', '`', '\\', '!', '*',
+  and '@'.
+
+  Args:
+    s: The string to quote.
+
+  Returns:
+    The string quoted using double quotes.
+  """
+  if not s:
+    return '""'
+  elif all(c in _SafeShellChars for c in s):
+    return s
+  else:
+    return '"' + s.replace('"', '\\"') + '"'
+
+
+def Popen(args, stdout=None, stderr=None, shell=None, cwd=None, env=None):
+  return subprocess.Popen(
+      args=args, cwd=cwd, stdout=stdout, stderr=stderr,
+      shell=shell, close_fds=True, env=env,
+      preexec_fn=lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL))
+
+
+def Call(args, stdout=None, stderr=None, shell=None, cwd=None, env=None):
+  pipe = Popen(args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd,
+               env=env)
+  pipe.communicate()
+  return pipe.wait()
+
+
+def RunCmd(args, cwd=None):
+  """Opens a subprocess to execute a program and returns its return value.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+
+  Returns:
+    Return code from the command execution.
+  """
+  logging.info(str(args) + ' ' + (cwd or ''))
+  return Call(args, cwd=cwd)
+
+
+def GetCmdOutput(args, cwd=None, shell=False):
+  """Open a subprocess to execute a program and returns its output.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command.
+
+  Returns:
+    Captures and returns the command's stdout.
+    Prints the command's stderr to logger (which defaults to stdout).
+  """
+  (_, output) = GetCmdStatusAndOutput(args, cwd, shell)
+  return output
+
+
+def _ValidateAndLogCommand(args, cwd, shell):
+  if isinstance(args, basestring):
+    if not shell:
+      raise Exception('string args must be run with shell=True')
+  else:
+    if shell:
+      raise Exception('array args must be run with shell=False')
+    args = ' '.join(SingleQuote(c) for c in args)
+  if cwd is None:
+    cwd = ''
+  else:
+    cwd = ':' + cwd
+  logging.info('[host]%s> %s', cwd, args)
+  return args
+
+
+def GetCmdStatusAndOutput(args, cwd=None, shell=False):
+  """Executes a subprocess and returns its exit code and output.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command. Must be True if args
+      is a string and False if args is a sequence.
+
+  Returns:
+    The 2-tuple (exit code, output).
+  """
+  _ValidateAndLogCommand(args, cwd, shell)
+  pipe = Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+               shell=shell, cwd=cwd)
+  stdout, stderr = pipe.communicate()
+
+  if stderr:
+    logging.critical(stderr)
+  if len(stdout) > 4096:
+    logging.debug('Truncated output:')
+  logging.debug(stdout[:4096])
+  return (pipe.returncode, stdout)
+
+
+class TimeoutError(Exception):
+  """Module-specific timeout exception."""
+  pass
+
+
+def _IterProcessStdout(process, timeout=None, buffer_size=4096,
+                       poll_interval=1):
+  assert fcntl, 'fcntl module is required'
+  try:
+    # Enable non-blocking reads from the child's stdout.
+    child_fd = process.stdout.fileno()
+    fl = fcntl.fcntl(child_fd, fcntl.F_GETFL)
+    fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
+
+    end_time = (time.time() + timeout) if timeout else None
+    while True:
+      if end_time and time.time() > end_time:
+        raise TimeoutError
+      read_fds, _, _ = select.select([child_fd], [], [], poll_interval)
+      if child_fd in read_fds:
+        data = os.read(child_fd, buffer_size)
+        if not data:
+          break
+        yield data
+      if process.poll() is not None:
+        break
+  finally:
+    try:
+      # Make sure the process doesn't stick around if we fail with an
+      # exception.
+      process.kill()
+    except OSError:
+      pass
+    process.wait()
+
+
+def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False,
+                                     logfile=None):
+  """Executes a subprocess with a timeout.
+
+  Args:
+    args: List of arguments to the program, the program to execute is the first
+      element.
+    timeout: the timeout in seconds or None to wait forever.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command. Must be True if args
+      is a string and False if args is a sequence.
+    logfile: Optional file-like object that will receive output from the
+      command as it is running.
+
+  Returns:
+    The 2-tuple (exit code, output).
+  """
+  _ValidateAndLogCommand(args, cwd, shell)
+  output = StringIO.StringIO()
+  process = Popen(args, cwd=cwd, shell=shell, stdout=subprocess.PIPE,
+                  stderr=subprocess.STDOUT)
+  for data in _IterProcessStdout(process, timeout=timeout):
+    if logfile:
+      logfile.write(data)
+    output.write(data)
+  return process.returncode, output.getvalue()
+
+
+def IterCmdOutputLines(args, timeout=None, cwd=None, shell=False,
+                       check_status=True):
+  """Executes a subprocess and continuously yields lines from its output.
+
+  Args:
+    args: List of arguments to the program, the program to execute is the first
+      element.
+    timeout: the timeout in seconds or None to wait forever.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command. Must be True if args
+      is a string and False if args is a sequence.
+    check_status: A boolean indicating whether to check the exit status of the
+      process after all output has been read.
+
+  Yields:
+    The output of the subprocess, line by line.
+
+  Raises:
+    CalledProcessError if check_status is True and the process exited with a
+      non-zero exit status.
+  """
+  cmd = _ValidateAndLogCommand(args, cwd, shell)
+  process = Popen(args, cwd=cwd, shell=shell, stdout=subprocess.PIPE,
+                  stderr=subprocess.STDOUT)
+  buffer_output = ''
+  for data in _IterProcessStdout(process, timeout=timeout):
+    buffer_output += data
+    has_incomplete_line = buffer_output[-1] not in '\r\n'
+    lines = buffer_output.splitlines()
+    buffer_output = lines.pop() if has_incomplete_line else ''
+    for line in lines:
+      yield line
+  if buffer_output:
+    yield buffer_output
+  if check_status and process.returncode:
+    raise subprocess.CalledProcessError(process.returncode, cmd)
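+
+# Illustrative usage sketch (not part of this change):
+#   status, output = GetCmdStatusAndOutput(['ls', '/tmp'])
+#   for line in IterCmdOutputLines(['adb', 'devices'], timeout=60):
+#     ...  # each line is yielded as the command produces it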
diff --git a/build/android/pylib/cmd_helper_test.py b/build/android/pylib/cmd_helper_test.py
new file mode 100644
index 0000000..5155cea
--- /dev/null
+++ b/build/android/pylib/cmd_helper_test.py
@@ -0,0 +1,83 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the cmd_helper module."""
+
+import unittest
+import subprocess
+
+from pylib import cmd_helper
+
+
+class CmdHelperSingleQuoteTest(unittest.TestCase):
+
+  def testSingleQuote_basic(self):
+    self.assertEquals('hello',
+                      cmd_helper.SingleQuote('hello'))
+
+  def testSingleQuote_withSpaces(self):
+    self.assertEquals("'hello world'",
+                      cmd_helper.SingleQuote('hello world'))
+
+  def testSingleQuote_withUnsafeChars(self):
+    self.assertEquals("""'hello'"'"'; rm -rf /'""",
+                      cmd_helper.SingleQuote("hello'; rm -rf /"))
+
+  def testSingleQuote_dontExpand(self):
+    test_string = 'hello $TEST_VAR'
+    cmd = 'TEST_VAR=world; echo %s' % cmd_helper.SingleQuote(test_string)
+    self.assertEquals(test_string,
+                      cmd_helper.GetCmdOutput(cmd, shell=True).rstrip())
+
+
+class CmdHelperDoubleQuoteTest(unittest.TestCase):
+
+  def testDoubleQuote_basic(self):
+    self.assertEquals('hello',
+                      cmd_helper.DoubleQuote('hello'))
+
+  def testDoubleQuote_withSpaces(self):
+    self.assertEquals('"hello world"',
+                      cmd_helper.DoubleQuote('hello world'))
+
+  def testDoubleQuote_withUnsafeChars(self):
+    self.assertEquals('''"hello\\"; rm -rf /"''',
+                      cmd_helper.DoubleQuote('hello"; rm -rf /'))
+
+  def testSingleQuote_doExpand(self):
+    test_string = 'hello $TEST_VAR'
+    cmd = 'TEST_VAR=world; echo %s' % cmd_helper.DoubleQuote(test_string)
+    self.assertEquals('hello world',
+                      cmd_helper.GetCmdOutput(cmd, shell=True).rstrip())
+
+
+class CmdHelperIterCmdOutputLinesTest(unittest.TestCase):
+  """Test IterCmdOutputLines with some calls to the unix 'seq' command."""
+
+  def testIterCmdOutputLines_success(self):
+    for num, line in enumerate(
+        cmd_helper.IterCmdOutputLines(['seq', '10']), 1):
+      self.assertEquals(num, int(line))
+
+  def testIterCmdOutputLines_exitStatusFail(self):
+    with self.assertRaises(subprocess.CalledProcessError):
+      for num, line in enumerate(
+          cmd_helper.IterCmdOutputLines('seq 10 && false', shell=True), 1):
+        self.assertEquals(num, int(line))
+      # after reading all the output we get an exit status of 1
+
+  def testIterCmdOutputLines_exitStatusIgnored(self):
+    for num, line in enumerate(
+        cmd_helper.IterCmdOutputLines('seq 10 && false', shell=True,
+                                      check_status=False), 1):
+      self.assertEquals(num, int(line))
+
+  def testIterCmdOutputLines_exitStatusSkipped(self):
+    for num, line in enumerate(
+        cmd_helper.IterCmdOutputLines('seq 10 && false', shell=True), 1):
+      self.assertEquals(num, int(line))
+      # no exception will be raised because we don't attempt to read past
+      # the end of the output and, thus, the status never gets checked
+      if num == 10:
+        break
diff --git a/build/android/pylib/constants/__init__.py b/build/android/pylib/constants/__init__.py
new file mode 100644
index 0000000..8821f97
--- /dev/null
+++ b/build/android/pylib/constants/__init__.py
@@ -0,0 +1,308 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants shared by test runners and other scripts."""
+
+# TODO(jbudorick): Split these constants into coherent modules.
+
+# pylint: disable=W0212
+
+import collections
+import logging
+import os
+import subprocess
+
+
+DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
+    os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                 os.pardir, os.pardir, os.pardir, os.pardir)))
+ISOLATE_DEPS_DIR = os.path.join(DIR_SOURCE_ROOT, 'isolate_deps_dir')
+
+CHROME_SHELL_HOST_DRIVEN_DIR = os.path.join(
+    DIR_SOURCE_ROOT, 'chrome', 'android')
+
+
+PackageInfo = collections.namedtuple('PackageInfo',
+    ['package', 'activity', 'cmdline_file', 'devtools_socket',
+     'test_package'])
+
+PACKAGE_INFO = {
+    'chrome_document': PackageInfo(
+        'com.google.android.apps.chrome.document',
+        'com.google.android.apps.chrome.document.ChromeLauncherActivity',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome': PackageInfo(
+        'com.google.android.apps.chrome',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        'com.google.android.apps.chrome.tests'),
+    'chrome_beta': PackageInfo(
+        'com.chrome.beta',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome_stable': PackageInfo(
+        'com.android.chrome',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome_dev': PackageInfo(
+        'com.chrome.dev',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome_canary': PackageInfo(
+        'com.chrome.canary',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome_work': PackageInfo(
+        'com.chrome.work',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chromium': PackageInfo(
+        'org.chromium.chrome',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'legacy_browser': PackageInfo(
+        'com.google.android.browser',
+        'com.android.browser.BrowserActivity',
+        None,
+        None,
+        None),
+    'chromecast_shell': PackageInfo(
+        'com.google.android.apps.mediashell',
+        'com.google.android.apps.mediashell.MediaShellActivity',
+        '/data/local/tmp/castshell-command-line',
+        None,
+        None),
+    'content_shell': PackageInfo(
+        'org.chromium.content_shell_apk',
+        'org.chromium.content_shell_apk.ContentShellActivity',
+        '/data/local/tmp/content-shell-command-line',
+        None,
+        'org.chromium.content_shell_apk.tests'),
+    'chrome_shell': PackageInfo(
+        'org.chromium.chrome.shell',
+        'org.chromium.chrome.shell.ChromeShellActivity',
+        '/data/local/tmp/chrome-shell-command-line',
+        'chrome_shell_devtools_remote',
+        'org.chromium.chrome.shell.tests'),
+    'android_webview_shell': PackageInfo(
+        'org.chromium.android_webview.shell',
+        'org.chromium.android_webview.shell.AwShellActivity',
+        '/data/local/tmp/android-webview-command-line',
+        None,
+        'org.chromium.android_webview.test'),
+    'gtest': PackageInfo(
+        'org.chromium.native_test',
+        'org.chromium.native_test.NativeUnitTestActivity',
+        '/data/local/tmp/chrome-native-tests-command-line',
+        None,
+        None),
+    'components_browsertests': PackageInfo(
+        'org.chromium.components_browsertests_apk',
+        ('org.chromium.components_browsertests_apk' +
+         '.ComponentsBrowserTestsActivity'),
+        '/data/local/tmp/chrome-native-tests-command-line',
+        None,
+        None),
+    'content_browsertests': PackageInfo(
+        'org.chromium.content_browsertests_apk',
+        'org.chromium.content_browsertests_apk.ContentBrowserTestsActivity',
+        '/data/local/tmp/chrome-native-tests-command-line',
+        None,
+        None),
+    'chromedriver_webview_shell': PackageInfo(
+        'org.chromium.chromedriver_webview_shell',
+        'org.chromium.chromedriver_webview_shell.Main',
+        None,
+        None,
+        None),
+}
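+
+# A minimal lookup sketch (illustrative only): each PACKAGE_INFO entry maps a
+# short name to the package, launch activity, command-line file, DevTools
+# socket, and test package for one build of the browser.
+#
+#   info = PACKAGE_INFO['chrome_shell']
+#   # info.package  -> 'org.chromium.chrome.shell'
+#   # info.activity -> 'org.chromium.chrome.shell.ChromeShellActivity'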
+
+
+# Ports arrangement for various test servers used in Chrome for Android.
+# The lighttpd server will attempt to use 9000 as its default port; if that is
+# unavailable, it will find a free port in the range 8001 - 8999.
+LIGHTTPD_DEFAULT_PORT = 9000
+LIGHTTPD_RANDOM_PORT_FIRST = 8001
+LIGHTTPD_RANDOM_PORT_LAST = 8999
+TEST_SYNC_SERVER_PORT = 9031
+TEST_SEARCH_BY_IMAGE_SERVER_PORT = 9041
+TEST_POLICY_SERVER_PORT = 9051
+
+# The net test server is started from port 10201.
+# TODO(pliard): Remove this dirty workaround once http://crbug.com/239014 is
+# fixed properly.
+TEST_SERVER_PORT_FIRST = 10201
+TEST_SERVER_PORT_LAST = 30000
+# A file to record next valid port of test server.
+TEST_SERVER_PORT_FILE = '/tmp/test_server_port'
+TEST_SERVER_PORT_LOCKFILE = '/tmp/test_server_port.lock'
+
+TEST_EXECUTABLE_DIR = '/data/local/tmp'
+# Directories for common java libraries for SDK build.
+# These constants are defined in build/android/ant/common.xml
+SDK_BUILD_JAVALIB_DIR = 'lib.java'
+SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
+SDK_BUILD_APKS_DIR = 'apks'
+
+ADB_KEYS_FILE = '/data/misc/adb/adb_keys'
+
+PERF_OUTPUT_DIR = os.path.join(DIR_SOURCE_ROOT, 'out', 'step_results')
+# The directory on the device where perf test output gets saved to.
+DEVICE_PERF_OUTPUT_DIR = (
+    '/data/data/' + PACKAGE_INFO['chrome'].package + '/files')
+
+SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots')
+
+class ANDROID_SDK_VERSION_CODES(object):
+  """Android SDK version codes.
+
+  http://developer.android.com/reference/android/os/Build.VERSION_CODES.html
+  """
+
+  JELLY_BEAN = 16
+  JELLY_BEAN_MR1 = 17
+  JELLY_BEAN_MR2 = 18
+  KITKAT = 19
+  KITKAT_WATCH = 20
+  LOLLIPOP = 21
+  LOLLIPOP_MR1 = 22
+
+ANDROID_SDK_VERSION = ANDROID_SDK_VERSION_CODES.LOLLIPOP_MR1
+ANDROID_SDK_BUILD_TOOLS_VERSION = '22.0.1'
+ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
+                                'third_party/android_tools/sdk')
+ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT,
+                                 'build-tools', ANDROID_SDK_BUILD_TOOLS_VERSION)
+ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
+                                'third_party/android_tools/ndk')
+
+EMULATOR_SDK_ROOT = os.environ.get('ANDROID_EMULATOR_SDK_ROOT',
+                                   os.path.join(DIR_SOURCE_ROOT,
+                                                'android_emulator_sdk'))
+
+BAD_DEVICES_JSON = os.path.join(DIR_SOURCE_ROOT,
+                                os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+                                'bad_devices.json')
+
+UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'
+
+DEVICE_LOCAL_PROPERTIES_PATH = '/data/local.prop'
+
+PYTHON_UNIT_TEST_SUITES = {
+  'pylib_py_unittests': {
+    'path': os.path.join(DIR_SOURCE_ROOT, 'build', 'android'),
+    'test_modules': [
+      'pylib.cmd_helper_test',
+      'pylib.device.device_utils_test',
+      'pylib.results.json_results_test',
+      'pylib.utils.md5sum_test',
+    ]
+  },
+  'gyp_py_unittests': {
+    'path': os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'gyp'),
+    'test_modules': [
+      'java_cpp_enum_tests',
+    ]
+  },
+}
+
+LOCAL_MACHINE_TESTS = ['junit', 'python']
+VALID_ENVIRONMENTS = ['local', 'remote_device']
+VALID_TEST_TYPES = ['gtest', 'instrumentation', 'junit', 'linker', 'monkey',
+                    'perf', 'python', 'uiautomator', 'uirobot']
+VALID_DEVICE_TYPES = ['Android', 'iOS']
+
+
+def GetBuildType():
+  try:
+    return os.environ['BUILDTYPE']
+  except KeyError:
+    raise EnvironmentError(
+        'The BUILDTYPE environment variable has not been set')
+
+
+def SetBuildType(build_type):
+  os.environ['BUILDTYPE'] = build_type
+
+
+def SetBuildDirectory(build_directory):
+  os.environ['CHROMIUM_OUT_DIR'] = build_directory
+
+
+def SetOutputDirectory(output_directory):
+  os.environ['CHROMIUM_OUTPUT_DIR'] = output_directory
+
+
+def GetOutDirectory(build_type=None):
+  """Returns the out directory where the output binaries are built.
+
+  Args:
+    build_type: Build type, generally 'Debug' or 'Release'. Defaults to the
+      globally set build type environment variable BUILDTYPE.
+  """
+  if 'CHROMIUM_OUTPUT_DIR' in os.environ:
+    return os.path.abspath(os.path.join(
+        DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUTPUT_DIR')))
+
+  return os.path.abspath(os.path.join(
+      DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+      GetBuildType() if build_type is None else build_type))
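+
+# A resolution sketch for GetOutDirectory (illustrative only). Assuming
+# CHROMIUM_OUTPUT_DIR is unset, CHROMIUM_OUT_DIR defaults to 'out' and
+# BUILDTYPE is 'Release':
+#
+#   GetOutDirectory()                     # -> <DIR_SOURCE_ROOT>/out/Release
+#   GetOutDirectory(build_type='Debug')   # -> <DIR_SOURCE_ROOT>/out/Debug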
+
+
+def _Memoize(func):
+  def Wrapper():
+    try:
+      return func._result
+    except AttributeError:
+      func._result = func()
+      return func._result
+  return Wrapper
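+
+# _Memoize caches the result of a zero-argument function on its first call,
+# as _FindAdbPath below does. Illustrative sketch (_Expensive is hypothetical):
+#
+#   @_Memoize
+#   def _Expensive():
+#     ...  # runs only on the first call; later calls reuse the cached result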
+
+
+def SetAdbPath(adb_path):
+  os.environ['ADB_PATH'] = adb_path
+
+
+def GetAdbPath():
+  # Check if a custom adb path has been set. If not, try to find adb
+  # on the system.
+  if os.environ.get('ADB_PATH'):
+    return os.environ.get('ADB_PATH')
+  else:
+    return _FindAdbPath()
+
+
+@_Memoize
+def _FindAdbPath():
+  if os.environ.get('ANDROID_SDK_ROOT'):
+    return 'adb'
+  # If envsetup.sh hasn't been sourced and there's no adb in the path,
+  # set it here.
+  try:
+    with open(os.devnull, 'w') as devnull:
+      subprocess.call(['adb', 'version'], stdout=devnull, stderr=devnull)
+    return 'adb'
+  except OSError:
+    logging.debug('No adb found in $PATH, fallback to checked in binary.')
+    return os.path.join(ANDROID_SDK_ROOT, 'platform-tools', 'adb')
+
+# Exit codes
+ERROR_EXIT_CODE = 1
+INFRA_EXIT_CODE = 87
+WARNING_EXIT_CODE = 88
diff --git a/build/android/pylib/constants/keyevent.py b/build/android/pylib/constants/keyevent.py
new file mode 100644
index 0000000..06736b3
--- /dev/null
+++ b/build/android/pylib/constants/keyevent.py
@@ -0,0 +1,14 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Android KeyEvent constants.
+
+http://developer.android.com/reference/android/view/KeyEvent.html
+"""
+
+KEYCODE_BACK = 4
+KEYCODE_DPAD_RIGHT = 22
+KEYCODE_ENTER = 66
+KEYCODE_MENU = 82
+
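+# A minimal usage sketch (illustrative only; assumes a connected device and
+# adb on PATH). 'input keyevent' is the standard Android shell tool:
+#
+#   import subprocess
+#   from pylib.constants import keyevent
+#   subprocess.check_call(
+#       ['adb', 'shell', 'input', 'keyevent', str(keyevent.KEYCODE_BACK)])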
diff --git a/build/android/pylib/content_settings.py b/build/android/pylib/content_settings.py
new file mode 100644
index 0000000..8594140
--- /dev/null
+++ b/build/android/pylib/content_settings.py
@@ -0,0 +1,82 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+
+
+class ContentSettings(dict):
+
+  """A dict interface to interact with device content settings.
+
+  Content settings are key/value pairs as exposed by adb shell content.
+  """
+
+  def __init__(self, table, device):
+    super(ContentSettings, self).__init__()
+    self._table = table
+    self._device = device
+
+  @staticmethod
+  def _GetTypeBinding(value):
+    if isinstance(value, bool):
+      return 'b'
+    if isinstance(value, float):
+      return 'f'
+    if isinstance(value, int):
+      return 'i'
+    if isinstance(value, long):
+      return 'l'
+    if isinstance(value, str):
+      return 's'
+    raise ValueError('Unsupported type %s' % type(value))
+
+  def iteritems(self):
+    # Example row:
+    # 'Row: 0 _id=13, name=logging_id2, value=-1fccbaa546705b05'
+    for row in self._device.RunShellCommand(
+        'content query --uri content://%s' % self._table, as_root=True):
+      fields = row.split(', ')
+      key = None
+      value = None
+      for field in fields:
+        k, _, v = field.partition('=')
+        if k == 'name':
+          key = v
+        elif k == 'value':
+          value = v
+      if not key:
+        continue
+      if not value:
+        value = ''
+      yield key, value
+
+  def __getitem__(self, key):
+    return self._device.RunShellCommand(
+        'content query --uri content://%s --where "name=\'%s\'" '
+        '--projection value' % (self._table, key), as_root=True).strip()
+
+  def __setitem__(self, key, value):
+    if key in self:
+      self._device.RunShellCommand(
+          'content update --uri content://%s '
+          '--bind value:%s:%s --where "name=\'%s\'"' % (
+              self._table,
+              self._GetTypeBinding(value), value, key),
+          as_root=True)
+    else:
+      self._device.RunShellCommand(
+          'content insert --uri content://%s '
+          '--bind name:%s:%s --bind value:%s:%s' % (
+              self._table,
+              self._GetTypeBinding(key), key,
+              self._GetTypeBinding(value), value),
+          as_root=True)
+
+  def __delitem__(self, key):
+    self._device.RunShellCommand(
+        'content delete --uri content://%s '
+        '--bind name:%s:%s' % (
+            self._table,
+            self._GetTypeBinding(key), key),
+        as_root=True)
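+
+
+# A minimal usage sketch (illustrative only): 'settings/secure' is a standard
+# Android settings table, and `device` is assumed to be a DeviceUtils-style
+# object providing RunShellCommand.
+#
+#   settings = ContentSettings('settings/secure', device)
+#   for name, value in settings.iteritems():
+#     print name, value
+#   settings['mock_location'] = 1   # inserts or updates via adb shell content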
diff --git a/build/android/pylib/device/OWNERS b/build/android/pylib/device/OWNERS
new file mode 100644
index 0000000..c35d7ac
--- /dev/null
+++ b/build/android/pylib/device/OWNERS
@@ -0,0 +1,2 @@
+jbudorick@chromium.org
+perezju@chromium.org
diff --git a/build/android/pylib/device/__init__.py b/build/android/pylib/device/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/device/__init__.py
diff --git a/build/android/pylib/device/adb_wrapper.py b/build/android/pylib/device/adb_wrapper.py
new file mode 100644
index 0000000..e897326
--- /dev/null
+++ b/build/android/pylib/device/adb_wrapper.py
@@ -0,0 +1,608 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module wraps Android's adb tool.
+
+This is a thin wrapper around the adb interface. Any additional complexity
+should be delegated to a higher level (ex. DeviceUtils).
+"""
+
+import collections
+import errno
+import logging
+import os
+import re
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.device import decorators
+from pylib.device import device_errors
+from pylib.utils import timeout_retry
+
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 2
+
+_EMULATOR_RE = re.compile(r'^emulator-[0-9]+$')
+
+_READY_STATE = 'device'
+
+
+def _VerifyLocalFileExists(path):
+  """Verifies a local file exists.
+
+  Args:
+    path: Path to the local file.
+
+  Raises:
+    IOError: If the file doesn't exist.
+  """
+  if not os.path.exists(path):
+    raise IOError(errno.ENOENT, os.strerror(errno.ENOENT), path)
+
+
+DeviceStat = collections.namedtuple('DeviceStat',
+                                    ['st_mode', 'st_size', 'st_time'])
+
+
+class AdbWrapper(object):
+  """A wrapper around a local Android Debug Bridge executable."""
+
+  def __init__(self, device_serial):
+    """Initializes the AdbWrapper.
+
+    Args:
+      device_serial: The device serial number as a string.
+    """
+    if not device_serial:
+      raise ValueError('A device serial must be specified')
+    self._device_serial = str(device_serial)
+
+  # pylint: disable=unused-argument
+  @classmethod
+  def _BuildAdbCmd(cls, args, device_serial, cpu_affinity=None):
+    if cpu_affinity is not None:
+      cmd = ['taskset', '-c', str(cpu_affinity)]
+    else:
+      cmd = []
+    cmd.append(constants.GetAdbPath())
+    if device_serial is not None:
+      cmd.extend(['-s', device_serial])
+    cmd.extend(args)
+    return cmd
+  # pylint: enable=unused-argument
+
+  # pylint: disable=unused-argument
+  @classmethod
+  @decorators.WithTimeoutAndRetries
+  def _RunAdbCmd(cls, args, timeout=None, retries=None, device_serial=None,
+                 check_error=True, cpu_affinity=None):
+    status, output = cmd_helper.GetCmdStatusAndOutputWithTimeout(
+        cls._BuildAdbCmd(args, device_serial, cpu_affinity=cpu_affinity),
+        timeout_retry.CurrentTimeoutThread().GetRemainingTime())
+    if status != 0:
+      raise device_errors.AdbCommandFailedError(
+          args, output, status, device_serial)
+    # This catches some errors, including when the device drops offline;
+    # unfortunately adb is very inconsistent with error reporting so many
+    # command failures present differently.
+    if check_error and output.startswith('error:'):
+      raise device_errors.AdbCommandFailedError(args, output)
+    return output
+  # pylint: enable=unused-argument
+
+  def _RunDeviceAdbCmd(self, args, timeout, retries, check_error=True):
+    """Runs an adb command on the device associated with this object.
+
+    Args:
+      args: A list of arguments to adb.
+      timeout: Timeout in seconds.
+      retries: Number of retries.
+      check_error: Check that the command doesn't return an error message. This
+        does NOT check the exit status of shell commands.
+
+    Returns:
+      The output of the command.
+    """
+    return self._RunAdbCmd(args, timeout=timeout, retries=retries,
+                           device_serial=self._device_serial,
+                           check_error=check_error)
+
+  def _IterRunDeviceAdbCmd(self, args, timeout):
+    """Runs an adb command and returns an iterator over its output lines.
+
+    Args:
+      args: A list of arguments to adb.
+      timeout: Timeout in seconds.
+
+    Yields:
+      The output of the command line by line.
+    """
+    return cmd_helper.IterCmdOutputLines(
+      self._BuildAdbCmd(args, self._device_serial), timeout=timeout)
+
+  def __eq__(self, other):
+    """Consider instances equal if they refer to the same device.
+
+    Args:
+      other: The instance to compare equality with.
+
+    Returns:
+      True if the instances are considered equal, false otherwise.
+    """
+    return self._device_serial == str(other)
+
+  def __str__(self):
+    """The string representation of an instance.
+
+    Returns:
+      The device serial number as a string.
+    """
+    return self._device_serial
+
+  def __repr__(self):
+    return '%s(\'%s\')' % (self.__class__.__name__, self)
+
+  # pylint: disable=unused-argument
+  @classmethod
+  def IsServerOnline(cls):
+    status, output = cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb'])
+    output = [int(x) for x in output.split()]
+    logging.info('PIDs for adb found: %r', output)
+    return status == 0
+  # pylint: enable=unused-argument
+
+  @classmethod
+  def KillServer(cls, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    cls._RunAdbCmd(['kill-server'], timeout=timeout, retries=retries)
+
+  @classmethod
+  def StartServer(cls, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    # CPU affinity is used to reduce adb instability http://crbug.com/268450
+    cls._RunAdbCmd(['start-server'], timeout=timeout, retries=retries,
+                   cpu_affinity=0)
+
+  @classmethod
+  def GetDevices(cls, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """DEPRECATED. Refer to Devices(...) below."""
+    # TODO(jbudorick): Remove this function once no more clients are using it.
+    return cls.Devices(timeout=timeout, retries=retries)
+
+  @classmethod
+  def Devices(cls, is_ready=True, timeout=_DEFAULT_TIMEOUT,
+              retries=_DEFAULT_RETRIES):
+    """Get the list of active attached devices.
+
+    Args:
+      is_ready: Whether the devices should be limited to only those that are
+        ready for use.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Yields:
+      AdbWrapper instances.
+    """
+    output = cls._RunAdbCmd(['devices'], timeout=timeout, retries=retries)
+    lines = (line.split() for line in output.splitlines())
+    return [AdbWrapper(line[0]) for line in lines
+            if len(line) == 2 and (not is_ready or line[1] == _READY_STATE)]
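+
+  # A minimal Devices() usage sketch (illustrative only):
+  #
+  #   serials = [str(adb) for adb in AdbWrapper.Devices()]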
+
+  def GetDeviceSerial(self):
+    """Gets the device serial number associated with this object.
+
+    Returns:
+      Device serial number as a string.
+    """
+    return self._device_serial
+
+  def Push(self, local, remote, timeout=60*5, retries=_DEFAULT_RETRIES):
+    """Pushes a file from the host to the device.
+
+    Args:
+      local: Path on the host filesystem.
+      remote: Path on the device filesystem.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    _VerifyLocalFileExists(local)
+    self._RunDeviceAdbCmd(['push', local, remote], timeout, retries)
+
+  def Pull(self, remote, local, timeout=60*5, retries=_DEFAULT_RETRIES):
+    """Pulls a file from the device to the host.
+
+    Args:
+      remote: Path on the device filesystem.
+      local: Path on the host filesystem.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    cmd = ['pull', remote, local]
+    self._RunDeviceAdbCmd(cmd, timeout, retries)
+    try:
+      _VerifyLocalFileExists(local)
+    except IOError:
+      raise device_errors.AdbCommandFailedError(
+          cmd, 'File not found on host: %s' % local, device_serial=str(self))
+
+  def Shell(self, command, expect_status=0, timeout=_DEFAULT_TIMEOUT,
+            retries=_DEFAULT_RETRIES):
+    """Runs a shell command on the device.
+
+    Args:
+      command: A string with the shell command to run.
+      expect_status: (optional) Check that the command's exit status matches
+        this value. Default is 0. If set to None the test is skipped.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      The output of the shell command as a string.
+
+    Raises:
+      device_errors.AdbCommandFailedError: If the exit status doesn't match
+        |expect_status|.
+    """
+    if expect_status is None:
+      args = ['shell', command]
+    else:
+      args = ['shell', '%s; echo %%$?;' % command.rstrip()]
+    output = self._RunDeviceAdbCmd(args, timeout, retries, check_error=False)
+    if expect_status is not None:
+      output_end = output.rfind('%')
+      if output_end < 0:
+        # causes the status string to become empty and raise a ValueError
+        output_end = len(output)
+
+      try:
+        status = int(output[output_end+1:])
+      except ValueError:
+        logging.warning('exit status of shell command %r missing.', command)
+        raise device_errors.AdbShellCommandFailedError(
+            command, output, status=None, device_serial=self._device_serial)
+      output = output[:output_end]
+      if status != expect_status:
+        raise device_errors.AdbShellCommandFailedError(
+            command, output, status=status, device_serial=self._device_serial)
+    return output
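+
+  # A minimal Shell() usage sketch (illustrative only; the serial is made up):
+  #
+  #   adb = AdbWrapper('0123456789abcdef')
+  #   adb.Shell('echo hello')                # 'hello\n'; exit status must be 0
+  #   adb.Shell('false', expect_status=1)    # passes; status matches
+  #   adb.Shell('ls /', expect_status=None)  # skips the status check entirely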
+
+  def IterShell(self, command, timeout):
+    """Runs a shell command and returns an iterator over its output lines.
+
+    Args:
+      command: A string with the shell command to run.
+      timeout: Timeout in seconds.
+
+    Yields:
+      The output of the command line by line.
+    """
+    args = ['shell', command]
+    return cmd_helper.IterCmdOutputLines(
+      self._BuildAdbCmd(args, self._device_serial), timeout=timeout)
+
+  def Ls(self, path, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """List the contents of a directory on the device.
+
+    Args:
+      path: Path on the device filesystem.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      A list of pairs (filename, stat) for each file found in the directory,
+      where the stat object has the properties: st_mode, st_size, and st_time.
+
+    Raises:
+      AdbCommandFailedError if |path| does not specify a valid and accessible
+          directory on the device.
+    """
+    def ParseLine(line):
+      cols = line.split(None, 3)
+      filename = cols.pop()
+      stat = DeviceStat(*[int(num, base=16) for num in cols])
+      return (filename, stat)
+
+    cmd = ['ls', path]
+    lines = self._RunDeviceAdbCmd(
+        cmd, timeout=timeout, retries=retries).splitlines()
+    if lines:
+      return [ParseLine(line) for line in lines]
+    else:
+      raise device_errors.AdbCommandFailedError(
+          cmd, 'path does not specify an accessible directory on the device',
+          device_serial=self._device_serial)
+
+  def Logcat(self, clear=False, dump=False, filter_specs=None,
+             logcat_format=None, ring_buffer=None, timeout=None,
+             retries=_DEFAULT_RETRIES):
+    """Get an iterable over the logcat output.
+
+    Args:
+      clear: If true, clear the logcat.
+      dump: If true, dump the current logcat contents.
+      filter_specs: If set, a list of specs to filter the logcat.
+      logcat_format: If set, the format in which the logcat should be output.
+        Options include "brief", "process", "tag", "thread", "raw", "time",
+        "threadtime", and "long"
+      ring_buffer: If set, a list of alternate ring buffers to request.
+        Options include "main", "system", "radio", "events", "crash" or "all".
+        The default is equivalent to ["main", "system", "crash"].
+      timeout: (optional) If set, timeout per try in seconds. If clear or dump
+        is set, defaults to _DEFAULT_TIMEOUT.
+      retries: (optional) If clear or dump is set, the number of retries to
+        attempt. Otherwise, does nothing.
+
+    Yields:
+      logcat output line by line.
+    """
+    cmd = ['logcat']
+    use_iter = True
+    if clear:
+      cmd.append('-c')
+      use_iter = False
+    if dump:
+      cmd.append('-d')
+      use_iter = False
+    if logcat_format:
+      cmd.extend(['-v', logcat_format])
+    if ring_buffer:
+      for buffer_name in ring_buffer:
+        cmd.extend(['-b', buffer_name])
+    if filter_specs:
+      cmd.extend(filter_specs)
+
+    if use_iter:
+      return self._IterRunDeviceAdbCmd(cmd, timeout)
+    else:
+      timeout = timeout if timeout is not None else _DEFAULT_TIMEOUT
+      return self._RunDeviceAdbCmd(cmd, timeout, retries).splitlines()
+
+  def Forward(self, local, remote, timeout=_DEFAULT_TIMEOUT,
+              retries=_DEFAULT_RETRIES):
+    """Forward socket connections from the local socket to the remote socket.
+
+    Sockets are specified by one of:
+      tcp:<port>
+      localabstract:<unix domain socket name>
+      localreserved:<unix domain socket name>
+      localfilesystem:<unix domain socket name>
+      dev:<character device name>
+      jdwp:<process pid> (remote only)
+
+    Args:
+      local: The host socket.
+      remote: The device socket.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    self._RunDeviceAdbCmd(['forward', str(local), str(remote)], timeout,
+                          retries)
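+
+  # A minimal Forward() usage sketch (illustrative only): forward host TCP
+  # port 8080 to TCP port 8080 on the device.
+  #
+  #   adb.Forward('tcp:8080', 'tcp:8080')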
+
+  def JDWP(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """List of PIDs of processes hosting a JDWP transport.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      A list of PIDs as strings.
+    """
+    return [a.strip() for a in
+            self._RunDeviceAdbCmd(['jdwp'], timeout, retries).split('\n')]
+
+  def Install(self, apk_path, forward_lock=False, reinstall=False,
+              sd_card=False, timeout=60*2, retries=_DEFAULT_RETRIES):
+    """Install an apk on the device.
+
+    Args:
+      apk_path: Host path to the APK file.
+      forward_lock: (optional) If set forward-locks the app.
+      reinstall: (optional) If set reinstalls the app, keeping its data.
+      sd_card: (optional) If set installs on the SD card.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    _VerifyLocalFileExists(apk_path)
+    cmd = ['install']
+    if forward_lock:
+      cmd.append('-l')
+    if reinstall:
+      cmd.append('-r')
+    if sd_card:
+      cmd.append('-s')
+    cmd.append(apk_path)
+    output = self._RunDeviceAdbCmd(cmd, timeout, retries)
+    if 'Success' not in output:
+      raise device_errors.AdbCommandFailedError(
+          cmd, output, device_serial=self._device_serial)
+
+  def InstallMultiple(self, apk_paths, forward_lock=False, reinstall=False,
+                      sd_card=False, allow_downgrade=False, partial=False,
+                      timeout=60*2, retries=_DEFAULT_RETRIES):
+    """Install an apk with splits on the device.
+
+    Args:
+      apk_paths: Host paths of the base APK and any split APKs.
+      forward_lock: (optional) If set forward-locks the app.
+      reinstall: (optional) If set reinstalls the app, keeping its data.
+      sd_card: (optional) If set installs on the SD card.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+      allow_downgrade: (optional) Allow versionCode downgrade.
+      partial: (optional) Package ID if apk_paths doesn't include all .apks.
+    """
+    for path in apk_paths:
+      _VerifyLocalFileExists(path)
+    cmd = ['install-multiple']
+    if forward_lock:
+      cmd.append('-l')
+    if reinstall:
+      cmd.append('-r')
+    if sd_card:
+      cmd.append('-s')
+    if allow_downgrade:
+      cmd.append('-d')
+    if partial:
+      cmd.extend(('-p', partial))
+    cmd.extend(apk_paths)
+    output = self._RunDeviceAdbCmd(cmd, timeout, retries)
+    if 'Success' not in output:
+      raise device_errors.AdbCommandFailedError(
+          cmd, output, device_serial=self._device_serial)
+
+  def Uninstall(self, package, keep_data=False, timeout=_DEFAULT_TIMEOUT,
+                retries=_DEFAULT_RETRIES):
+    """Remove the app |package| from the device.
+
+    Args:
+      package: The package to uninstall.
+      keep_data: (optional) If set keep the data and cache directories.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    cmd = ['uninstall']
+    if keep_data:
+      cmd.append('-k')
+    cmd.append(package)
+    output = self._RunDeviceAdbCmd(cmd, timeout, retries)
+    if 'Failure' in output:
+      raise device_errors.AdbCommandFailedError(
+          cmd, output, device_serial=self._device_serial)
+
+  def Backup(self, path, packages=None, apk=False, shared=False,
+             nosystem=True, include_all=False, timeout=_DEFAULT_TIMEOUT,
+             retries=_DEFAULT_RETRIES):
+    """Write an archive of the device's data to |path|.
+
+    Args:
+      path: Local path to store the backup file.
+      packages: List of packages to be backed up.
+      apk: (optional) If set include the .apk files in the archive.
+      shared: (optional) If set back up the device's SD card.
+      nosystem: (optional) If set exclude system applications.
+      include_all: (optional) If set back up all installed applications and
+        |packages| is optional.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    cmd = ['backup', '-f', path]
+    if apk:
+      cmd.append('-apk')
+    if shared:
+      cmd.append('-shared')
+    if nosystem:
+      cmd.append('-nosystem')
+    if include_all:
+      cmd.append('-all')
+    if packages:
+      cmd.extend(packages)
+    assert bool(packages) ^ bool(include_all), (
+        'Provide \'packages\' or set \'include_all\' but not both.')
+    ret = self._RunDeviceAdbCmd(cmd, timeout, retries)
+    _VerifyLocalFileExists(path)
+    return ret
+
+  def Restore(self, path, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Restore device contents from the backup archive.
+
+    Args:
+      path: Host path to the backup archive.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    _VerifyLocalFileExists(path)
+    self._RunDeviceAdbCmd(['restore'] + [path], timeout, retries)
+
+  def WaitForDevice(self, timeout=60*5, retries=_DEFAULT_RETRIES):
+    """Block until the device is online.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    self._RunDeviceAdbCmd(['wait-for-device'], timeout, retries)
+
+  def GetState(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Get device state.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      One of 'offline', 'bootloader', or 'device'.
+    """
+    return self._RunDeviceAdbCmd(['get-state'], timeout, retries).strip()
+
+  def GetDevPath(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Gets the device path.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      The device path (e.g. usb:3-4)
+    """
+    return self._RunDeviceAdbCmd(['get-devpath'], timeout, retries)
+
+  def Remount(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Remounts the /system partition on the device read-write."""
+    self._RunDeviceAdbCmd(['remount'], timeout, retries)
+
+  def Reboot(self, to_bootloader=False, timeout=60*5,
+             retries=_DEFAULT_RETRIES):
+    """Reboots the device.
+
+    Args:
+      to_bootloader: (optional) If set reboots to the bootloader.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    if to_bootloader:
+      cmd = ['reboot-bootloader']
+    else:
+      cmd = ['reboot']
+    self._RunDeviceAdbCmd(cmd, timeout, retries)
+
+  def Root(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Restarts the adbd daemon with root permissions, if possible.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    output = self._RunDeviceAdbCmd(['root'], timeout, retries)
+    if 'cannot' in output:
+      raise device_errors.AdbCommandFailedError(
+          ['root'], output, device_serial=self._device_serial)
+
+  def Emu(self, cmd, timeout=_DEFAULT_TIMEOUT,
+               retries=_DEFAULT_RETRIES):
+    """Runs an emulator console command.
+
+    See http://developer.android.com/tools/devices/emulator.html#console
+
+    Args:
+      cmd: The command to run on the emulator console.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      The output of the emulator console command.
+    """
+    if isinstance(cmd, basestring):
+      cmd = [cmd]
+    return self._RunDeviceAdbCmd(['emu'] + cmd, timeout, retries)
+
+  @property
+  def is_emulator(self):
+    return _EMULATOR_RE.match(self._device_serial)
+
+  @property
+  def is_ready(self):
+    try:
+      return self.GetState() == _READY_STATE
+    except device_errors.CommandFailedError:
+      return False
diff --git a/build/android/pylib/device/adb_wrapper_test.py b/build/android/pylib/device/adb_wrapper_test.py
new file mode 100644
index 0000000..5fc9eb6
--- /dev/null
+++ b/build/android/pylib/device/adb_wrapper_test.py
@@ -0,0 +1,96 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the AdbWrapper class."""
+
+import os
+import tempfile
+import time
+import unittest
+
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+
+
+class TestAdbWrapper(unittest.TestCase):
+
+  def setUp(self):
+    devices = adb_wrapper.AdbWrapper.Devices()
+    assert devices, 'A device must be attached'
+    self._adb = devices[0]
+    self._adb.WaitForDevice()
+
+  @staticmethod
+  def _MakeTempFile(contents):
+    """Make a temporary file with the given contents.
+
+    Args:
+      contents: string to write to the temporary file.
+
+    Returns:
+      The absolute path to the file.
+    """
+    fi, path = tempfile.mkstemp()
+    with os.fdopen(fi, 'wb') as f:
+      f.write(contents)
+    return path
+
+  def testShell(self):
+    output = self._adb.Shell('echo test', expect_status=0)
+    self.assertEqual(output.strip(), 'test')
+    output = self._adb.Shell('echo test')
+    self.assertEqual(output.strip(), 'test')
+    with self.assertRaises(device_errors.AdbCommandFailedError):
+      self._adb.Shell('echo test', expect_status=1)
+
+  def testPushLsPull(self):
+    path = self._MakeTempFile('foo')
+    device_path = '/data/local/tmp/testfile.txt'
+    local_tmpdir = os.path.dirname(path)
+    self._adb.Push(path, device_path)
+    files = dict(self._adb.Ls('/data/local/tmp'))
+    self.assertTrue('testfile.txt' in files)
+    self.assertEquals(3, files['testfile.txt'].st_size)
+    self.assertEqual(self._adb.Shell('cat %s' % device_path), 'foo')
+    self._adb.Pull(device_path, local_tmpdir)
+    with open(os.path.join(local_tmpdir, 'testfile.txt'), 'r') as f:
+      self.assertEqual(f.read(), 'foo')
+
+  def testInstall(self):
+    path = self._MakeTempFile('foo')
+    with self.assertRaises(device_errors.AdbCommandFailedError):
+      self._adb.Install(path)
+
+  def testForward(self):
+    with self.assertRaises(device_errors.AdbCommandFailedError):
+      self._adb.Forward(0, 0)
+
+  def testUninstall(self):
+    with self.assertRaises(device_errors.AdbCommandFailedError):
+      self._adb.Uninstall('some.nonexistent.package')
+
+  def testRebootWaitForDevice(self):
+    self._adb.Reboot()
+    print 'waiting for device to reboot...'
+    while self._adb.GetState() == 'device':
+      time.sleep(1)
+    self._adb.WaitForDevice()
+    self.assertEqual(self._adb.GetState(), 'device')
+    print 'waiting for package manager...'
+    while 'package:' not in self._adb.Shell('pm path android'):
+      time.sleep(1)
+
+  def testRootRemount(self):
+    self._adb.Root()
+    while True:
+      try:
+        self._adb.Shell('start')
+        break
+      except device_errors.AdbCommandFailedError:
+        time.sleep(1)
+    self._adb.Remount()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/device/battery_utils.py b/build/android/pylib/device/battery_utils.py
new file mode 100644
index 0000000..eab558e
--- /dev/null
+++ b/build/android/pylib/device/battery_utils.py
@@ -0,0 +1,593 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a variety of device interactions with power.
+"""
+# pylint: disable=unused-argument
+
+import collections
+import contextlib
+import csv
+import logging
+
+from pylib import constants
+from pylib.device import decorators
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import timeout_retry
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 3
+
+
+_DEVICE_PROFILES = [
+  {
+    'name': 'Nexus 4',
+    'witness_file': '/sys/module/pm8921_charger/parameters/disabled',
+    'enable_command': (
+        'echo 0 > /sys/module/pm8921_charger/parameters/disabled && '
+        'dumpsys battery reset'),
+    'disable_command': (
+        'echo 1 > /sys/module/pm8921_charger/parameters/disabled && '
+        'dumpsys battery set ac 0 && dumpsys battery set usb 0'),
+    'charge_counter': None,
+    'voltage': None,
+    'current': None,
+  },
+  {
+    'name': 'Nexus 5',
+    # Nexus 5
+    # Setting the HIZ bit of the bq24192 causes the charger to actually ignore
+    # energy coming from USB. Setting the power_supply offline just updates the
+    # Android system to reflect that.
+    'witness_file': '/sys/kernel/debug/bq24192/INPUT_SRC_CONT',
+    'enable_command': (
+        'echo 0x4A > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'echo 1 > /sys/class/power_supply/usb/online && '
+        'dumpsys battery reset'),
+    'disable_command': (
+        'echo 0xCA > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'chmod 644 /sys/class/power_supply/usb/online && '
+        'echo 0 > /sys/class/power_supply/usb/online && '
+        'dumpsys battery set ac 0 && dumpsys battery set usb 0'),
+    'charge_counter': None,
+    'voltage': None,
+    'current': None,
+  },
+  {
+    'name': 'Nexus 6',
+    'witness_file': None,
+    'enable_command': (
+        'echo 1 > /sys/class/power_supply/battery/charging_enabled && '
+        'dumpsys battery reset'),
+    'disable_command': (
+        'echo 0 > /sys/class/power_supply/battery/charging_enabled && '
+        'dumpsys battery set ac 0 && dumpsys battery set usb 0'),
+    'charge_counter': (
+        '/sys/class/power_supply/max170xx_battery/charge_counter_ext'),
+    'voltage': '/sys/class/power_supply/max170xx_battery/voltage_now',
+    'current': '/sys/class/power_supply/max170xx_battery/current_now',
+  },
+  {
+    'name': 'Nexus 9',
+    'witness_file': None,
+    'enable_command': (
+        'echo Disconnected > '
+        '/sys/bus/i2c/drivers/bq2419x/0-006b/input_cable_state && '
+        'dumpsys battery reset'),
+    'disable_command': (
+        'echo Connected > '
+        '/sys/bus/i2c/drivers/bq2419x/0-006b/input_cable_state && '
+        'dumpsys battery set ac 0 && dumpsys battery set usb 0'),
+    'charge_counter': '/sys/class/power_supply/battery/charge_counter_ext',
+    'voltage': '/sys/class/power_supply/battery/voltage_now',
+    'current': '/sys/class/power_supply/battery/current_now',
+  },
+  {
+    'name': 'Nexus 10',
+    'witness_file': None,
+    'enable_command': None,
+    'disable_command': None,
+    'charge_counter': None,
+    'voltage': '/sys/class/power_supply/ds2784-fuelgauge/voltage_now',
+    'current': '/sys/class/power_supply/ds2784-fuelgauge/current_now',
+
+  },
+]
+
+# The list of useful dumpsys columns.
+# Index of the column containing the format version.
+_DUMP_VERSION_INDEX = 0
+# Index of the column containing the type of the row.
+_ROW_TYPE_INDEX = 3
+# Index of the column containing the uid.
+_PACKAGE_UID_INDEX = 4
+# Index of the column containing the application package.
+_PACKAGE_NAME_INDEX = 5
+# The column containing the uid of the power data.
+_PWI_UID_INDEX = 1
+# The column containing the type of consumption. Only consumption since the
+# last charge is of interest here.
+_PWI_AGGREGATION_INDEX = 2
+# The column containing the amount of power used, in mah.
+_PWI_POWER_CONSUMPTION_INDEX = 5
+
+
+class BatteryUtils(object):
+
+  def __init__(self, device, default_timeout=_DEFAULT_TIMEOUT,
+               default_retries=_DEFAULT_RETRIES):
+    """BatteryUtils constructor.
+
+      Args:
+        device: A DeviceUtils instance.
+        default_timeout: An integer containing the default number of seconds to
+                         wait for an operation to complete if no explicit value
+                         is provided.
+        default_retries: An integer containing the default number or times an
+                         operation should be retried on failure if no explicit
+                         value is provided.
+
+      Raises:
+        TypeError: If it is not passed a DeviceUtils instance.
+    """
+    if not isinstance(device, device_utils.DeviceUtils):
+      raise TypeError('Must be initialized with DeviceUtils object.')
+    self._device = device
+    self._cache = device.GetClientCache(self.__class__.__name__)
+    self._default_timeout = default_timeout
+    self._default_retries = default_retries
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SupportsFuelGauge(self, timeout=None, retries=None):
+    """Detect if fuel gauge chip is present.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if known fuel gauge files are present.
+      False otherwise.
+    """
+    self._DiscoverDeviceProfile()
+    return (self._cache['profile']['enable_command'] is not None
+        and self._cache['profile']['charge_counter'] is not None)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetFuelGaugeChargeCounter(self, timeout=None, retries=None):
+    """Get value of charge_counter on fuel gauge chip.
+
+    The device must have charging disabled for this, not just battery updates
+    disabled. The only device this currently works with is the Nexus 5.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      value of charge_counter for fuel gauge chip in units of nAh.
+
+    Raises:
+      device_errors.CommandFailedError: If fuel gauge chip not found.
+    """
+    if self.SupportsFuelGauge():
+      return int(self._device.ReadFile(
+          self._cache['profile']['charge_counter']))
+    raise device_errors.CommandFailedError(
+        'Unable to find fuel gauge.')
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetNetworkData(self, package, timeout=None, retries=None):
+    """Get network data for specific package.
+
+    Args:
+      package: package name you want network data for.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      Tuple of (sent_data, received_data).
+      None if no network data is found.
+    """
+    # If device_utils clears cache, cache['uids'] doesn't exist
+    if 'uids' not in self._cache:
+      self._cache['uids'] = {}
+    if package not in self._cache['uids']:
+      self.GetPowerData()
+      if package not in self._cache['uids']:
+        logging.warning('No UID found for %s. Can\'t get network data.',
+                        package)
+        return None
+
+    network_data_path = '/proc/uid_stat/%s/' % self._cache['uids'][package]
+    try:
+      send_data = int(self._device.ReadFile(network_data_path + 'tcp_snd'))
+    # If ReadFile throws exception, it means no network data usage file for
+    # package has been recorded. Return 0 sent and 0 received.
+    except device_errors.AdbShellCommandFailedError:
+      logging.warning('No sent data found for package %s', package)
+      send_data = 0
+    try:
+      recv_data = int(self._device.ReadFile(network_data_path + 'tcp_rcv'))
+    except device_errors.AdbShellCommandFailedError:
+      logging.warning('No received data found for package %s', package)
+      recv_data = 0
+    return (send_data, recv_data)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetPowerData(self, timeout=None, retries=None):
+    """Get power data for device.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      Dict of power data, keyed on package names.
+      {
+        package_name: {
+          'uid': uid,
+          'data': [1,2,3]
+        },
+      }
+    """
+    if 'uids' not in self._cache:
+      self._cache['uids'] = {}
+    dumpsys_output = self._device.RunShellCommand(
+        ['dumpsys', 'batterystats', '-c'], check_return=True)
+    csvreader = csv.reader(dumpsys_output)
+    pwi_entries = collections.defaultdict(list)
+    for entry in csvreader:
+      if entry[_DUMP_VERSION_INDEX] not in ['8', '9']:
+        # Wrong dumpsys version.
+        raise device_errors.DeviceVersionError(
+            'Dumpsys version must be 8 or 9. %s found.'
+            % entry[_DUMP_VERSION_INDEX])
+      if _ROW_TYPE_INDEX < len(entry) and entry[_ROW_TYPE_INDEX] == 'uid':
+        current_package = entry[_PACKAGE_NAME_INDEX]
+        if (self._cache['uids'].get(current_package)
+            and self._cache['uids'].get(current_package)
+            != entry[_PACKAGE_UID_INDEX]):
+          raise device_errors.CommandFailedError(
+              'Package %s found multiple times with different UIDs %s and %s'
+               % (current_package, self._cache['uids'][current_package],
+               entry[_PACKAGE_UID_INDEX]))
+        self._cache['uids'][current_package] = entry[_PACKAGE_UID_INDEX]
+      elif (_PWI_POWER_CONSUMPTION_INDEX < len(entry)
+          and entry[_ROW_TYPE_INDEX] == 'pwi'
+          and entry[_PWI_AGGREGATION_INDEX] == 'l'):
+        pwi_entries[entry[_PWI_UID_INDEX]].append(
+            float(entry[_PWI_POWER_CONSUMPTION_INDEX]))
+
+    return {p: {'uid': uid, 'data': pwi_entries[uid]}
+            for p, uid in self._cache['uids'].iteritems()}
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetPackagePowerData(self, package, timeout=None, retries=None):
+    """Get power data for particular package.
+
+    Args:
+      package: Package to get power data on.
+
+    Returns:
+      Dict of UID and power data.
+      {
+        'uid': uid,
+        'data': [1,2,3]
+      }
+      None if the package is not found in the power data.
+    """
+    return self.GetPowerData().get(package)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetBatteryInfo(self, timeout=None, retries=None):
+    """Gets battery info for the device.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+    Returns:
+      A dict containing various battery information as reported by dumpsys
+      battery.
+    """
+    result = {}
+    # Skip the first line, which is just a header.
+    for line in self._device.RunShellCommand(
+        ['dumpsys', 'battery'], check_return=True)[1:]:
+      # If usb charging has been disabled, an extra line of header exists.
+      if 'UPDATES STOPPED' in line:
+        logging.warning('Dumpsys battery not receiving updates. '
+                        'Run dumpsys battery reset if this is in error.')
+      elif ':' not in line:
+        logging.warning('Unknown line found in dumpsys battery: "%s"', line)
+      else:
+        k, v = line.split(':', 1)
+        result[k.strip()] = v.strip()
+    return result
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetCharging(self, timeout=None, retries=None):
+    """Gets the charging state of the device.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+    Returns:
+      True if the device is charging, false otherwise.
+    """
+    battery_info = self.GetBatteryInfo()
+    for k in ('AC powered', 'USB powered', 'Wireless powered'):
+      if (k in battery_info and
+          battery_info[k].lower() in ('true', '1', 'yes')):
+        return True
+    return False
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SetCharging(self, enabled, timeout=None, retries=None):
+    """Enables or disables charging on the device.
+
+    Args:
+      enabled: A boolean indicating whether charging should be enabled or
+        disabled.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      device_errors.CommandFailedError: If method of disabling charging cannot
+        be determined.
+    """
+    self._DiscoverDeviceProfile()
+    if not self._cache['profile']['enable_command']:
+      raise device_errors.CommandFailedError(
+          'Unable to find charging commands.')
+
+    if enabled:
+      command = self._cache['profile']['enable_command']
+    else:
+      command = self._cache['profile']['disable_command']
+
+    def set_and_verify_charging():
+      self._device.RunShellCommand(command, check_return=True)
+      return self.GetCharging() == enabled
+
+    timeout_retry.WaitFor(set_and_verify_charging, wait_period=1)
+
+  # TODO(rnephew): Make private when all use cases can use the context manager.
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def DisableBatteryUpdates(self, timeout=None, retries=None):
+    """Resets battery data and makes device appear like it is not
+    charging so that it will collect power data since last charge.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      device_errors.CommandFailedError: When resetting batterystats fails to
+        reset power values.
+      device_errors.DeviceVersionError: If device is not L or higher.
+    """
+    def battery_updates_disabled():
+      return self.GetCharging() is False
+
+    self._ClearPowerData()
+    self._device.RunShellCommand(['dumpsys', 'battery', 'set', 'ac', '0'],
+                                 check_return=True)
+    self._device.RunShellCommand(['dumpsys', 'battery', 'set', 'usb', '0'],
+                                 check_return=True)
+    timeout_retry.WaitFor(battery_updates_disabled, wait_period=1)
+
+  # TODO(rnephew): Make private when all use cases can use the context manager.
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def EnableBatteryUpdates(self, timeout=None, retries=None):
+    """Restarts device charging so that dumpsys no longer collects power data.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      device_errors.DeviceVersionError: If device is not L or higher.
+    """
+    def battery_updates_enabled():
+      return (self.GetCharging()
+              or 'UPDATES STOPPED' not in self._device.RunShellCommand(
+                  ['dumpsys', 'battery'], check_return=True))
+
+    self._device.RunShellCommand(['dumpsys', 'battery', 'reset'],
+                                 check_return=True)
+    timeout_retry.WaitFor(battery_updates_enabled, wait_period=1)
+
+  @contextlib.contextmanager
+  def BatteryMeasurement(self, timeout=None, retries=None):
+    """Context manager that enables battery data collection. It makes
+    the device appear to stop charging so that dumpsys will start collecting
+    power data since last charge. Once the with block is exited, charging is
+    resumed and power data since last charge is no longer collected.
+
+    Only for devices L and higher.
+
+    Example usage:
+      with BatteryMeasurement():
+        browser_actions()
+        get_power_data() # report usage within this block
+      after_measurements() # Anything that runs after power
+                           # measurements are collected
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      device_errors.DeviceVersionError: If device is not L or higher.
+    """
+    if (self._device.build_version_sdk <
+        constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP):
+      raise device_errors.DeviceVersionError('Device must be L or higher.')
+    try:
+      self.DisableBatteryUpdates(timeout=timeout, retries=retries)
+      yield
+    finally:
+      self.EnableBatteryUpdates(timeout=timeout, retries=retries)
+
+  def ChargeDeviceToLevel(self, level, wait_period=60):
+    """Enables charging and waits for device to be charged to given level.
+
+    Args:
+      level: level of charge to wait for.
+      wait_period: time in seconds to wait between checking.
+    """
+    self.SetCharging(True)
+
+    def device_charged():
+      battery_level = self.GetBatteryInfo().get('level')
+      if battery_level is None:
+        logging.warning('Unable to find current battery level.')
+        battery_level = 100
+      else:
+        logging.info('current battery level: %s', battery_level)
+        battery_level = int(battery_level)
+      return battery_level >= level
+
+    timeout_retry.WaitFor(device_charged, wait_period=wait_period)
+
+  def LetBatteryCoolToTemperature(self, target_temp, wait_period=60):
+    """Lets device sit to give battery time to cool down
+    Args:
+      temp: maximum temperature to allow in tenths of degrees c.
+      wait_period: time in seconds to wait between checking.
+    """
+    def cool_device():
+      temp = self.GetBatteryInfo().get('temperature')
+      if temp is None:
+        logging.warning('Unable to find current battery temperature.')
+        temp = 0
+      else:
+        logging.info('Current battery temperature: %s', temp)
+      return int(temp) <= target_temp
+    self.EnableBatteryUpdates()
+    logging.info('Waiting for the device to cool down to %s (0.1 C)',
+                 target_temp)
+    timeout_retry.WaitFor(cool_device, wait_period=wait_period)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def TieredSetCharging(self, enabled, timeout=None, retries=None):
+    """Enables or disables charging on the device.
+
+    Args:
+      enabled: A boolean indicating whether charging should be enabled or
+        disabled.
+      timeout: timeout in seconds
+      retries: number of retries
+    """
+    if self.GetCharging() == enabled:
+      logging.warning('Device charging already in expected state: %s', enabled)
+      return
+
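+    # Prefer hardware charging control; if the device profile lacks the needed
+    # commands, fall back to the dumpsys-based software toggle.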
+    if enabled:
+      try:
+        self.SetCharging(enabled)
+      except device_errors.CommandFailedError:
+        logging.info('Unable to enable charging via hardware.'
+                     ' Falling back to software enabling.')
+        self.EnableBatteryUpdates()
+    else:
+      try:
+        self._ClearPowerData()
+        self.SetCharging(enabled)
+      except device_errors.CommandFailedError:
+        logging.info('Unable to disable charging via hardware.'
+                     ' Falling back to software disabling.')
+        self.DisableBatteryUpdates()
+
+  @contextlib.contextmanager
+  def PowerMeasurement(self, timeout=None, retries=None):
+    """Context manager that enables battery power collection.
+
+    Once the with block is exited, charging is resumed. Will attempt to disable
+    charging at the hardware level, and if that fails will fall back to software
+    disabling of battery updates.
+
+    Only for devices L and higher.
+
+    Example usage:
+      with PowerMeasurement():
+        browser_actions()
+        get_power_data() # report usage within this block
+      after_measurements() # Anything that runs after power
+                           # measurements are collected
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+    """
+    try:
+      self.TieredSetCharging(False, timeout=timeout, retries=retries)
+      yield
+    finally:
+      self.TieredSetCharging(True, timeout=timeout, retries=retries)
+
+  def _ClearPowerData(self):
+    """Resets battery data and makes device appear like it is not
+    charging so that it will collect power data since last charge.
+
+    Returns:
+      True if power data cleared.
+      False if power data clearing is not supported (pre-L)
+
+    Raises:
+      device_errors.DeviceVersionError: If power clearing is supported,
+        but fails.
+    """
+    if (self._device.build_version_sdk <
+        constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP):
+      logging.warning('Dumpsys power data only available on 5.0 and above. '
+                      'Cannot clear power data.')
+      return False
+
+    self._device.RunShellCommand(
+        ['dumpsys', 'battery', 'set', 'usb', '1'], check_return=True)
+    self._device.RunShellCommand(
+        ['dumpsys', 'battery', 'set', 'ac', '1'], check_return=True)
+    self._device.RunShellCommand(
+        ['dumpsys', 'batterystats', '--reset'], check_return=True)
+    battery_data = self._device.RunShellCommand(
+        ['dumpsys', 'batterystats', '--charged', '--checkin'],
+        check_return=True, large_output=True)
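+    # Each checkin row is comma-separated, e.g. '9,<uid>,l,pwi,<label>,<power>';
+    # a non-zero pwi (per-uid power) entry right after a reset means the reset
+    # did not take effect.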
+    for line in battery_data:
+      l = line.split(',')
+      if (len(l) > _PWI_POWER_CONSUMPTION_INDEX and l[_ROW_TYPE_INDEX] == 'pwi'
+          and float(l[_PWI_POWER_CONSUMPTION_INDEX]) != 0):
+        self._device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True)
+        raise device_errors.CommandFailedError(
+            'Non-zero pwi value found after reset.')
+    self._device.RunShellCommand(
+        ['dumpsys', 'battery', 'reset'], check_return=True)
+    return True
+
+  def _DiscoverDeviceProfile(self):
+    """Checks and caches device information.
+
+    Returns:
+      True if profile is found, false otherwise.
+    """
+
+    if 'profile' in self._cache:
+      return True
+    for profile in _DEVICE_PROFILES:
+      if self._device.product_model == profile['name']:
+        self._cache['profile'] = profile
+        return True
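+    # Unknown device: cache an empty profile so later lookups still succeed
+    # and callers can tell that no hardware charging controls are available.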
+    self._cache['profile'] = {
+        'name': None,
+        'witness_file': None,
+        'enable_command': None,
+        'disable_command': None,
+        'charge_counter': None,
+        'voltage': None,
+        'current': None,
+    }
+    return False
diff --git a/build/android/pylib/device/battery_utils_test.py b/build/android/pylib/device/battery_utils_test.py
new file mode 100755
index 0000000..b968fa6
--- /dev/null
+++ b/build/android/pylib/device/battery_utils_test.py
@@ -0,0 +1,574 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of battery_utils.py
+"""
+
+# pylint: disable=W0613
+
+import logging
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.device import battery_utils
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.device import device_utils_test
+from pylib.utils import mock_calls
+
+# RunCommand from third_party/android_testrunner/run_command.py is mocked
+# below, so its path needs to be in sys.path.
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'android_testrunner'))
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+_DUMPSYS_OUTPUT = [
+    '9,0,i,uid,1000,test_package1',
+    '9,0,i,uid,1001,test_package2',
+    '9,1000,l,pwi,uid,1',
+    '9,1001,l,pwi,uid,2'
+]
+
+
+class BatteryUtilsTest(mock_calls.TestCase):
+
+  _NEXUS_5 = {
+    'name': 'Nexus 5',
+    'witness_file': '/sys/kernel/debug/bq24192/INPUT_SRC_CONT',
+    'enable_command': (
+        'echo 0x4A > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'echo 1 > /sys/class/power_supply/usb/online'),
+    'disable_command': (
+        'echo 0xCA > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'chmod 644 /sys/class/power_supply/usb/online && '
+        'echo 0 > /sys/class/power_supply/usb/online'),
+    'charge_counter': None,
+    'voltage': None,
+    'current': None,
+  }
+
+  _NEXUS_6 = {
+    'name': 'Nexus 6',
+    'witness_file': None,
+    'enable_command': None,
+    'disable_command': None,
+    'charge_counter': (
+        '/sys/class/power_supply/max170xx_battery/charge_counter_ext'),
+    'voltage': '/sys/class/power_supply/max170xx_battery/voltage_now',
+    'current': '/sys/class/power_supply/max170xx_battery/current_now',
+  }
+
+  _NEXUS_10 = {
+    'name': 'Nexus 10',
+    'witness_file': None,
+    'enable_command': None,
+    'disable_command': None,
+    'charge_counter': (
+        '/sys/class/power_supply/ds2784-fuelgauge/charge_counter_ext'),
+    'voltage': '/sys/class/power_supply/ds2784-fuelgauge/voltage_now',
+    'current': '/sys/class/power_supply/ds2784-fuelgauge/current_now',
+  }
+
+  def ShellError(self, output=None, status=1):
+    if output is None:
+      output = 'Permission denied\n'
+    def action(cmd, *args, **kwargs):
+      raise device_errors.AdbShellCommandFailedError(
+          cmd, output, status, str(self.device))
+    return action
+
+  def setUp(self):
+    self.adb = device_utils_test._AdbWrapperMock('0123456789abcdef')
+    self.device = device_utils.DeviceUtils(
+        self.adb, default_timeout=10, default_retries=0)
+    self.watchMethodCalls(self.call.adb, ignore=['GetDeviceSerial'])
+    self.battery = battery_utils.BatteryUtils(
+        self.device, default_timeout=10, default_retries=0)
+
+
+class BatteryUtilsInitTest(unittest.TestCase):
+
+  def testInitWithDeviceUtil(self):
+    serial = '0fedcba987654321'
+    d = device_utils.DeviceUtils(serial)
+    b = battery_utils.BatteryUtils(d)
+    self.assertEqual(d, b._device)
+
+  def testInitWithMissing_fails(self):
+    with self.assertRaises(TypeError):
+      battery_utils.BatteryUtils(None)
+    with self.assertRaises(TypeError):
+      battery_utils.BatteryUtils('')
+
+
+class BatteryUtilsSetChargingTest(BatteryUtilsTest):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testSetCharging_enabled(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(mock.ANY, check_return=True), []),
+        (self.call.battery.GetCharging(), True)):
+      self.battery.SetCharging(True)
+
+  def testSetCharging_alreadyEnabled(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, check_return=True), []),
+        (self.call.battery.GetCharging(), True)):
+      self.battery.SetCharging(True)
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testSetCharging_disabled(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, check_return=True), []),
+        (self.call.battery.GetCharging(), True),
+        (self.call.device.RunShellCommand(mock.ANY, check_return=True), []),
+        (self.call.battery.GetCharging(), False)):
+      self.battery.SetCharging(False)
+
+
+class BatteryUtilsSetBatteryMeasurementTest(BatteryUtilsTest):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testBatteryMeasurementWifi(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            mock.ANY, retries=0, single_line=True,
+            timeout=10, check_return=True), '22'),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '0'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '0'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), ['UPDATES STOPPED']),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), [])):
+      with self.battery.BatteryMeasurement():
+        pass
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testBatteryMeasurementUsb(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            mock.ANY, retries=0, single_line=True,
+            timeout=10, check_return=True), '22'),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '0'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '0'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), ['UPDATES STOPPED']),
+        (self.call.battery.GetCharging(), True)):
+      with self.battery.BatteryMeasurement():
+        pass
+
+
+class BatteryUtilsGetPowerData(BatteryUtilsTest):
+
+  def testGetPowerData(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT)):
+      data = self.battery.GetPowerData()
+      check = {
+          'test_package1': {'uid': '1000', 'data': [1.0]},
+          'test_package2': {'uid': '1001', 'data': [2.0]}
+      }
+      self.assertEqual(data, check)
+
+  def testGetPowerData_packageCollisionSame(self):
+    self.battery._cache['uids'] = {'test_package1': '1000'}
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+        _DUMPSYS_OUTPUT):
+      data = self.battery.GetPowerData()
+      check = {
+          'test_package1': {'uid': '1000', 'data': [1.0]},
+          'test_package2': {'uid': '1001', 'data': [2.0]}
+      }
+      self.assertEqual(data, check)
+
+  def testGetPowerData_packageCollisionDifferent(self):
+    self.battery._cache['uids'] = {'test_package1': '1'}
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+        _DUMPSYS_OUTPUT):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.battery.GetPowerData()
+
+  def testGetPowerData_cacheCleared(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT)):
+      self.battery._cache.clear()
+      data = self.battery.GetPowerData()
+      check = {
+          'test_package1': {'uid': '1000', 'data': [1.0]},
+          'test_package2': {'uid': '1001', 'data': [2.0]}
+      }
+      self.assertEqual(data, check)
+
+  def testGetPackagePowerData(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT)):
+      data = self.battery.GetPackagePowerData('test_package2')
+      self.assertEqual(data, {'uid': '1001', 'data': [2.0]})
+
+  def testGetPackagePowerData_badPackage(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT)):
+      data = self.battery.GetPackagePowerData('not_a_package')
+      self.assertEqual(data, None)
+
+
+class BatteryUtilsChargeDevice(BatteryUtilsTest):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testChargeDeviceToLevel(self):
+    with self.assertCalls(
+        (self.call.battery.SetCharging(True)),
+        (self.call.battery.GetBatteryInfo(), {'level': '50'}),
+        (self.call.battery.GetBatteryInfo(), {'level': '100'})):
+      self.battery.ChargeDeviceToLevel(95)
+
+
+class BatteryUtilsGetBatteryInfoTest(BatteryUtilsTest):
+
+  def testGetBatteryInfo_normal(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True),
+        [
+          'Current Battery Service state:',
+          '  AC powered: false',
+          '  USB powered: true',
+          '  level: 100',
+          '  temperature: 321',
+        ]):
+      self.assertEquals(
+          {
+            'AC powered': 'false',
+            'USB powered': 'true',
+            'level': '100',
+            'temperature': '321',
+          },
+          self.battery.GetBatteryInfo())
+
+  def testGetBatteryInfo_nothing(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), []):
+      self.assertEquals({}, self.battery.GetBatteryInfo())
+
+
+class BatteryUtilsGetChargingTest(BatteryUtilsTest):
+
+  def testGetCharging_usb(self):
+    with self.assertCall(
+        self.call.battery.GetBatteryInfo(), {'USB powered': 'true'}):
+      self.assertTrue(self.battery.GetCharging())
+
+  def testGetCharging_usbFalse(self):
+    with self.assertCall(
+        self.call.battery.GetBatteryInfo(), {'USB powered': 'false'}):
+      self.assertFalse(self.battery.GetCharging())
+
+  def testGetCharging_ac(self):
+    with self.assertCall(
+        self.call.battery.GetBatteryInfo(), {'AC powered': 'true'}):
+      self.assertTrue(self.battery.GetCharging())
+
+  def testGetCharging_wireless(self):
+    with self.assertCall(
+        self.call.battery.GetBatteryInfo(), {'Wireless powered': 'true'}):
+      self.assertTrue(self.battery.GetCharging())
+
+  def testGetCharging_unknown(self):
+    with self.assertCall(
+        self.call.battery.GetBatteryInfo(), {'level': '42'}):
+      self.assertFalse(self.battery.GetCharging())
+
+
+class BatteryUtilsGetNetworkDataTest(BatteryUtilsTest):
+
+  def testGetNetworkData_noDataUsage(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_snd'),
+            self.ShellError()),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_rcv'),
+            self.ShellError())):
+      self.assertEquals(self.battery.GetNetworkData('test_package1'), (0, 0))
+
+  def testGetNetworkData_badPackage(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT):
+      self.assertEqual(self.battery.GetNetworkData('asdf'), None)
+
+  def testGetNetworkData_packageNotCached(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_snd'), 1),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_rcv'), 2)):
+      self.assertEqual(self.battery.GetNetworkData('test_package1'), (1, 2))
+
+  def testGetNetworkData_packageCached(self):
+    self.battery._cache['uids'] = {'test_package1': '1000'}
+    with self.assertCalls(
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_snd'), 1),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_rcv'), 2)):
+      self.assertEqual(self.battery.GetNetworkData('test_package1'), (1, 2))
+
+  def testGetNetworkData_clearedCache(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_snd'), 1),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_rcv'), 2)):
+      self.battery._cache.clear()
+      self.assertEqual(self.battery.GetNetworkData('test_package1'), (1, 2))
+
+
+class BatteryUtilsLetBatteryCoolToTemperatureTest(BatteryUtilsTest):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testLetBatteryCoolToTemperature_startUnder(self):
+    with self.assertCalls(
+        (self.call.battery.EnableBatteryUpdates(), []),
+        (self.call.battery.GetBatteryInfo(), {'temperature': '500'})):
+      self.battery.LetBatteryCoolToTemperature(600)
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testLetBatteryCoolToTemperature_startOver(self):
+    with self.assertCalls(
+        (self.call.battery.EnableBatteryUpdates(), []),
+        (self.call.battery.GetBatteryInfo(), {'temperature': '500'}),
+        (self.call.battery.GetBatteryInfo(), {'temperature': '400'})):
+      self.battery.LetBatteryCoolToTemperature(400)
+
+
+class BatteryUtilsSupportsFuelGaugeTest(BatteryUtilsTest):
+
+  def testSupportsFuelGauge_false(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    self.assertFalse(self.battery.SupportsFuelGauge())
+
+  def testSupportsFuelGauge_trueMax(self):
+    self.battery._cache['profile'] = self._NEXUS_6
+    # TODO(rnephew): Change this to assertTrue when we have support for
+    # disabling hardware charging on nexus 6.
+    self.assertFalse(self.battery.SupportsFuelGauge())
+
+  def testSupportsFuelGauge_trueDS(self):
+    self.battery._cache['profile'] = self._NEXUS_10
+    # TODO(rnephew): Change this to assertTrue when we have support for
+    # disabling hardware charging on nexus 10.
+    self.assertFalse(self.battery.SupportsFuelGauge())
+
+
+class BatteryUtilsGetFuelGaugeChargeCounterTest(BatteryUtilsTest):
+
+  def testGetFuelGaugeChargeCounter_noFuelGauge(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertRaises(device_errors.CommandFailedError):
+      self.battery.GetFuelGaugeChargeCounter()
+
+  def testGetFuelGaugeChargeCounter_fuelGaugePresent(self):
+    self.battery._cache['profile'] = self._NEXUS_6
+    with self.assertCalls(
+        (self.call.battery.SupportsFuelGauge(), True),
+        (self.call.device.ReadFile(mock.ANY), '123')):
+      self.assertEqual(self.battery.GetFuelGaugeChargeCounter(), 123)
+
+
+class BatteryUtilsTieredSetCharging(BatteryUtilsTest):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testTieredSetCharging_softwareSetTrue(self):
+    self.battery._cache['profile'] = self._NEXUS_6
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), ['UPDATES STOPPED']),
+        (self.call.battery.GetCharging(), True)):
+      self.battery.TieredSetCharging(True)
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testTieredSetCharging_softwareSetFalse(self):
+    self.battery._cache['profile'] = self._NEXUS_6
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '0'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '0'], check_return=True), []),
+        (self.call.battery.GetCharging(), False)):
+      self.battery.TieredSetCharging(False)
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testTieredSetCharging_hardwareSetTrue(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), False),
+        (self.call.battery.SetCharging(True))):
+      self.battery.TieredSetCharging(True)
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testTieredSetCharging_hardwareSetFalse(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.battery.SetCharging(False))):
+      self.battery.TieredSetCharging(False)
+
+  def testTieredSetCharging_expectedStateAlreadyTrue(self):
+    with self.assertCalls((self.call.battery.GetCharging(), True)):
+      self.battery.TieredSetCharging(True)
+
+  def testTieredSetCharging_expectedStateAlreadyFalse(self):
+    with self.assertCalls((self.call.battery.GetCharging(), False)):
+      self.battery.TieredSetCharging(False)
+
+
+class BatteryUtilsPowerMeasurement(BatteryUtilsTest):
+
+  def testPowerMeasurement_hardware(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.battery.SetCharging(False)),
+        (self.call.battery.GetCharging(), False),
+        (self.call.battery.SetCharging(True))):
+      with self.battery.PowerMeasurement():
+        pass
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testPowerMeasurement_software(self):
+    self.battery._cache['profile'] = self._NEXUS_6
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '0'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '0'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), ['UPDATES STOPPED']),
+        (self.call.battery.GetCharging(), True)):
+      with self.battery.PowerMeasurement():
+        pass
+
+
+class BatteryUtilsDiscoverDeviceProfile(BatteryUtilsTest):
+
+  def testDiscoverDeviceProfile_known(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.product.model'), 'Nexus 4')):
+      self.battery._DiscoverDeviceProfile()
+      self.assertEqual(self.battery._cache['profile']['name'], 'Nexus 4')
+
+  def testDiscoverDeviceProfile_unknown(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.product.model'), 'Other')):
+      self.battery._DiscoverDeviceProfile()
+      self.assertEqual(self.battery._cache['profile']['name'], None)
+
+
+class BatteryUtilsClearPowerData(BatteryUtilsTest):
+
+  def testClearPowerData_preL(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, retries=0,
+            single_line=True, timeout=10, check_return=True), '20')):
+      self.assertFalse(self.battery._ClearPowerData())
+
+  def testClearPowerData_clearedL(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, retries=0,
+            single_line=True, timeout=10, check_return=True), '22'),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '1'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '1'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '--reset'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '--charged', '--checkin'],
+            check_return=True, large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), [])):
+      self.assertTrue(self.battery._ClearPowerData())
+
+  def testClearPowerData_notClearedL(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, retries=0,
+            single_line=True, timeout=10, check_return=True), '22'),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '1'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '1'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '--reset'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '--charged', '--checkin'],
+            check_return=True, large_output=True),
+            ['9,1000,l,pwi,uid,0.0327']),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), [])):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.battery._ClearPowerData()
+
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
diff --git a/build/android/pylib/device/commands/BUILD.gn b/build/android/pylib/device/commands/BUILD.gn
new file mode 100644
index 0000000..66e1010
--- /dev/null
+++ b/build/android/pylib/device/commands/BUILD.gn
@@ -0,0 +1,17 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+group("commands") {
+  datadeps = [
+    ":chromium_commands",
+  ]
+}
+
+# GYP: //build/android/pylib/device/commands/commands.gyp:chromium_commands
+android_library("chromium_commands") {
+  java_files = [ "java/src/org/chromium/android/commands/unzip/Unzip.java" ]
+  dex_path = "$root_build_dir/lib.java/chromium_commands.dex.jar"
+}
diff --git a/build/android/pylib/device/commands/__init__.py b/build/android/pylib/device/commands/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/device/commands/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/device/commands/commands.gyp b/build/android/pylib/device/commands/commands.gyp
new file mode 100644
index 0000000..b5b5bc8
--- /dev/null
+++ b/build/android/pylib/device/commands/commands.gyp
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      # GN version: //build/android/pylib/devices/commands:chromium_commands
+      'target_name': 'chromium_commands',
+      'type': 'none',
+      'variables': {
+        'add_to_dependents_classpaths': 0,
+        'java_in_dir': ['java'],
+      },
+      'includes': [
+        '../../../../../build/java.gypi',
+      ],
+    }
+  ],
+}
diff --git a/build/android/pylib/device/commands/install_commands.py b/build/android/pylib/device/commands/install_commands.py
new file mode 100644
index 0000000..58c56cc
--- /dev/null
+++ b/build/android/pylib/device/commands/install_commands.py
@@ -0,0 +1,51 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+from pylib import constants
+
+BIN_DIR = '%s/bin' % constants.TEST_EXECUTABLE_DIR
+_FRAMEWORK_DIR = '%s/framework' % constants.TEST_EXECUTABLE_DIR
+
+_COMMANDS = {
+  'unzip': 'org.chromium.android.commands.unzip.Unzip',
+}
+
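+# Each installed command is a tiny shell wrapper: it points CLASSPATH at the
+# pushed chromium_commands.jar and execs the command's main class via
+# app_process.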
+_SHELL_COMMAND_FORMAT = (
+"""#!/system/bin/sh
+base=%s
+export CLASSPATH=$base/framework/chromium_commands.jar
+exec app_process $base/bin %s $@
+""")
+
+
+def Installed(device):
+  return (all(device.FileExists('%s/%s' % (BIN_DIR, c)) for c in _COMMANDS)
+          and device.FileExists('%s/chromium_commands.jar' % _FRAMEWORK_DIR))
+
+def InstallCommands(device):
+  if device.IsUserBuild():
+    raise Exception('chromium_commands currently requires a userdebug build.')
+
+  chromium_commands_jar_path = os.path.join(
+      constants.GetOutDirectory(), constants.SDK_BUILD_JAVALIB_DIR,
+      'chromium_commands.dex.jar')
+  if not os.path.exists(chromium_commands_jar_path):
+    raise Exception('%s not found. Please build chromium_commands.'
+                    % chromium_commands_jar_path)
+
+  device.RunShellCommand(['mkdir', BIN_DIR, _FRAMEWORK_DIR])
+  for command, main_class in _COMMANDS.iteritems():
+    shell_command = _SHELL_COMMAND_FORMAT % (
+        constants.TEST_EXECUTABLE_DIR, main_class)
+    shell_file = '%s/%s' % (BIN_DIR, command)
+    device.WriteFile(shell_file, shell_command)
+    device.RunShellCommand(
+        ['chmod', '755', shell_file], check_return=True)
+
+  device.adb.Push(
+      chromium_commands_jar_path,
+      '%s/chromium_commands.jar' % _FRAMEWORK_DIR)
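+
+
+# Example (illustrative): a test harness typically checks before installing:
+#
+#   if not install_commands.Installed(device):
+#     install_commands.InstallCommands(device)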
+
diff --git a/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java b/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
new file mode 100644
index 0000000..7cbbb73
--- /dev/null
+++ b/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
@@ -0,0 +1,95 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.android.commands.unzip;
+
+import android.util.Log;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ *  Minimal implementation of the command-line unzip utility for Android.
+ */
+public class Unzip {
+
+    private static final String TAG = "Unzip";
+
+    public static void main(String[] args) {
+        try {
+            (new Unzip()).run(args);
+        } catch (RuntimeException e) {
+            Log.e(TAG, e.toString());
+            System.exit(1);
+        }
+    }
+
+    private void showUsage(PrintStream s) {
+        s.println("Usage:");
+        s.println("unzip [zipfile]");
+    }
+
+    @SuppressWarnings("Finally")
+    private void unzip(String[] args) {
+        ZipInputStream zis = null;
+        try {
+            String zipfile = args[0];
+            zis = new ZipInputStream(new BufferedInputStream(new FileInputStream(zipfile)));
+            ZipEntry ze = null;
+
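+            // Stream each entry through a fixed 1 KB buffer to keep memory use flat.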
+            byte[] bytes = new byte[1024];
+            while ((ze = zis.getNextEntry()) != null) {
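+                // Entry names are treated as paths relative to the current directory.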
+                File outputFile = new File(ze.getName());
+                if (ze.isDirectory()) {
+                    if (!outputFile.exists() && !outputFile.mkdirs()) {
+                        throw new RuntimeException(
+                                "Failed to create directory: " + outputFile.toString());
+                    }
+                } else {
+                    File parentDir = outputFile.getParentFile();
+                    if (!parentDir.exists() && !parentDir.mkdirs()) {
+                        throw new RuntimeException(
+                                "Failed to create directory: " + parentDir.toString());
+                    }
+                    OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile));
+                    int actualBytes = 0;
+                    int totalBytes = 0;
+                    while ((actualBytes = zis.read(bytes)) != -1) {
+                        out.write(bytes, 0, actualBytes);
+                        totalBytes += actualBytes;
+                    }
+                    out.close();
+                }
+                zis.closeEntry();
+            }
+
+        } catch (IOException e) {
+            throw new RuntimeException("Error while unzipping: " + e.toString());
+        } finally {
+            try {
+                if (zis != null) zis.close();
+            } catch (IOException e) {
+                throw new RuntimeException("Error while closing zip: " + e.toString());
+            }
+        }
+    }
+
+    public void run(String[] args) {
+        if (args.length != 1) {
+            showUsage(System.err);
+            throw new RuntimeException("Incorrect usage.");
+        }
+
+        unzip(args);
+    }
+}
+
diff --git a/build/android/pylib/device/decorators.py b/build/android/pylib/device/decorators.py
new file mode 100644
index 0000000..73c13da
--- /dev/null
+++ b/build/android/pylib/device/decorators.py
@@ -0,0 +1,157 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Function/method decorators that provide timeout and retry logic.
+"""
+
+import functools
+import os
+import sys
+import threading
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.device import device_errors
+from pylib.utils import reraiser_thread
+from pylib.utils import timeout_retry
+
+# TODO(jbudorick) Remove once the DeviceUtils implementations are no longer
+#                 backed by AndroidCommands / android_testrunner.
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'third_party',
+                             'android_testrunner'))
+import errors as old_errors
+
+DEFAULT_TIMEOUT_ATTR = '_default_timeout'
+DEFAULT_RETRIES_ATTR = '_default_retries'
+
+
+def _TimeoutRetryWrapper(f, timeout_func, retries_func, pass_values=False):
+  """ Wraps a funcion with timeout and retry handling logic.
+
+  Args:
+    f: The function to wrap.
+    timeout_func: A callable that returns the timeout value.
+    retries_func: A callable that returns the retries value.
+    pass_values: If True, passes the values returned by |timeout_func| and
+                 |retries_func| to the wrapped function as 'timeout' and
+                 'retries' kwargs, respectively.
+  Returns:
+    The wrapped function.
+  """
+  @functools.wraps(f)
+  def TimeoutRetryWrapper(*args, **kwargs):
+    timeout = timeout_func(*args, **kwargs)
+    retries = retries_func(*args, **kwargs)
+    if pass_values:
+      kwargs['timeout'] = timeout
+      kwargs['retries'] = retries
+    def impl():
+      return f(*args, **kwargs)
+    try:
+      if isinstance(threading.current_thread(),
+                    timeout_retry.TimeoutRetryThread):
+        return impl()
+      else:
+        return timeout_retry.Run(impl, timeout, retries)
+    except old_errors.WaitForResponseTimedOutError as e:
+      raise device_errors.CommandTimeoutError(str(e)), None, (
+          sys.exc_info()[2])
+    except old_errors.DeviceUnresponsiveError as e:
+      raise device_errors.DeviceUnreachableError(str(e)), None, (
+          sys.exc_info()[2])
+    except reraiser_thread.TimeoutError as e:
+      raise device_errors.CommandTimeoutError(str(e)), None, (
+          sys.exc_info()[2])
+    except cmd_helper.TimeoutError as e:
+      raise device_errors.CommandTimeoutError(str(e)), None, (
+          sys.exc_info()[2])
+  return TimeoutRetryWrapper
+
+
+def WithTimeoutAndRetries(f):
+  """A decorator that handles timeouts and retries.
+
+  'timeout' and 'retries' kwargs must be passed to the function.
+
+  Args:
+    f: The function to decorate.
+  Returns:
+    The decorated function.
+  """
+  get_timeout = lambda *a, **kw: kw['timeout']
+  get_retries = lambda *a, **kw: kw['retries']
+  return _TimeoutRetryWrapper(f, get_timeout, get_retries)
+
+
+def WithExplicitTimeoutAndRetries(timeout, retries):
+  """Returns a decorator that handles timeouts and retries.
+
+  The provided |timeout| and |retries| values are always used.
+
+  Args:
+    timeout: The number of seconds to wait for the decorated function to
+             return. Always used.
+    retries: The number of times the decorated function should be retried on
+             failure. Always used.
+  Returns:
+    The actual decorator.
+  """
+  def decorator(f):
+    get_timeout = lambda *a, **kw: timeout
+    get_retries = lambda *a, **kw: retries
+    return _TimeoutRetryWrapper(f, get_timeout, get_retries)
+  return decorator
+
+
+def WithTimeoutAndRetriesDefaults(default_timeout, default_retries):
+  """Returns a decorator that handles timeouts and retries.
+
+  The provided |default_timeout| and |default_retries| values are used only
+  if timeout and retries values are not provided.
+
+  Args:
+    default_timeout: The number of seconds to wait for the decorated function
+                     to return. Only used if a 'timeout' kwarg is not passed
+                     to the decorated function.
+    default_retries: The number of times the decorated function should be
+                     retried on failure. Only used if a 'retries' kwarg is not
+                     passed to the decorated function.
+  Returns:
+    The actual decorator.
+  """
+  def decorator(f):
+    get_timeout = lambda *a, **kw: kw.get('timeout', default_timeout)
+    get_retries = lambda *a, **kw: kw.get('retries', default_retries)
+    return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
+  return decorator
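+
+# Example (illustrative sketch; the function name is hypothetical): a function
+# decorated with WithTimeoutAndRetriesDefaults(30, 3) receives the defaults as
+# kwargs unless the caller overrides them:
+#
+#   @WithTimeoutAndRetriesDefaults(30, 3)
+#   def Flash(device, timeout=None, retries=None):
+#     ...
+#
+#   Flash(device)             # runs with timeout=30, retries=3
+#   Flash(device, retries=0)  # explicit values override the defaults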
+
+
+def WithTimeoutAndRetriesFromInstance(
+    default_timeout_name=DEFAULT_TIMEOUT_ATTR,
+    default_retries_name=DEFAULT_RETRIES_ATTR):
+  """Returns a decorator that handles timeouts and retries.
+
+  The provided |default_timeout_name| and |default_retries_name| are used to
+  get the default timeout value and the default retries value from the object
+  instance if timeout and retries values are not provided.
+
+  Note that this should only be used to decorate methods, not functions.
+
+  Args:
+    default_timeout_name: The name of the default timeout attribute of the
+                          instance.
+    default_retries_name: The name of the default retries attribute of the
+                          instance.
+  Returns:
+    The actual decorator.
+  """
+  def decorator(f):
+    def get_timeout(inst, *_args, **kwargs):
+      return kwargs.get('timeout', getattr(inst, default_timeout_name))
+    def get_retries(inst, *_args, **kwargs):
+      return kwargs.get('retries', getattr(inst, default_retries_name))
+    return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
+  return decorator
+
diff --git a/build/android/pylib/device/decorators_test.py b/build/android/pylib/device/decorators_test.py
new file mode 100644
index 0000000..b75618b
--- /dev/null
+++ b/build/android/pylib/device/decorators_test.py
@@ -0,0 +1,365 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for decorators.py.
+"""
+
+# pylint: disable=W0613
+
+import os
+import sys
+import time
+import traceback
+import unittest
+
+from pylib import constants
+from pylib.device import decorators
+from pylib.device import device_errors
+from pylib.utils import reraiser_thread
+
+# TODO(jbudorick) Remove once the DeviceUtils implementations are no longer
+#                 backed by AndroidCommands / android_testrunner.
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'third_party',
+                             'android_testrunner'))
+import errors as old_errors
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 3
+
+
+class DecoratorsTest(unittest.TestCase):
+  _decorated_function_called_count = 0
+
+  def testFunctionDecoratorDoesTimeouts(self):
+    """Tests that the base decorator handles the timeout logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetries
+    def alwaysTimesOut(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      time.sleep(100)
+
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut(timeout=1, retries=0)
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+  def testFunctionDecoratorDoesRetries(self):
+    """Tests that the base decorator handles the retries logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetries
+    def alwaysRaisesCommandFailedError(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError(timeout=30, retries=10)
+    self.assertEquals(11, DecoratorsTest._decorated_function_called_count)
+
+  def testFunctionDecoratorRequiresParams(self):
+    """Tests that the base decorator requires timeout and retries params."""
+    @decorators.WithTimeoutAndRetries
+    def requiresExplicitTimeoutAndRetries(timeout=None, retries=None):
+      return (timeout, retries)
+
+    with self.assertRaises(KeyError):
+      requiresExplicitTimeoutAndRetries()
+    with self.assertRaises(KeyError):
+      requiresExplicitTimeoutAndRetries(timeout=10)
+    with self.assertRaises(KeyError):
+      requiresExplicitTimeoutAndRetries(retries=0)
+    expected_timeout = 10
+    expected_retries = 1
+    (actual_timeout, actual_retries) = (
+        requiresExplicitTimeoutAndRetries(timeout=expected_timeout,
+                                          retries=expected_retries))
+    self.assertEquals(expected_timeout, actual_timeout)
+    self.assertEquals(expected_retries, actual_retries)
+
+  def testFunctionDecoratorTranslatesOldExceptions(self):
+    """Tests that the explicit decorator translates old exceptions."""
+    @decorators.WithTimeoutAndRetries
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc),
+          timeout=10, retries=1)
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc),
+          timeout=10, retries=1)
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testFunctionDecoratorTranslatesReraiserExceptions(self):
+    """Tests that the explicit decorator translates reraiser exceptions."""
+    @decorators.WithTimeoutAndRetries
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc),
+          timeout=10, retries=1)
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testDefaultsFunctionDecoratorDoesTimeouts(self):
+    """Tests that the defaults decorator handles timeout logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetriesDefaults(1, 0)
+    def alwaysTimesOut(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      time.sleep(100)
+
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut()
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+    DecoratorsTest._decorated_function_called_count = 0
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut(timeout=2)
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 2)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+  def testDefaultsFunctionDecoratorDoesRetries(self):
+    """Tests that the defaults decorator handles retries logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysRaisesCommandFailedError(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError()
+    self.assertEquals(11, DecoratorsTest._decorated_function_called_count)
+
+    DecoratorsTest._decorated_function_called_count = 0
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError(retries=5)
+    self.assertEquals(6, DecoratorsTest._decorated_function_called_count)
+
+  def testDefaultsFunctionDecoratorPassesValues(self):
+    """Tests that the defaults decorator passes timeout and retries kwargs."""
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysReturnsTimeouts(timeout=None, retries=None):
+      return timeout
+
+    self.assertEquals(30, alwaysReturnsTimeouts())
+    self.assertEquals(120, alwaysReturnsTimeouts(timeout=120))
+
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysReturnsRetries(timeout=None, retries=None):
+      return retries
+
+    self.assertEquals(10, alwaysReturnsRetries())
+    self.assertEquals(1, alwaysReturnsRetries(retries=1))
+
+  def testDefaultsFunctionDecoratorTranslatesOldExceptions(self):
+    """Tests that the explicit decorator translates old exceptions."""
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testDefaultsFunctionDecoratorTranslatesReraiserExceptions(self):
+    """Tests that the explicit decorator translates reraiser exceptions."""
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testExplicitFunctionDecoratorDoesTimeouts(self):
+    """Tests that the explicit decorator handles timeout logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithExplicitTimeoutAndRetries(1, 0)
+    def alwaysTimesOut():
+      DecoratorsTest._decorated_function_called_count += 1
+      time.sleep(100)
+
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut()
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+  def testExplicitFunctionDecoratorDoesRetries(self):
+    """Tests that the explicit decorator handles retries logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithExplicitTimeoutAndRetries(30, 10)
+    def alwaysRaisesCommandFailedError():
+      DecoratorsTest._decorated_function_called_count += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError()
+    self.assertEquals(11, DecoratorsTest._decorated_function_called_count)
+
+  def testExplicitDecoratorTranslatesOldExceptions(self):
+    """Tests that the explicit decorator translates old exceptions."""
+    @decorators.WithExplicitTimeoutAndRetries(30, 10)
+    def alwaysRaisesProvidedException(exception):
+      raise exception
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testExplicitDecoratorTranslatesReraiserExceptions(self):
+    """Tests that the explicit decorator translates reraiser exceptions."""
+    @decorators.WithExplicitTimeoutAndRetries(30, 10)
+    def alwaysRaisesProvidedException(exception):
+      raise exception
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  class _MethodDecoratorTestObject(object):
+    """An object suitable for testing the method decorator."""
+
+    def __init__(self, test_case, default_timeout=_DEFAULT_TIMEOUT,
+                 default_retries=_DEFAULT_RETRIES):
+      self._test_case = test_case
+      self.default_timeout = default_timeout
+      self.default_retries = default_retries
+      self.function_call_counters = {
+          'alwaysRaisesCommandFailedError': 0,
+          'alwaysTimesOut': 0,
+          'requiresExplicitTimeoutAndRetries': 0,
+      }
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysTimesOut(self, timeout=None, retries=None):
+      self.function_call_counters['alwaysTimesOut'] += 1
+      time.sleep(100)
+      self._test_case.assertFalse(True, msg='Failed to time out?')
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysRaisesCommandFailedError(self, timeout=None, retries=None):
+      self.function_call_counters['alwaysRaisesCommandFailedError'] += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    # pylint: disable=no-self-use
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysReturnsTimeout(self, timeout=None, retries=None):
+      return timeout
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysReturnsRetries(self, timeout=None, retries=None):
+      return retries
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysRaisesProvidedException(self, exception, timeout=None,
+                                      retries=None):
+      raise exception
+
+    # pylint: enable=no-self-use
+
+
+  def testMethodDecoratorDoesTimeout(self):
+    """Tests that the method decorator handles timeout logic."""
+    test_obj = self._MethodDecoratorTestObject(self)
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      try:
+        test_obj.alwaysTimesOut(timeout=1, retries=0)
+      except:
+        traceback.print_exc()
+        raise
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, test_obj.function_call_counters['alwaysTimesOut'])
+
+  def testMethodDecoratorDoesRetries(self):
+    """Tests that the method decorator handles retries logic."""
+    test_obj = self._MethodDecoratorTestObject(self)
+    with self.assertRaises(device_errors.CommandFailedError):
+      try:
+        test_obj.alwaysRaisesCommandFailedError(retries=10)
+      except:
+        traceback.print_exc()
+        raise
+    self.assertEquals(
+        11, test_obj.function_call_counters['alwaysRaisesCommandFailedError'])
+
+  def testMethodDecoratorPassesValues(self):
+    """Tests that the method decorator passes timeout and retries kwargs."""
+    test_obj = self._MethodDecoratorTestObject(
+        self, default_timeout=42, default_retries=31)
+    self.assertEquals(42, test_obj.alwaysReturnsTimeout())
+    self.assertEquals(41, test_obj.alwaysReturnsTimeout(timeout=41))
+    self.assertEquals(31, test_obj.alwaysReturnsRetries())
+    self.assertEquals(32, test_obj.alwaysReturnsRetries(retries=32))
+
+  def testMethodDecoratorTranslatesOldExceptions(self):
+    test_obj = self._MethodDecoratorTestObject(self)
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      test_obj.alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      test_obj.alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testMethodDecoratorTranslatesReraiserExceptions(self):
+    test_obj = self._MethodDecoratorTestObject(self)
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      test_obj.alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/device/device_blacklist.py b/build/android/pylib/device/device_blacklist.py
new file mode 100644
index 0000000..a141d62
--- /dev/null
+++ b/build/android/pylib/device/device_blacklist.py
@@ -0,0 +1,61 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
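+"""A simple persistent blacklist of misbehaving devices.
+
+Illustrative usage (hypothetical serial):
+  device_blacklist.ExtendBlacklist(['0123456789abcdef'])
+  bad_devices = device_blacklist.ReadBlacklist()
+"""
+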
+import json
+import os
+import threading
+
+from pylib import constants
+
+_BLACKLIST_JSON = os.path.join(
+    constants.DIR_SOURCE_ROOT,
+    os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+    'bad_devices.json')
+
+# Note that this only protects against concurrent accesses to the blacklist
+# within a process.
+_blacklist_lock = threading.RLock()
+
+
+def ReadBlacklist():
+  """Reads the blacklist from the _BLACKLIST_JSON file.
+
+  Returns:
+    A list containing bad devices.
+  """
+  with _blacklist_lock:
+    if not os.path.exists(_BLACKLIST_JSON):
+      return []
+
+    with open(_BLACKLIST_JSON, 'r') as f:
+      return json.load(f)
+
+
+def WriteBlacklist(blacklist):
+  """Writes the provided blacklist to the _BLACKLIST_JSON file.
+
+  Args:
+    blacklist: list of bad devices to write to the _BLACKLIST_JSON file.
+  """
+  with _blacklist_lock:
+    with open(_BLACKLIST_JSON, 'w') as f:
+      json.dump(list(set(blacklist)), f)
+
+
+def ExtendBlacklist(devices):
+  """Adds devices to _BLACKLIST_JSON file.
+
+  Args:
+    devices: list of bad devices to be added to the _BLACKLIST_JSON file.
+  """
+  with _blacklist_lock:
+    blacklist = ReadBlacklist()
+    blacklist.extend(devices)
+    WriteBlacklist(blacklist)
+
+
+def ResetBlacklist():
+  """Erases the _BLACKLIST_JSON file if it exists."""
+  with _blacklist_lock:
+    if os.path.exists(_BLACKLIST_JSON):
+      os.remove(_BLACKLIST_JSON)
+
diff --git a/build/android/pylib/device/device_errors.py b/build/android/pylib/device/device_errors.py
new file mode 100644
index 0000000..2492015
--- /dev/null
+++ b/build/android/pylib/device/device_errors.py
@@ -0,0 +1,89 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Exception classes raised by AdbWrapper and DeviceUtils.
+"""
+
+from pylib import cmd_helper
+from pylib.utils import base_error
+
+
+class CommandFailedError(base_error.BaseError):
+  """Exception for command failures."""
+
+  def __init__(self, message, device_serial=None):
+    if device_serial is not None:
+      message = '(device: %s) %s' % (device_serial, message)
+    self.device_serial = device_serial
+    super(CommandFailedError, self).__init__(message)
+
+
+class AdbCommandFailedError(CommandFailedError):
+  """Exception for adb command failures."""
+
+  def __init__(self, args, output, status=None, device_serial=None,
+               message=None):
+    self.args = args
+    self.output = output
+    self.status = status
+    if not message:
+      adb_cmd = ' '.join(cmd_helper.SingleQuote(arg) for arg in self.args)
+      message = ['adb %s: failed ' % adb_cmd]
+      if status:
+        message.append('with exit status %s ' % self.status)
+      if output:
+        message.append('and output:\n')
+        message.extend('- %s\n' % line for line in output.splitlines())
+      else:
+        message.append('and no output.')
+      message = ''.join(message)
+    super(AdbCommandFailedError, self).__init__(message, device_serial)
+
+
+class DeviceVersionError(CommandFailedError):
+  """Exception for device version failures."""
+
+  def __init__(self, message, device_serial=None):
+    super(DeviceVersionError, self).__init__(message, device_serial)
+
+
+class AdbShellCommandFailedError(AdbCommandFailedError):
+  """Exception for shell command failures run via adb."""
+
+  def __init__(self, command, output, status, device_serial=None):
+    self.command = command
+    message = ['shell command run via adb failed on the device:\n',
+               '  command: %s\n' % command]
+    message.append('  exit status: %s\n' % status)
+    if output:
+      message.append('  output:\n')
+      if isinstance(output, basestring):
+        output_lines = output.splitlines()
+      else:
+        output_lines = output
+      message.extend('  - %s\n' % line for line in output_lines)
+    else:
+      message.append("  output: ''\n")
+    message = ''.join(message)
+    super(AdbShellCommandFailedError, self).__init__(
+      ['shell', command], output, status, device_serial, message)
+
+
+class CommandTimeoutError(base_error.BaseError):
+  """Exception for command timeouts."""
+  pass
+
+
+class DeviceUnreachableError(base_error.BaseError):
+  """Exception for device unreachable failures."""
+  pass
+
+
+class NoDevicesError(base_error.BaseError):
+  """Exception for having no devices attached."""
+
+  def __init__(self):
+    super(NoDevicesError, self).__init__(
+        'No devices attached.', is_infra_error=True)
diff --git a/build/android/pylib/device/device_list.py b/build/android/pylib/device/device_list.py
new file mode 100644
index 0000000..0eb6acb
--- /dev/null
+++ b/build/android/pylib/device/device_list.py
@@ -0,0 +1,30 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module to keep track of devices across builds."""
+
+import os
+
+LAST_DEVICES_FILENAME = '.last_devices'
+LAST_MISSING_DEVICES_FILENAME = '.last_missing'
+
+
+def GetPersistentDeviceList(file_name):
+  """Returns a list of devices.
+
+  Args:
+    file_name: the file name containing a list of devices.
+
+  Returns:
+    List of device serial numbers that were on the bot.
+  """
+  with open(file_name) as f:
+    return f.read().splitlines()
+
+
+def WritePersistentDeviceList(file_name, device_list):
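+  """Writes the given device serials to the file, one per line.
+
+  Duplicates are removed before writing, so the order is not preserved.
+  """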
+  path = os.path.dirname(file_name)
+  if not os.path.exists(path):
+    os.makedirs(path)
+  with open(file_name, 'w') as f:
+    f.write('\n'.join(set(device_list)))
diff --git a/build/android/pylib/device/device_utils.py b/build/android/pylib/device/device_utils.py
new file mode 100644
index 0000000..f201ef3
--- /dev/null
+++ b/build/android/pylib/device/device_utils.py
@@ -0,0 +1,1754 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a variety of device interactions based on adb.
+
+Eventually, this will be based on adb_wrapper.
+"""
+# pylint: disable=unused-argument
+
+import collections
+import contextlib
+import itertools
+import logging
+import multiprocessing
+import os
+import posixpath
+import re
+import shutil
+import sys
+import tempfile
+import time
+import zipfile
+
+import pylib.android_commands
+from pylib import cmd_helper
+from pylib import constants
+from pylib import device_signal
+from pylib.constants import keyevent
+from pylib.device import adb_wrapper
+from pylib.device import decorators
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+from pylib.device import intent
+from pylib.device import logcat_monitor
+from pylib.device.commands import install_commands
+from pylib.sdk import split_select
+from pylib.utils import apk_helper
+from pylib.utils import base_error
+from pylib.utils import device_temp_file
+from pylib.utils import host_utils
+from pylib.utils import md5sum
+from pylib.utils import parallelizer
+from pylib.utils import timeout_retry
+from pylib.utils import zip_utils
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 3
+
+# A sentinel object for default values
+# TODO(jbudorick,perezju): revisit how default values are handled by
+# the timeout_retry decorators.
+DEFAULT = object()
+
+_CONTROL_CHARGING_COMMANDS = [
+  {
+    # Nexus 4
+    'witness_file': '/sys/module/pm8921_charger/parameters/disabled',
+    'enable_command': 'echo 0 > /sys/module/pm8921_charger/parameters/disabled',
+    'disable_command':
+        'echo 1 > /sys/module/pm8921_charger/parameters/disabled',
+  },
+  {
+    # Nexus 5
+    # Setting the HIZ bit of the bq24192 causes the charger to actually ignore
+    # energy coming from USB. Setting the power_supply offline just updates the
+    # Android system to reflect that.
+    'witness_file': '/sys/kernel/debug/bq24192/INPUT_SRC_CONT',
+    'enable_command': (
+        'echo 0x4A > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'echo 1 > /sys/class/power_supply/usb/online'),
+    'disable_command': (
+        'echo 0xCA > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'chmod 644 /sys/class/power_supply/usb/online && '
+        'echo 0 > /sys/class/power_supply/usb/online'),
+  },
+]
+
+
+@decorators.WithExplicitTimeoutAndRetries(
+    _DEFAULT_TIMEOUT, _DEFAULT_RETRIES)
+def GetAVDs():
+  """Returns a list of Android Virtual Devices.
+
+  Returns:
+    A list containing the configured AVDs.
+  """
+  lines = cmd_helper.GetCmdOutput([
+      os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'android'),
+      'list', 'avd']).splitlines()
+  avds = []
+  for line in lines:
+    if 'Name:' not in line:
+      continue
+    key, value = (s.strip() for s in line.split(':', 1))
+    if key == 'Name':
+      avds.append(value)
+  return avds
+
+
+@decorators.WithExplicitTimeoutAndRetries(
+    _DEFAULT_TIMEOUT, _DEFAULT_RETRIES)
+def RestartServer():
+  """Restarts the adb server.
+
+  Raises:
+    CommandFailedError if we fail to kill or restart the server.
+  """
+  def adb_killed():
+    return not adb_wrapper.AdbWrapper.IsServerOnline()
+
+  def adb_started():
+    return adb_wrapper.AdbWrapper.IsServerOnline()
+
+  adb_wrapper.AdbWrapper.KillServer()
+  if not timeout_retry.WaitFor(adb_killed, wait_period=1, max_tries=5):
+    # TODO(perezju): raise an exception after fixing http://crbug.com/442319
+    logging.warning('Failed to kill adb server')
+  adb_wrapper.AdbWrapper.StartServer()
+  if not timeout_retry.WaitFor(adb_started, wait_period=1, max_tries=5):
+    raise device_errors.CommandFailedError('Failed to start adb server')
+
+
+def _GetTimeStamp():
+  """Return a basic ISO 8601 time stamp with the current local time."""
+  return time.strftime('%Y%m%dT%H%M%S', time.localtime())
+
+
+def _JoinLines(lines):
+  # Makes sure that the last line is also terminated, and is more memory
+  # efficient than first appending an end-line to each line and then joining
+  # all of them together.
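+  # e.g. _JoinLines(['foo', 'bar']) == 'foo\nbar\n'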
+  return ''.join(s for line in lines for s in (line, '\n'))
+
+
+class DeviceUtils(object):
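+  """Provides a rich interface to an attached Android device via adb.
+
+  Illustrative usage (hypothetical serial):
+    device = device_utils.DeviceUtils('0123456789abcdef')
+    device.WaitUntilFullyBooted()
+    output = device.RunShellCommand(['ls', '/sdcard'], check_return=True)
+  """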
+
+  _MAX_ADB_COMMAND_LENGTH = 512
+  _MAX_ADB_OUTPUT_LENGTH = 32768
+  _LAUNCHER_FOCUSED_RE = re.compile(
+      r'\s*mCurrentFocus.*(Launcher|launcher).*')
+  _VALID_SHELL_VARIABLE = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*$')
+
+  # Property in /data/local.prop that controls Java assertions.
+  JAVA_ASSERT_PROPERTY = 'dalvik.vm.enableassertions'
+
+  def __init__(self, device, default_timeout=_DEFAULT_TIMEOUT,
+               default_retries=_DEFAULT_RETRIES):
+    """DeviceUtils constructor.
+
+    Args:
+      device: Either a device serial, an existing AdbWrapper instance, or an
+              existing AndroidCommands instance.
+      default_timeout: An integer containing the default number of seconds to
+                       wait for an operation to complete if no explicit value
+                       is provided.
+      default_retries: An integer containing the default number of times an
+                       operation should be retried on failure if no explicit
+                       value is provided.
+    """
+    self.adb = None
+    self.old_interface = None
+    if isinstance(device, basestring):
+      self.adb = adb_wrapper.AdbWrapper(device)
+      self.old_interface = pylib.android_commands.AndroidCommands(device)
+    elif isinstance(device, adb_wrapper.AdbWrapper):
+      self.adb = device
+      self.old_interface = pylib.android_commands.AndroidCommands(str(device))
+    elif isinstance(device, pylib.android_commands.AndroidCommands):
+      self.adb = adb_wrapper.AdbWrapper(device.GetDevice())
+      self.old_interface = device
+    else:
+      raise ValueError('Unsupported device value: %r' % device)
+    self._commands_installed = None
+    self._default_timeout = default_timeout
+    self._default_retries = default_retries
+    self._cache = {}
+    self._client_caches = {}
+    assert hasattr(self, decorators.DEFAULT_TIMEOUT_ATTR)
+    assert hasattr(self, decorators.DEFAULT_RETRIES_ATTR)
+
+  def __eq__(self, other):
+    """Checks whether |other| refers to the same device as |self|.
+
+    Args:
+      other: The object to compare to. This can be a basestring, an instance
+        of adb_wrapper.AdbWrapper, or an instance of DeviceUtils.
+    Returns:
+      Whether |other| refers to the same device as |self|.
+    """
+    return self.adb.GetDeviceSerial() == str(other)
+
+  def __lt__(self, other):
+    """Compares two instances of DeviceUtils.
+
+    This merely compares their serial numbers.
+
+    Args:
+      other: The instance of DeviceUtils to compare to.
+    Returns:
+      Whether |self| is less than |other|.
+    """
+    return self.adb.GetDeviceSerial() < other.adb.GetDeviceSerial()
+
+  def __str__(self):
+    """Returns the device serial."""
+    return self.adb.GetDeviceSerial()
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def IsOnline(self, timeout=None, retries=None):
+    """Checks whether the device is online.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if the device is online, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    try:
+      return self.adb.GetState() == 'device'
+    except base_error.BaseError as exc:
+      logging.info('Failed to get state: %s', exc)
+      return False
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def HasRoot(self, timeout=None, retries=None):
+    """Checks whether or not adbd has root privileges.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if adbd has root privileges, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    try:
+      self.RunShellCommand('ls /root', check_return=True)
+      return True
+    except device_errors.AdbCommandFailedError:
+      return False
+
+  def NeedsSU(self, timeout=DEFAULT, retries=DEFAULT):
+    """Checks whether 'su' is needed to access protected resources.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if 'su' is available on the device and is needed to access
+        protected resources; False otherwise if either 'su' is not available
+        (e.g. because the device has a user build), or not needed (because adbd
+        already has root privileges).
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    if 'needs_su' not in self._cache:
+      try:
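+        # The command passes only if 'su -c ls /root' succeeds (su exists and
+        # grants access) while plain 'ls /root' fails (adbd itself lacks
+        # root), i.e. su is both available and actually needed.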
+        self.RunShellCommand(
+            'su -c ls /root && ! ls /root', check_return=True,
+            timeout=self._default_timeout if timeout is DEFAULT else timeout,
+            retries=self._default_retries if retries is DEFAULT else retries)
+        self._cache['needs_su'] = True
+      except device_errors.AdbCommandFailedError:
+        self._cache['needs_su'] = False
+    return self._cache['needs_su']
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def EnableRoot(self, timeout=None, retries=None):
+    """Restarts adbd with root privileges.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if root could not be enabled.
+      CommandTimeoutError on timeout.
+    """
+    if self.IsUserBuild():
+      raise device_errors.CommandFailedError(
+          'Cannot enable root in user builds.', str(self))
+    if 'needs_su' in self._cache:
+      del self._cache['needs_su']
+    self.adb.Root()
+    self.WaitUntilFullyBooted()
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def IsUserBuild(self, timeout=None, retries=None):
+    """Checks whether or not the device is running a user build.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if the device is running a user build, False otherwise (i.e. if
+        it's running a userdebug build).
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    return self.build_type == 'user'
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetExternalStoragePath(self, timeout=None, retries=None):
+    """Get the device's path to its SD card.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The device's path to its SD card.
+
+    Raises:
+      CommandFailedError if the external storage path could not be determined.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    if 'external_storage' in self._cache:
+      return self._cache['external_storage']
+
+    value = self.RunShellCommand('echo $EXTERNAL_STORAGE',
+                                 single_line=True,
+                                 check_return=True)
+    if not value:
+      raise device_errors.CommandFailedError('$EXTERNAL_STORAGE is not set',
+                                             str(self))
+    self._cache['external_storage'] = value
+    return value
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetApplicationPaths(self, package, timeout=None, retries=None):
+    """Get the paths of the installed apks on the device for the given package.
+
+    Args:
+      package: Name of the package.
+
+    Returns:
+      List of paths to the apks on the device for the given package.
+    """
+    # 'pm path' is liable to incorrectly exit with a nonzero status starting
+    # in Lollipop.
+    # TODO(jbudorick): Check if this is fixed as new Android versions are
+    # released to put an upper bound on this.
+    should_check_return = (self.build_version_sdk <
+                           constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP)
+    output = self.RunShellCommand(
+        ['pm', 'path', package], check_return=should_check_return)
+    apks = []
+    for line in output:
+      if not line.startswith('package:'):
+        raise device_errors.CommandFailedError(
+            'pm path returned: %r' % '\n'.join(output), str(self))
+      apks.append(line[len('package:'):])
+    return apks
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetApplicationDataDirectory(self, package, timeout=None, retries=None):
+    """Get the data directory on the device for the given package.
+
+    Args:
+      package: Name of the package.
+
+    Returns:
+      The package's data directory, or None if the package doesn't exist on the
+      device.
+    """
+    try:
+      output = self._RunPipedShellCommand(
+          'pm dump %s | grep dataDir=' % cmd_helper.SingleQuote(package))
+      for line in output:
+        _, _, dataDir = line.partition('dataDir=')
+        if dataDir:
+          return dataDir
+    except device_errors.CommandFailedError:
+      logging.exception('Could not find data directory for %s', package)
+    return None
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def WaitUntilFullyBooted(self, wifi=False, timeout=None, retries=None):
+    """Wait for the device to fully boot.
+
+    This means waiting for the device to boot, the package manager to be
+    available, and the SD card to be ready. It can optionally mean waiting
+    for wifi to come up, too.
+
+    Args:
+      wifi: A boolean indicating if we should wait for wifi to come up or not.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError if one of the component waits times out.
+      DeviceUnreachableError if the device becomes unresponsive.
+    """
+    def sd_card_ready():
+      try:
+        self.RunShellCommand(['test', '-d', self.GetExternalStoragePath()],
+                             check_return=True)
+        return True
+      except device_errors.AdbCommandFailedError:
+        return False
+
+    def pm_ready():
+      try:
+        return self.GetApplicationPaths('android')
+      except device_errors.CommandFailedError:
+        return False
+
+    def boot_completed():
+      return self.GetProp('sys.boot_completed') == '1'
+
+    def wifi_enabled():
+      return 'Wi-Fi is enabled' in self.RunShellCommand(['dumpsys', 'wifi'],
+                                                        check_return=False)
+
+    self.adb.WaitForDevice()
+    timeout_retry.WaitFor(sd_card_ready)
+    timeout_retry.WaitFor(pm_ready)
+    timeout_retry.WaitFor(boot_completed)
+    if wifi:
+      timeout_retry.WaitFor(wifi_enabled)
+
+  REBOOT_DEFAULT_TIMEOUT = 10 * _DEFAULT_TIMEOUT
+  REBOOT_DEFAULT_RETRIES = _DEFAULT_RETRIES
+
+  @decorators.WithTimeoutAndRetriesDefaults(
+      REBOOT_DEFAULT_TIMEOUT,
+      REBOOT_DEFAULT_RETRIES)
+  def Reboot(self, block=True, wifi=False, timeout=None, retries=None):
+    """Reboot the device.
+
+    Args:
+      block: A boolean indicating if we should wait for the reboot to complete.
+      wifi: A boolean indicating if we should wait for wifi to be enabled after
+        the reboot. The option has no effect unless |block| is also True.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    def device_offline():
+      return not self.IsOnline()
+
+    self.adb.Reboot()
+    self._ClearCache()
+    timeout_retry.WaitFor(device_offline, wait_period=1)
+    if block:
+      self.WaitUntilFullyBooted(wifi=wifi)
+
+  INSTALL_DEFAULT_TIMEOUT = 4 * _DEFAULT_TIMEOUT
+  INSTALL_DEFAULT_RETRIES = _DEFAULT_RETRIES
+
+  @decorators.WithTimeoutAndRetriesDefaults(
+      INSTALL_DEFAULT_TIMEOUT,
+      INSTALL_DEFAULT_RETRIES)
+  def Install(self, apk_path, reinstall=False, timeout=None, retries=None):
+    """Install an APK.
+
+    Noop if an identical APK is already installed.
+
+    Args:
+      apk_path: A string containing the path to the APK to install.
+      reinstall: A boolean indicating if we should keep any existing app data.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the installation fails.
+      CommandTimeoutError if the installation times out.
+      DeviceUnreachableError on missing device.
+    """
+    package_name = apk_helper.GetPackageName(apk_path)
+    device_paths = self.GetApplicationPaths(package_name)
+    if device_paths:
+      if len(device_paths) > 1:
+        logging.warning(
+            'Installing single APK (%s) when split APKs (%s) are currently '
+            'installed.', apk_path, ' '.join(device_paths))
+      (files_to_push, _) = self._GetChangedAndStaleFiles(
+          apk_path, device_paths[0])
+      should_install = bool(files_to_push)
+      if should_install and not reinstall:
+        self.adb.Uninstall(package_name)
+    else:
+      should_install = True
+    if should_install:
+      self.adb.Install(apk_path, reinstall=reinstall)
+
+  @decorators.WithTimeoutAndRetriesDefaults(
+      INSTALL_DEFAULT_TIMEOUT,
+      INSTALL_DEFAULT_RETRIES)
+  def InstallSplitApk(self, base_apk, split_apks, reinstall=False,
+                      timeout=None, retries=None):
+    """Install a split APK.
+
+    Noop if all of the APK splits are already installed.
+
+    Args:
+      base_apk: A string of the path to the base APK.
+      split_apks: A list of strings of paths of all of the APK splits.
+      reinstall: A boolean indicating if we should keep any existing app data.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the installation fails.
+      CommandTimeoutError if the installation times out.
+      DeviceUnreachableError on missing device.
+      DeviceVersionError if device SDK is less than Android L.
+    """
+    self._CheckSdkLevel(constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP)
+
+    all_apks = [base_apk] + split_select.SelectSplits(
+        self, base_apk, split_apks)
+    package_name = apk_helper.GetPackageName(base_apk)
+    device_apk_paths = self.GetApplicationPaths(package_name)
+
+    if device_apk_paths:
+      partial_install_package = package_name
+      device_checksums = md5sum.CalculateDeviceMd5Sums(device_apk_paths, self)
+      host_checksums = md5sum.CalculateHostMd5Sums(all_apks)
+      apks_to_install = [k for (k, v) in host_checksums.iteritems()
+                         if v not in device_checksums.values()]
+      if apks_to_install and not reinstall:
+        self.adb.Uninstall(package_name)
+        partial_install_package = None
+        apks_to_install = all_apks
+    else:
+      partial_install_package = None
+      apks_to_install = all_apks
+    if apks_to_install:
+      self.adb.InstallMultiple(
+          apks_to_install, partial=partial_install_package, reinstall=reinstall)
+
+  def _CheckSdkLevel(self, required_sdk_level):
+    """Raises an exception if the device does not have the required SDK level.
+    """
+    if self.build_version_sdk < required_sdk_level:
+      raise device_errors.DeviceVersionError(
+          ('Requires SDK level %s, device is SDK level %s' %
+           (required_sdk_level, self.build_version_sdk)),
+          device_serial=self.adb.GetDeviceSerial())
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def RunShellCommand(self, cmd, check_return=False, cwd=None, env=None,
+                      as_root=False, single_line=False, large_output=False,
+                      timeout=None, retries=None):
+    """Run an ADB shell command.
+
+    The command to run, |cmd|, should be a sequence of program arguments or
+    else a single string.
+
+    When |cmd| is a sequence, it is assumed to contain the name of the command
+    to run followed by its arguments. In this case, arguments are passed to the
+    command exactly as given, without any further processing by the shell. This
+    makes it easy to pass arguments containing spaces or special characters
+    without having to worry about getting quoting right. Whenever possible, it
+    is recommended to pass |cmd| as a sequence.
+
+    When |cmd| is given as a string, it will be interpreted and run by the
+    shell on the device.
+
+    This behaviour is consistent with that of command runners in cmd_helper as
+    well as Python's own subprocess.Popen.
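+
+    For example (illustrative; note the embedded space):
+      device.RunShellCommand(['echo', 'hello world'])  # no quoting needed
+      device.RunShellCommand('echo "hello world"')     # parsed by the shell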
+
+    TODO(perezju) Change the default of |check_return| to True when callers
+      have switched to the new behaviour.
+
+    Args:
+      cmd: A string with the full command to run on the device, or a sequence
+        containing the command and its arguments.
+      check_return: A boolean indicating whether or not the return code should
+        be checked.
+      cwd: The device directory in which the command should be run.
+      env: The environment variables with which the command should be run.
+      as_root: A boolean indicating whether the shell command should be run
+        with root privileges.
+      single_line: A boolean indicating if only a single line of output is
+        expected.
+      large_output: Uses a work-around for large shell command output. Without
+        this, large output will be truncated.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      If single_line is False, the output of the command as a list of lines;
+      otherwise, a string with the single line of output emitted by the command
+      (with the optional newline at the end stripped).
+
+    Raises:
+      AdbCommandFailedError if check_return is True and the exit code of
+        the command run on the device is non-zero.
+      CommandFailedError if single_line is True but the output contains two or
+        more lines.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    def env_quote(key, value):
+      if not DeviceUtils._VALID_SHELL_VARIABLE.match(key):
+        raise KeyError('Invalid shell variable name %r' % key)
+      # using double quotes here to allow interpolation of shell variables
+      return '%s=%s' % (key, cmd_helper.DoubleQuote(value))
+
+    def run(cmd):
+      return self.adb.Shell(cmd)
+
+    def handle_check_return(cmd):
+      try:
+        return run(cmd)
+      except device_errors.AdbCommandFailedError as exc:
+        if check_return:
+          raise
+        else:
+          return exc.output
+
+    def handle_large_command(cmd):
+      if len(cmd) < self._MAX_ADB_COMMAND_LENGTH:
+        return handle_check_return(cmd)
+      else:
+        with device_temp_file.DeviceTempFile(self.adb, suffix='.sh') as script:
+          self._WriteFileWithPush(script.name, cmd)
+          logging.info('Large shell command will be run from file: %s ...',
+                       cmd[:100])
+          return handle_check_return('sh %s' % script.name_quoted)
+
+    def handle_large_output(cmd, large_output_mode):
+      if large_output_mode:
+        with device_temp_file.DeviceTempFile(self.adb) as large_output_file:
+          cmd = '%s > %s' % (cmd, large_output_file.name)
+          logging.debug('Large output mode enabled. Will write output to '
+                        'device and read results from file.')
+          handle_large_command(cmd)
+          return self.ReadFile(large_output_file.name, force_pull=True)
+      else:
+        try:
+          return handle_large_command(cmd)
+        except device_errors.AdbCommandFailedError as exc:
+          if exc.status is None:
+            logging.exception('No output found for %s', cmd)
+            logging.warning('Attempting to run in large_output mode.')
+            logging.warning('Use RunShellCommand(..., large_output=True) for '
+                            'shell commands that expect a lot of output.')
+            return handle_large_output(cmd, True)
+          else:
+            raise
+
+    if not isinstance(cmd, basestring):
+      cmd = ' '.join(cmd_helper.SingleQuote(s) for s in cmd)
+    if env:
+      env = ' '.join(env_quote(k, v) for k, v in env.iteritems())
+      cmd = '%s %s' % (env, cmd)
+    if cwd:
+      cmd = 'cd %s && %s' % (cmd_helper.SingleQuote(cwd), cmd)
+    if as_root and self.NeedsSU():
+      # "su -c sh -c" allows using shell features in |cmd|
+      cmd = 'su -c sh -c %s' % cmd_helper.SingleQuote(cmd)
+
+    output = handle_large_output(cmd, large_output).splitlines()
+
+    if single_line:
+      if not output:
+        return ''
+      elif len(output) == 1:
+        return output[0]
+      else:
+        msg = 'one line of output was expected, but got: %s'
+        raise device_errors.CommandFailedError(msg % output, str(self))
+    else:
+      return output
+
+  def _RunPipedShellCommand(self, script, **kwargs):
+    PIPESTATUS_LEADER = 'PIPESTATUS: '
+
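+    # Appends e.g. '; echo "PIPESTATUS: 0 1"' to the script so that the exit
+    # status of every stage of the pipeline is emitted on a final line, which
+    # is parsed and stripped from the output below.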
+    script += '; echo "%s${PIPESTATUS[@]}"' % PIPESTATUS_LEADER
+    kwargs['check_return'] = True
+    output = self.RunShellCommand(script, **kwargs)
+    pipestatus_line = output[-1]
+
+    if not pipestatus_line.startswith(PIPESTATUS_LEADER):
+      logging.error('Pipe exit statuses of shell script missing.')
+      raise device_errors.AdbShellCommandFailedError(
+          script, output, status=None,
+          device_serial=self.adb.GetDeviceSerial())
+
+    output = output[:-1]
+    statuses = [
+        int(s) for s in pipestatus_line[len(PIPESTATUS_LEADER):].split()]
+    if any(statuses):
+      raise device_errors.AdbShellCommandFailedError(
+          script, output, status=statuses,
+          device_serial=self.adb.GetDeviceSerial())
+    return output
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def KillAll(self, process_name, signum=device_signal.SIGKILL, as_root=False,
+              blocking=False, quiet=False, timeout=None, retries=None):
+    """Kill all processes with the given name on the device.
+
+    Args:
+      process_name: A string containing the name of the process to kill.
+      signum: An integer containing the signal number to send to kill. Defaults
+              to SIGKILL (9).
+      as_root: A boolean indicating whether the kill should be executed with
+               root privileges.
+      blocking: A boolean indicating whether we should wait until all processes
+                with the given |process_name| are dead.
+      quiet: A boolean indicating whether to ignore the fact that no processes
+             to kill were found.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The number of processes attempted to kill.
+
+    Raises:
+      CommandFailedError if no process was killed and |quiet| is False.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    pids = self.GetPids(process_name)
+    if not pids:
+      if quiet:
+        return 0
+      else:
+        raise device_errors.CommandFailedError(
+            'No process "%s"' % process_name, str(self))
+
+    cmd = ['kill', '-%d' % signum] + pids.values()
+    self.RunShellCommand(cmd, as_root=as_root, check_return=True)
+
+    if blocking:
+      # TODO(perezu): use timeout_retry.WaitFor
+      wait_period = 0.1
+      while self.GetPids(process_name):
+        time.sleep(wait_period)
+
+    return len(pids)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def StartActivity(self, intent_obj, blocking=False, trace_file_name=None,
+                    force_stop=False, timeout=None, retries=None):
+    """Start package's activity on the device.
+
+    Args:
+      intent_obj: An Intent object to send.
+      blocking: A boolean indicating whether we should wait for the activity to
+                finish launching.
+      trace_file_name: If present, a string that both indicates that we want to
+                       profile the activity and contains the path to which the
+                       trace should be saved.
+      force_stop: A boolean indicating whether we should stop the activity
+                  before starting it.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the activity could not be started.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    cmd = ['am', 'start']
+    if blocking:
+      cmd.append('-W')
+    if trace_file_name:
+      cmd.extend(['--start-profiler', trace_file_name])
+    if force_stop:
+      cmd.append('-S')
+    cmd.extend(intent_obj.am_args)
+    for line in self.RunShellCommand(cmd, check_return=True):
+      if line.startswith('Error:'):
+        raise device_errors.CommandFailedError(line, str(self))
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def StartInstrumentation(self, component, finish=True, raw=False,
+                           extras=None, timeout=None, retries=None):
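+    """Runs 'am instrument' with the given component and extras.
+
+    Args:
+      component: The instrumentation component to run.
+      finish: Whether to pass -w and wait for the instrumentation to finish.
+      raw: Whether to pass -r to request raw output format.
+      extras: A dict of extras to pass with -e.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The output of the instrumentation command as a list of lines.
+    """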
+    if extras is None:
+      extras = {}
+
+    cmd = ['am', 'instrument']
+    if finish:
+      cmd.append('-w')
+    if raw:
+      cmd.append('-r')
+    for k, v in extras.iteritems():
+      cmd.extend(['-e', str(k), str(v)])
+    cmd.append(component)
+    return self.RunShellCommand(cmd, check_return=True, large_output=True)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def BroadcastIntent(self, intent_obj, timeout=None, retries=None):
+    """Send a broadcast intent.
+
+    Args:
+      intent_obj: An Intent to broadcast.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    cmd = ['am', 'broadcast'] + intent_obj.am_args
+    self.RunShellCommand(cmd, check_return=True)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GoHome(self, timeout=None, retries=None):
+    """Return to the home screen and obtain launcher focus.
+
+    This command launches the home screen and attempts to obtain
+    launcher focus until the timeout is reached.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    def is_launcher_focused():
+      output = self.RunShellCommand(['dumpsys', 'window', 'windows'],
+                                    check_return=True, large_output=True)
+      return any(self._LAUNCHER_FOCUSED_RE.match(l) for l in output)
+
+    def dismiss_popups():
+      # There is a dialog present; attempt to get rid of it.
+      # Not all dialogs can be dismissed with back.
+      self.SendKeyEvent(keyevent.KEYCODE_ENTER)
+      self.SendKeyEvent(keyevent.KEYCODE_BACK)
+      return is_launcher_focused()
+
+    # If Home is already focused, return early to avoid unnecessary work.
+    if is_launcher_focused():
+      return
+
+    self.StartActivity(
+        intent.Intent(action='android.intent.action.MAIN',
+                      category='android.intent.category.HOME'),
+        blocking=True)
+
+    if not is_launcher_focused():
+      timeout_retry.WaitFor(dismiss_popups, wait_period=1)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def ForceStop(self, package, timeout=None, retries=None):
+    """Close the application.
+
+    Args:
+      package: A string containing the name of the package to stop.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    self.RunShellCommand(['am', 'force-stop', package], check_return=True)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def ClearApplicationState(self, package, timeout=None, retries=None):
+    """Clear all state for the given package.
+
+    Args:
+      package: A string containing the name of the package to stop.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    # Check that the package exists before clearing it for android builds below
+    # JB MR2. Necessary because calling pm clear on a package that doesn't exist
+    # may never return.
+    if ((self.build_version_sdk >=
+         constants.ANDROID_SDK_VERSION_CODES.JELLY_BEAN_MR2)
+        or self.GetApplicationPaths(package)):
+      self.RunShellCommand(['pm', 'clear', package], check_return=True)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SendKeyEvent(self, keycode, timeout=None, retries=None):
+    """Sends a keycode to the device.
+
+    See the pylib.constants.keyevent module for suitable keycode values.
+
+    Args:
+      keycode: An integer keycode to send to the device.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    self.RunShellCommand(['input', 'keyevent', format(keycode, 'd')],
+                         check_return=True)
+
+  PUSH_CHANGED_FILES_DEFAULT_TIMEOUT = 10 * _DEFAULT_TIMEOUT
+  PUSH_CHANGED_FILES_DEFAULT_RETRIES = _DEFAULT_RETRIES
+
+  @decorators.WithTimeoutAndRetriesDefaults(
+      PUSH_CHANGED_FILES_DEFAULT_TIMEOUT,
+      PUSH_CHANGED_FILES_DEFAULT_RETRIES)
+  def PushChangedFiles(self, host_device_tuples, timeout=None,
+                       retries=None, delete_device_stale=False):
+    """Push files to the device, skipping files that don't need updating.
+
+    When a directory is pushed, it is traversed recursively on the host and
+    all files in it are pushed to the device as needed.
+    Additionally, if delete_device_stale option is True,
+    files that exist on the device but don't exist on the host are deleted.
+
+    Args:
+      host_device_tuples: A list of (host_path, device_path) tuples, where
+        |host_path| is an absolute path of a file or directory on the host
+        that should be minimally pushed to the device, and |device_path| is
+        an absolute path of the destination on the device.
+      timeout: timeout in seconds
+      retries: number of retries
+      delete_device_stale: option to delete stale files on device
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+
+    all_changed_files = []
+    all_stale_files = []
+    for h, d in host_device_tuples:
+      if os.path.isdir(h):
+        self.RunShellCommand(['mkdir', '-p', d], check_return=True)
+      changed_files, stale_files = (
+          self._GetChangedAndStaleFiles(h, d, delete_device_stale))
+      all_changed_files += changed_files
+      all_stale_files += stale_files
+
+    if delete_device_stale:
+      self.RunShellCommand(['rm', '-f'] + all_stale_files,
+                             check_return=True)
+
+    if not all_changed_files:
+      return
+
+    self._PushFilesImpl(host_device_tuples, all_changed_files)
+
+  def _GetChangedAndStaleFiles(self, host_path, device_path, track_stale=False):
+    """Get files to push and delete
+
+    Args:
+      host_path: an absolute path of a file or directory on the host
+      device_path: an absolute path of a file or directory on the device
+      track_stale: whether to bother looking for stale files (slower)
+
+    Returns:
+      a two-element tuple
+      1st element: a list of (host_files_path, device_files_path) tuples to push
+      2nd element: a list of stale files under device_path, or [] when
+        track_stale == False
+    """
+    real_host_path = os.path.realpath(host_path)
+    try:
+      real_device_path = self.RunShellCommand(
+          ['realpath', device_path], single_line=True, check_return=True)
+    except device_errors.CommandFailedError:
+      real_device_path = None
+    if not real_device_path:
+      return ([(host_path, device_path)], [])
+
+    try:
+      host_checksums = md5sum.CalculateHostMd5Sums([real_host_path])
+      interesting_device_paths = [real_device_path]
+      if not track_stale and os.path.isdir(real_host_path):
+        interesting_device_paths = [
+            posixpath.join(real_device_path, os.path.relpath(p, real_host_path))
+            for p in host_checksums.keys()]
+      device_checksums = md5sum.CalculateDeviceMd5Sums(
+          interesting_device_paths, self)
+    except EnvironmentError as e:
+      logging.warning('Error calculating md5: %s', e)
+      return ([(host_path, device_path)], [])
+
+    if os.path.isfile(host_path):
+      host_checksum = host_checksums.get(real_host_path)
+      device_checksum = device_checksums.get(real_device_path)
+      if host_checksum != device_checksum:
+        return ([(host_path, device_path)], [])
+      else:
+        return ([], [])
+    else:
+      to_push = []
+      for host_abs_path, host_checksum in host_checksums.iteritems():
+        device_abs_path = '%s/%s' % (
+            real_device_path, os.path.relpath(host_abs_path, real_host_path))
+        device_checksum = device_checksums.pop(device_abs_path, None)
+        if device_checksum != host_checksum:
+          to_push.append((host_abs_path, device_abs_path))
+      to_delete = device_checksums.keys()
+      return (to_push, to_delete)
+
+  def _PushFilesImpl(self, host_device_tuples, files):
+    size = sum(host_utils.GetRecursiveDiskUsage(h) for h, _ in files)
+    file_count = len(files)
+    dir_size = sum(host_utils.GetRecursiveDiskUsage(h)
+                   for h, _ in host_device_tuples)
+    dir_file_count = 0
+    for h, _ in host_device_tuples:
+      if os.path.isdir(h):
+        dir_file_count += sum(len(f) for _r, _d, f in os.walk(h))
+      else:
+        dir_file_count += 1
+
+    push_duration = self._ApproximateDuration(
+        file_count, file_count, size, False)
+    dir_push_duration = self._ApproximateDuration(
+        len(host_device_tuples), dir_file_count, dir_size, False)
+    zip_duration = self._ApproximateDuration(1, 1, size, True)
+
+    self._InstallCommands()
+
+    if dir_push_duration < push_duration and (
+        dir_push_duration < zip_duration or not self._commands_installed):
+      self._PushChangedFilesIndividually(host_device_tuples)
+    elif push_duration < zip_duration or not self._commands_installed:
+      self._PushChangedFilesIndividually(files)
+    else:
+      self._PushChangedFilesZipped(files)
+      self.RunShellCommand(
+          ['chmod', '-R', '777'] + [d for _, d in host_device_tuples],
+          as_root=True, check_return=True)
+
+  def _InstallCommands(self):
+    if self._commands_installed is None:
+      try:
+        if not install_commands.Installed(self):
+          install_commands.InstallCommands(self)
+        self._commands_installed = True
+      except Exception as e:
+        logging.warning('unzip not available: %s', str(e))
+        self._commands_installed = False
+
+  @staticmethod
+  def _ApproximateDuration(adb_calls, file_count, byte_count, is_zipping):
+    # We approximate the time to push a set of files to a device as:
+    #   t = c1 * a + c2 * f + c3 + b / c4 + b / (c5 * c6), where
+    #     t: total time (sec)
+    #     c1: adb call time delay (sec)
+    #     a: number of times adb is called (unitless)
+    #     c2: push time delay (sec)
+    #     f: number of files pushed via adb (unitless)
+    #     c3: zip time delay (sec)
+    #     c4: zip rate (bytes/sec)
+    #     b: total number of bytes (bytes)
+    #     c5: transfer rate (bytes/sec)
+    #     c6: compression ratio (unitless)
+
+    # All of these are approximations.
+    ADB_CALL_PENALTY = 0.1 # seconds
+    ADB_PUSH_PENALTY = 0.01 # seconds
+    ZIP_PENALTY = 2.0 # seconds
+    ZIP_RATE = 10000000.0 # bytes / second
+    TRANSFER_RATE = 2000000.0 # bytes / second
+    COMPRESSION_RATIO = 2.0 # unitless
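+
+    # Worked example with the constants above: pushing a single 10 MB file
+    # directly costs roughly 0.1 + 0.01 + 10e6 / 2e6 ~= 5.1 seconds, while
+    # zipping it first costs roughly
+    #   0.1 + 0.01 + (2.0 + 10e6 / 1e7) + 10e6 / (2e6 * 2.0) ~= 5.6 seconds.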
+
+    adb_call_time = ADB_CALL_PENALTY * adb_calls
+    adb_push_setup_time = ADB_PUSH_PENALTY * file_count
+    if is_zipping:
+      zip_time = ZIP_PENALTY + byte_count / ZIP_RATE
+      transfer_time = byte_count / (TRANSFER_RATE * COMPRESSION_RATIO)
+    else:
+      zip_time = 0
+      transfer_time = byte_count / TRANSFER_RATE
+    return adb_call_time + adb_push_setup_time + zip_time + transfer_time
+
+  def _PushChangedFilesIndividually(self, files):
+    for h, d in files:
+      self.adb.Push(h, d)
+
+  def _PushChangedFilesZipped(self, files):
+    if not files:
+      return
+
+    with tempfile.NamedTemporaryFile(suffix='.zip') as zip_file:
+      zip_proc = multiprocessing.Process(
+          target=DeviceUtils._CreateDeviceZip,
+          args=(zip_file.name, files))
+      zip_proc.start()
+      zip_proc.join()
+
+      zip_on_device = '%s/tmp.zip' % self.GetExternalStoragePath()
+      try:
+        self.adb.Push(zip_file.name, zip_on_device)
+        self.RunShellCommand(
+            ['unzip', zip_on_device],
+            as_root=True,
+            env={'PATH': '%s:$PATH' % install_commands.BIN_DIR},
+            check_return=True)
+      finally:
+        if zip_proc.is_alive():
+          zip_proc.terminate()
+        if self.IsOnline():
+          self.RunShellCommand(['rm', zip_on_device], check_return=True)
+
+  @staticmethod
+  def _CreateDeviceZip(zip_path, host_device_tuples):
+    with zipfile.ZipFile(zip_path, 'w') as zip_file:
+      for host_path, device_path in host_device_tuples:
+        zip_utils.WriteToZipFile(zip_file, host_path, device_path)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def FileExists(self, device_path, timeout=None, retries=None):
+    """Checks whether the given file exists on the device.
+
+    Args:
+      device_path: A string containing the absolute path to the file on the
+                   device.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if the file exists on the device, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    try:
+      self.RunShellCommand(['test', '-e', device_path], check_return=True)
+      return True
+    except device_errors.AdbCommandFailedError:
+      return False
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def PullFile(self, device_path, host_path, timeout=None, retries=None):
+    """Pull a file from the device.
+
+    Args:
+      device_path: A string containing the absolute path of the file to pull
+                   from the device.
+      host_path: A string containing the absolute path of the destination on
+                 the host.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError on timeout.
+    """
+    # Create the base dir if it doesn't exist already
+    dirname = os.path.dirname(host_path)
+    if dirname and not os.path.exists(dirname):
+      os.makedirs(dirname)
+    self.adb.Pull(device_path, host_path)
+
+  def _ReadFileWithPull(self, device_path):
+    try:
+      d = tempfile.mkdtemp()
+      host_temp_path = os.path.join(d, 'tmp_ReadFileWithPull')
+      self.adb.Pull(device_path, host_temp_path)
+      with open(host_temp_path, 'r') as host_temp:
+        return host_temp.read()
+    finally:
+      if os.path.exists(d):
+        shutil.rmtree(d)
+
+  _LS_RE = re.compile(
+      r'(?P<perms>\S+) +(?P<owner>\S+) +(?P<group>\S+) +(?:(?P<size>\d+) +)?'
+      + r'(?P<date>\S+) +(?P<time>\S+) +(?P<name>.+)$')
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def ReadFile(self, device_path, as_root=False, force_pull=False,
+               timeout=None, retries=None):
+    """Reads the contents of a file from the device.
+
+    Args:
+      device_path: A string containing the absolute path of the file to read
+                   from the device.
+      as_root: A boolean indicating whether the read should be executed with
+               root privileges.
+      force_pull: A boolean indicating whether to force the operation to be
+          performed by pulling a file from the device. The default is, when the
+          contents are short, to retrieve the contents using cat instead.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The contents of |device_path| as a string. Contents are interpreted using
+      universal newlines, so the caller will see them encoded as '\n'. Also,
+      all lines will be terminated.
+
+    Raises:
+      AdbCommandFailedError if the file can't be read.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    def get_size(path):
+      # TODO(jbudorick): Implement a generic version of Stat() that handles
+      # as_root=True, then switch this implementation to use that.
+      ls_out = self.RunShellCommand(['ls', '-l', device_path], as_root=as_root,
+                                    check_return=True)
+      for line in ls_out:
+        m = self._LS_RE.match(line)
+        if m and m.group('name') == posixpath.basename(device_path):
+          return int(m.group('size'))
+      logging.warning('Could not determine size of %s.', device_path)
+      return None
+
+    if (not force_pull
+        and 0 < get_size(device_path) <= self._MAX_ADB_OUTPUT_LENGTH):
+      return _JoinLines(self.RunShellCommand(
+          ['cat', device_path], as_root=as_root, check_return=True))
+    elif as_root and self.NeedsSU():
+      with device_temp_file.DeviceTempFile(self.adb) as device_temp:
+        self.RunShellCommand(['cp', device_path, device_temp.name],
+                             as_root=True, check_return=True)
+        return self._ReadFileWithPull(device_temp.name)
+    else:
+      return self._ReadFileWithPull(device_path)
+
+  def _WriteFileWithPush(self, device_path, contents):
+    with tempfile.NamedTemporaryFile() as host_temp:
+      host_temp.write(contents)
+      host_temp.flush()
+      self.adb.Push(host_temp.name, device_path)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def WriteFile(self, device_path, contents, as_root=False, force_push=False,
+                timeout=None, retries=None):
+    """Writes |contents| to a file on the device.
+
+    Args:
+      device_path: A string containing the absolute path to the file to write
+          on the device.
+      contents: A string containing the data to write to the device.
+      as_root: A boolean indicating whether the write should be executed with
+          root privileges (if available).
+      force_push: A boolean indicating whether to force the operation to be
+          performed by pushing a file to the device. The default is, when the
+          contents are short, to pass the contents using a shell script instead.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the file could not be written on the device.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    if not force_push and len(contents) < self._MAX_ADB_COMMAND_LENGTH:
+      # If the contents are small, for efficiency we write the contents with
+      # a shell command rather than pushing a file.
+      cmd = 'echo -n %s > %s' % (cmd_helper.SingleQuote(contents),
+                                 cmd_helper.SingleQuote(device_path))
+      self.RunShellCommand(cmd, as_root=as_root, check_return=True)
+    elif as_root and self.NeedsSU():
+      # Adb does not allow pushing with su, so we first push to a temp file
+      # in a safe location, and then copy it to the desired location with su.
+      with device_temp_file.DeviceTempFile(self.adb) as device_temp:
+        self._WriteFileWithPush(device_temp.name, contents)
+        # Here we need 'cp' rather than 'mv' because the temp and
+        # destination files might be on different file systems (e.g.
+        # on internal storage and an external sd card).
+        self.RunShellCommand(['cp', device_temp.name, device_path],
+                             as_root=True, check_return=True)
+    else:
+      # If root is not needed, we can push directly to the desired location.
+      self._WriteFileWithPush(device_path, contents)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def Ls(self, device_path, timeout=None, retries=None):
+    """Lists the contents of a directory on the device.
+
+    Args:
+      device_path: A string containing the path of the directory on the device
+                   to list.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A list of pairs (filename, stat) for each file found in the directory,
+      where the stat object has the properties: st_mode, st_size, and st_time.
+
+    Raises:
+      AdbCommandFailedError if |device_path| does not specify a valid and
+          accessible directory in the device.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    return self.adb.Ls(device_path)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def Stat(self, device_path, timeout=None, retries=None):
+    """Get the stat attributes of a file or directory on the device.
+
+    Args:
+      device_path: A string containing the path on the device from which to
+                   get the stat attributes.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A stat object with the properties: st_mode, st_size, and st_time
+
+    Raises:
+      CommandFailedError if device_path cannot be found on the device.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    dirname, target = device_path.rsplit('/', 1)
+    for filename, stat in self.adb.Ls(dirname):
+      if filename == target:
+        return stat
+    raise device_errors.CommandFailedError(
+        'Cannot find file or directory: %r' % device_path, str(self))
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SetJavaAsserts(self, enabled, timeout=None, retries=None):
+    """Enables or disables Java asserts.
+
+    Args:
+      enabled: A boolean indicating whether Java asserts should be enabled
+               or disabled.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if the device-side property changed and a restart is required as a
+      result, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    def find_property(lines, property_name):
+      for index, line in enumerate(lines):
+        if line.strip() == '':
+          continue
+        key, value = (s.strip() for s in line.split('=', 1))
+        if key == property_name:
+          return index, value
+      return None, ''
+
+    new_value = 'all' if enabled else ''
+
+    # First ensure the desired property is persisted.
+    try:
+      properties = self.ReadFile(
+          constants.DEVICE_LOCAL_PROPERTIES_PATH).splitlines()
+    except device_errors.CommandFailedError:
+      properties = []
+    index, value = find_property(properties, self.JAVA_ASSERT_PROPERTY)
+    if new_value != value:
+      if new_value:
+        new_line = '%s=%s' % (self.JAVA_ASSERT_PROPERTY, new_value)
+        if index is None:
+          properties.append(new_line)
+        else:
+          properties[index] = new_line
+      else:
+        assert index is not None # since new_value == '' and new_value != value
+        properties.pop(index)
+      self.WriteFile(constants.DEVICE_LOCAL_PROPERTIES_PATH,
+                     _JoinLines(properties))
+
+    # Next, check the current runtime value is what we need, and
+    # if not, set it and report that a reboot is required.
+    value = self.GetProp(self.JAVA_ASSERT_PROPERTY)
+    if new_value != value:
+      self.SetProp(self.JAVA_ASSERT_PROPERTY, new_value)
+      return True
+    else:
+      return False
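+
+  # Example (illustrative sketch): a True return value means the runtime
+  # property changed, so a restart is needed for it to take effect.
+  #   if device.SetJavaAsserts(True):
+  #     device.Reboot()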
+
+  @property
+  def language(self):
+    """Returns the language setting on the device."""
+    return self.GetProp('persist.sys.language', cache=False)
+
+  @property
+  def country(self):
+    """Returns the country setting on the device."""
+    return self.GetProp('persist.sys.country', cache=False)
+
+  @property
+  def screen_density(self):
+    """Returns the screen density of the device."""
+    DPI_TO_DENSITY = {
+      120: 'ldpi',
+      160: 'mdpi',
+      240: 'hdpi',
+      320: 'xhdpi',
+      480: 'xxhdpi',
+      640: 'xxxhdpi',
+    }
+    dpi = int(self.GetProp('ro.sf.lcd_density', cache=True))
+    return DPI_TO_DENSITY.get(dpi, 'tvdpi')
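+
+  # Example (illustrative): densities not listed in DPI_TO_DENSITY, such as
+  # the tvdpi density of 213, fall back to 'tvdpi'.
+  #   density = device.screen_density  # e.g. 'xhdpi' on a 320 dpi device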
+
+  @property
+  def build_description(self):
+    """Returns the build description of the system.
+
+    For example:
+      nakasi-user 4.4.4 KTU84P 1227136 release-keys
+    """
+    return self.GetProp('ro.build.description', cache=True)
+
+  @property
+  def build_fingerprint(self):
+    """Returns the build fingerprint of the system.
+
+    For example:
+      google/nakasi/grouper:4.4.4/KTU84P/1227136:user/release-keys
+    """
+    return self.GetProp('ro.build.fingerprint', cache=True)
+
+  @property
+  def build_id(self):
+    """Returns the build ID of the system (e.g. 'KTU84P')."""
+    return self.GetProp('ro.build.id', cache=True)
+
+  @property
+  def build_product(self):
+    """Returns the build product of the system (e.g. 'grouper')."""
+    return self.GetProp('ro.build.product', cache=True)
+
+  @property
+  def build_type(self):
+    """Returns the build type of the system (e.g. 'user')."""
+    return self.GetProp('ro.build.type', cache=True)
+
+  @property
+  def build_version_sdk(self):
+    """Returns the build version sdk of the system as a number (e.g. 19).
+
+    For version code numbers see:
+    http://developer.android.com/reference/android/os/Build.VERSION_CODES.html
+
+    For named constants see:
+    pylib.constants.ANDROID_SDK_VERSION_CODES
+
+    Raises:
+      CommandFailedError if the build version sdk is not a number.
+    """
+    value = self.GetProp('ro.build.version.sdk', cache=True)
+    try:
+      return int(value)
+    except ValueError:
+      raise device_errors.CommandFailedError(
+          'Invalid build version sdk: %r' % value)
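+
+  # Example (illustrative sketch): the integer value makes SDK-level gating
+  # straightforward, e.g. split APK installs require SDK 21 or newer.
+  #   if device.build_version_sdk >= 21:
+  #     device.InstallSplitApk(base_apk, split_apks)  # hypothetical paths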
+
+  @property
+  def product_cpu_abi(self):
+    """Returns the product cpu abi of the device (e.g. 'armeabi-v7a')."""
+    return self.GetProp('ro.product.cpu.abi', cache=True)
+
+  @property
+  def product_model(self):
+    """Returns the name of the product model (e.g. 'Nexus 7')."""
+    return self.GetProp('ro.product.model', cache=True)
+
+  @property
+  def product_name(self):
+    """Returns the product name of the device (e.g. 'nakasi')."""
+    return self.GetProp('ro.product.name', cache=True)
+
+  def GetProp(self, property_name, cache=False, timeout=DEFAULT,
+              retries=DEFAULT):
+    """Gets a property from the device.
+
+    Args:
+      property_name: A string containing the name of the property to get from
+                     the device.
+      cache: A boolean indicating whether to cache the value of this property.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The value of the device's |property_name| property.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    assert isinstance(property_name, basestring), (
+        "property_name is not a string: %r" % property_name)
+
+    cache_key = '_prop:' + property_name
+    if cache and cache_key in self._cache:
+      return self._cache[cache_key]
+    else:
+      # timeout and retries are handled down in RunShellCommand, because we
+      # don't want to apply them in the other branch, when reading from the
+      # cache.
+      value = self.RunShellCommand(
+          ['getprop', property_name], single_line=True, check_return=True,
+          timeout=self._default_timeout if timeout is DEFAULT else timeout,
+          retries=self._default_retries if retries is DEFAULT else retries)
+      if cache or cache_key in self._cache:
+        self._cache[cache_key] = value
+      return value
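+
+  # Example (illustrative sketch): with cache=True the value is stored under
+  # a '_prop:'-prefixed key, so repeated reads of immutable ro.* properties
+  # skip the shell round trip.
+  #   model = device.GetProp('ro.product.model', cache=True)  # runs getprop
+  #   model = device.GetProp('ro.product.model', cache=True)  # from cache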
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SetProp(self, property_name, value, check=False, timeout=None,
+              retries=None):
+    """Sets a property on the device.
+
+    Args:
+      property_name: A string containing the name of the property to set on
+                     the device.
+      value: A string containing the value to set to the property on the
+             device.
+      check: A boolean indicating whether to check that the property was
+             successfully set on the device.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if check is true and the property was not correctly
+        set on the device (e.g. because it is not rooted).
+      CommandTimeoutError on timeout.
+    """
+    assert isinstance(property_name, basestring), (
+        "property_name is not a string: %r" % property_name)
+    assert isinstance(value, basestring), "value is not a string: %r" % value
+
+    self.RunShellCommand(['setprop', property_name, value], check_return=True)
+    # GetProp stores cached values under a '_prop:'-prefixed key, so use the
+    # same key here to invalidate the cached value.
+    cache_key = '_prop:' + property_name
+    if cache_key in self._cache:
+      del self._cache[cache_key]
+    # TODO(perezju) remove the option and make the check mandatory, but using a
+    # single shell script to both set- and getprop.
+    if check and value != self.GetProp(property_name):
+      raise device_errors.CommandFailedError(
+          'Unable to set property %r on the device to %r'
+          % (property_name, value), str(self))
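+
+  # Example (illustrative sketch): check=True reads the property back and
+  # raises CommandFailedError if the write did not stick, e.g. because the
+  # device is not rooted.
+  #   device.SetProp('service.adb.root', '0', check=True)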
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetABI(self, timeout=None, retries=None):
+    """Gets the device main ABI.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The device's main ABI name.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    return self.GetProp('ro.product.cpu.abi')
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetPids(self, process_name, timeout=None, retries=None):
+    """Returns the PIDs of processes with the given name.
+
+    Note that the |process_name| is often the package name.
+
+    Args:
+      process_name: A string containing the process name to get the PIDs for.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A dict mapping each matching process name to its PID, for every
+      process whose name contains the provided |process_name|.
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    procs_pids = {}
+    try:
+      ps_output = self._RunPipedShellCommand(
+          'ps | grep -F %s' % cmd_helper.SingleQuote(process_name))
+    except device_errors.AdbShellCommandFailedError as e:
+      if e.status and isinstance(e.status, list) and not e.status[0]:
+        # If ps succeeded but grep failed, there were no processes with the
+        # given name.
+        return procs_pids
+      else:
+        raise
+
+    for line in ps_output:
+      try:
+        ps_data = line.split()
+        if process_name in ps_data[-1]:
+          procs_pids[ps_data[-1]] = ps_data[1]
+      except IndexError:
+        pass
+    return procs_pids
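+
+  # Example (illustrative sketch): the grep matches substrings, so a package
+  # name can match both the main process and its service processes; each
+  # matching name maps to a single PID.
+  #   for name, pid in device.GetPids('com.example.app').iteritems():
+  #     device.RunShellCommand(['kill', '-9', pid], as_root=True)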
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def TakeScreenshot(self, host_path=None, timeout=None, retries=None):
+    """Takes a screenshot of the device.
+
+    Args:
+      host_path: A string containing the path on the host to save the
+                 screenshot to. If None, a file name in the current
+                 directory will be generated.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The name of the file on the host to which the screenshot was saved.
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    if not host_path:
+      host_path = os.path.abspath('screenshot-%s.png' % _GetTimeStamp())
+    with device_temp_file.DeviceTempFile(self.adb, suffix='.png') as device_tmp:
+      self.RunShellCommand(['/system/bin/screencap', '-p', device_tmp.name],
+                           check_return=True)
+      self.PullFile(device_tmp.name, host_path)
+    return host_path
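+
+  # Example (illustrative sketch): the capture goes to a device temp file
+  # that is pulled to the host and cleaned up automatically.
+  #   path = device.TakeScreenshot()  # screenshot-<timestamp>.png in cwd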
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetMemoryUsageForPid(self, pid, timeout=None, retries=None):
+    """Gets the memory usage for the given PID.
+
+    Args:
+      pid: PID of the process.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A dict containing memory usage statistics for the PID. May include:
+        Size, Rss, Pss, Shared_Clean, Shared_Dirty, Private_Clean,
+        Private_Dirty, VmHWM
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    result = collections.defaultdict(int)
+
+    try:
+      result.update(self._GetMemoryUsageForPidFromSmaps(pid))
+    except device_errors.CommandFailedError:
+      logging.exception('Error getting memory usage from smaps')
+
+    try:
+      result.update(self._GetMemoryUsageForPidFromStatus(pid))
+    except device_errors.CommandFailedError:
+      logging.exception('Error getting memory usage from status')
+
+    return result
+
+  def _GetMemoryUsageForPidFromSmaps(self, pid):
+    SMAPS_COLUMNS = (
+        'Size', 'Rss', 'Pss', 'Shared_Clean', 'Shared_Dirty', 'Private_Clean',
+        'Private_Dirty')
+
+    showmap_out = self._RunPipedShellCommand(
+        'showmap %d | grep TOTAL' % int(pid), as_root=True)
+
+    split_totals = showmap_out[-1].split()
+    if (not split_totals
+        or len(split_totals) != 9
+        or split_totals[-1] != 'TOTAL'):
+      raise device_errors.CommandFailedError(
+          'Invalid output from showmap: %s' % '\n'.join(showmap_out))
+
+    return dict(itertools.izip(SMAPS_COLUMNS, (int(n) for n in split_totals)))
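+
+  # Note (illustrative): the check above expects the showmap TOTAL line to
+  # hold eight numeric columns followed by the literal 'TOTAL', e.g.
+  #   1024 768 512 256 128 64 32 9 TOTAL
+  # itertools.izip stops at the shorter SMAPS_COLUMNS, pairing the first
+  # seven numbers with their column names.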
+
+  def _GetMemoryUsageForPidFromStatus(self, pid):
+    for line in self.ReadFile(
+        '/proc/%s/status' % str(pid), as_root=True).splitlines():
+      if line.startswith('VmHWM:'):
+        return {'VmHWM': int(line.split()[1])}
+    else:
+      raise device_errors.CommandFailedError(
+          'Could not find memory peak value for pid %s' % str(pid), str(self))
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetLogcatMonitor(self, timeout=None, retries=None, *args, **kwargs):
+    """Returns a new LogcatMonitor associated with this device.
+
+    Parameters passed to this function are passed directly to
+    |logcat_monitor.LogcatMonitor| and are documented there.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+    """
+    return logcat_monitor.LogcatMonitor(self.adb, *args, **kwargs)
+
+  def GetClientCache(self, client_name):
+    """Returns client cache."""
+    if client_name not in self._client_caches:
+      self._client_caches[client_name] = {}
+    return self._client_caches[client_name]
+
+  def _ClearCache(self):
+    """Clears all caches."""
+    for client in self._client_caches:
+      self._client_caches[client].clear()
+    self._cache.clear()
+
+  @classmethod
+  def parallel(cls, devices=None, async=False):
+    """Creates a Parallelizer to operate over the provided list of devices.
+
+    If |devices| is either |None| or an empty list, the Parallelizer will
+    operate over all attached devices that have not been blacklisted.
+
+    Args:
+      devices: A list of either DeviceUtils instances or objects from which
+               DeviceUtils instances can be constructed. If None, all
+               attached devices will be used.
+      async: If true, returns a Parallelizer that runs operations
+             asynchronously.
+
+    Returns:
+      A Parallelizer operating over |devices|.
+    """
+    if not devices:
+      devices = cls.HealthyDevices()
+      if not devices:
+        raise device_errors.NoDevicesError()
+
+    devices = [d if isinstance(d, cls) else cls(d) for d in devices]
+    if async:
+      return parallelizer.Parallelizer(devices)
+    else:
+      return parallelizer.SyncParallelizer(devices)
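+
+  # Example (illustrative sketch): the returned Parallelizer fans each call
+  # out across the devices.
+  #   DeviceUtils.parallel().Reboot()            # all healthy devices, sync
+  #   DeviceUtils.parallel(async=True).Reboot()  # same, without waiting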
+
+  @classmethod
+  def HealthyDevices(cls):
+    """Returns a DeviceUtils instance for each attached, healthy device.
+
+    Devices whose serials appear in the blacklist are skipped with a
+    warning.
+    """
+    blacklist = device_blacklist.ReadBlacklist()
+    def blacklisted(adb):
+      if adb.GetDeviceSerial() in blacklist:
+        logging.warning('Device %s is blacklisted.', adb.GetDeviceSerial())
+        return True
+      return False
+
+    return [cls(adb) for adb in adb_wrapper.AdbWrapper.Devices()
+            if not blacklisted(adb)]
diff --git a/build/android/pylib/device/device_utils_device_test.py b/build/android/pylib/device/device_utils_device_test.py
new file mode 100755
index 0000000..daae2b6
--- /dev/null
+++ b/build/android/pylib/device/device_utils_device_test.py
@@ -0,0 +1,211 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of device_utils.py (mostly DeviceUtils).
+These tests run against real attached devices.
+"""
+
+import os
+import tempfile
+import unittest
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.device import adb_wrapper
+from pylib.device import device_utils
+
+_OLD_CONTENTS = "foo"
+_NEW_CONTENTS = "bar"
+_DEVICE_DIR = "/data/local/tmp/device_utils_test"
+_SUB_DIR = "sub"
+_SUB_DIR1 = "sub1"
+_SUB_DIR2 = "sub2"
+
+
+class DeviceUtilsPushDeleteFilesTest(unittest.TestCase):
+
+  def setUp(self):
+    devices = adb_wrapper.AdbWrapper.Devices()
+    assert devices, 'A device must be attached'
+    self.adb = devices[0]
+    self.adb.WaitForDevice()
+    self.device = device_utils.DeviceUtils(
+        self.adb, default_timeout=10, default_retries=0)
+    default_build_type = os.environ.get('BUILDTYPE', 'Debug')
+    constants.SetBuildType(default_build_type)
+
+  @staticmethod
+  def _MakeTempFile(contents):
+    """Make a temporary file with the given contents.
+
+    Args:
+      contents: string to write to the temporary file.
+
+    Returns:
+      A tuple containing the absolute path to the file and the file name.
+    """
+    fi, path = tempfile.mkstemp(text=True)
+    with os.fdopen(fi, 'w') as f:
+      f.write(contents)
+    file_name = os.path.basename(path)
+    return (path, file_name)
+
+  @staticmethod
+  def _MakeTempFileGivenDir(directory, contents):
+    """Make a temporary file under the given directory
+    with the given contents
+
+    Args:
+      directory: the temp directory to create the file
+      contents: string to write to the temp file
+
+    Returns:
+      the list contains the absolute path to the file and the file name
+    """
+    fi, path = tempfile.mkstemp(dir=directory, text=True)
+    with os.fdopen(fi, 'w') as f:
+      f.write(contents)
+    file_name = os.path.basename(path)
+    return (path, file_name)
+
+  @staticmethod
+  def _ChangeTempFile(path, contents):
+    # Note: use the built-in open, not os.open; os.open takes integer flags
+    # and does not return a context manager.
+    with open(path, 'w') as f:
+      f.write(contents)
+
+  @staticmethod
+  def _DeleteTempFile(path):
+    os.remove(path)
+
+  def testPushChangedFiles_noFileChange(self):
+    (host_file_path, file_name) = self._MakeTempFile(_OLD_CONTENTS)
+    device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
+    self.adb.Push(host_file_path, device_file_path)
+    self.device.PushChangedFiles([(host_file_path, device_file_path)])
+    result = self.device.RunShellCommand(['cat', device_file_path],
+                                         single_line=True)
+    self.assertEqual(_OLD_CONTENTS, result)
+
+    cmd_helper.RunCmd(['rm', host_file_path])
+    self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
+
+  def testPushChangedFiles_singleFileChange(self):
+    (host_file_path, file_name) = self._MakeTempFile(_OLD_CONTENTS)
+    device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
+    self.adb.Push(host_file_path, device_file_path)
+
+    with open(host_file_path, 'w') as f:
+      f.write(_NEW_CONTENTS)
+    self.device.PushChangedFiles([(host_file_path, device_file_path)])
+    result = self.device.RunShellCommand(['cat', device_file_path],
+                                         single_line=True)
+    self.assertEqual(_NEW_CONTENTS, result)
+
+    cmd_helper.RunCmd(['rm', host_file_path])
+    self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
+
+  def testDeleteFiles(self):
+    host_tmp_dir = tempfile.mkdtemp()
+    (host_file_path, file_name) = self._MakeTempFileGivenDir(
+        host_tmp_dir, _OLD_CONTENTS)
+
+    device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
+    self.adb.Push(host_file_path, device_file_path)
+
+    cmd_helper.RunCmd(['rm', host_file_path])
+    self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
+                                 delete_device_stale=True)
+    result = self.device.RunShellCommand(['ls', _DEVICE_DIR], single_line=True)
+    self.assertEqual('', result)
+
+    cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
+    self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
+
+  def testPushAndDeleteFiles_noSubDir(self):
+    host_tmp_dir = tempfile.mkdtemp()
+    (host_file_path1, file_name1) = self._MakeTempFileGivenDir(
+        host_tmp_dir, _OLD_CONTENTS)
+    (host_file_path2, file_name2) = self._MakeTempFileGivenDir(
+        host_tmp_dir, _OLD_CONTENTS)
+
+    device_file_path1 = "%s/%s" % (_DEVICE_DIR, file_name1)
+    device_file_path2 = "%s/%s" % (_DEVICE_DIR, file_name2)
+    self.adb.Push(host_file_path1, device_file_path1)
+    self.adb.Push(host_file_path2, device_file_path2)
+
+    with open(host_file_path1, 'w') as f:
+      f.write(_NEW_CONTENTS)
+    cmd_helper.RunCmd(['rm', host_file_path2])
+
+    self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
+                                 delete_device_stale=True)
+    result = self.device.RunShellCommand(['cat', device_file_path1],
+                                         single_line=True)
+    self.assertEqual(_NEW_CONTENTS, result)
+    result = self.device.RunShellCommand(['ls', _DEVICE_DIR], single_line=True)
+    self.assertEqual(file_name1, result)
+
+    self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
+    cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
+
+  def testPushAndDeleteFiles_SubDir(self):
+    host_tmp_dir = tempfile.mkdtemp()
+    host_sub_dir1 = "%s/%s" % (host_tmp_dir, _SUB_DIR1)
+    host_sub_dir2 = "%s/%s/%s" % (host_tmp_dir, _SUB_DIR, _SUB_DIR2)
+    cmd_helper.RunCmd(['mkdir', '-p', host_sub_dir1])
+    cmd_helper.RunCmd(['mkdir', '-p', host_sub_dir2])
+
+    (host_file_path1, file_name1) = self._MakeTempFileGivenDir(
+        host_tmp_dir, _OLD_CONTENTS)
+    (host_file_path2, file_name2) = self._MakeTempFileGivenDir(
+        host_tmp_dir, _OLD_CONTENTS)
+    (host_file_path3, file_name3) = self._MakeTempFileGivenDir(
+        host_sub_dir1, _OLD_CONTENTS)
+    (host_file_path4, file_name4) = self._MakeTempFileGivenDir(
+        host_sub_dir2, _OLD_CONTENTS)
+
+    device_file_path1 = "%s/%s" % (_DEVICE_DIR, file_name1)
+    device_file_path2 = "%s/%s" % (_DEVICE_DIR, file_name2)
+    device_file_path3 = "%s/%s/%s" % (_DEVICE_DIR, _SUB_DIR1, file_name3)
+    device_file_path4 = "%s/%s/%s/%s" % (_DEVICE_DIR, _SUB_DIR,
+                                         _SUB_DIR2, file_name4)
+
+    self.adb.Push(host_file_path1, device_file_path1)
+    self.adb.Push(host_file_path2, device_file_path2)
+    self.adb.Push(host_file_path3, device_file_path3)
+    self.adb.Push(host_file_path4, device_file_path4)
+
+    with open(host_file_path1, 'w') as f:
+      f.write(_NEW_CONTENTS)
+    cmd_helper.RunCmd(['rm', host_file_path2])
+    cmd_helper.RunCmd(['rm', host_file_path4])
+
+    self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
+                                 delete_device_stale=True)
+    result = self.device.RunShellCommand(['cat', device_file_path1],
+                                         single_line=True)
+    self.assertEqual(_NEW_CONTENTS, result)
+
+    result = self.device.RunShellCommand(['ls', _DEVICE_DIR])
+    self.assertIn(file_name1, result)
+    self.assertIn(_SUB_DIR1, result)
+    self.assertIn(_SUB_DIR, result)
+    self.assertEqual(3, len(result))
+
+    result = self.device.RunShellCommand(['cat', device_file_path3],
+                                         single_line=True)
+    self.assertEqual(_OLD_CONTENTS, result)
+
+    result = self.device.RunShellCommand(["ls", "%s/%s/%s"
+                                          % (_DEVICE_DIR, _SUB_DIR, _SUB_DIR2)],
+                                         single_line=True)
+    self.assertEqual('', result)
+
+    self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
+    cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/device/device_utils_test.py b/build/android/pylib/device/device_utils_test.py
new file mode 100755
index 0000000..6699673
--- /dev/null
+++ b/build/android/pylib/device/device_utils_test.py
@@ -0,0 +1,1845 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of device_utils.py (mostly DeviceUtils).
+"""
+
+# pylint: disable=C0321
+# pylint: disable=W0212
+# pylint: disable=W0613
+
+import collections
+import datetime
+import logging
+import os
+import re
+import sys
+import unittest
+
+from pylib import android_commands
+from pylib import cmd_helper
+from pylib import constants
+from pylib import device_signal
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.device import intent
+from pylib.sdk import split_select
+from pylib.utils import mock_calls
+
+# RunCommand from third_party/android_testrunner/run_command.py is mocked
+# below, so its path needs to be in sys.path.
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'android_testrunner'))
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+
+class DeviceUtilsInitTest(unittest.TestCase):
+
+  def testInitWithStr(self):
+    serial_as_str = str('0123456789abcdef')
+    d = device_utils.DeviceUtils('0123456789abcdef')
+    self.assertEqual(serial_as_str, d.adb.GetDeviceSerial())
+
+  def testInitWithUnicode(self):
+    serial_as_unicode = unicode('fedcba9876543210')
+    d = device_utils.DeviceUtils(serial_as_unicode)
+    self.assertEqual(serial_as_unicode, d.adb.GetDeviceSerial())
+
+  def testInitWithAdbWrapper(self):
+    serial = '123456789abcdef0'
+    a = adb_wrapper.AdbWrapper(serial)
+    d = device_utils.DeviceUtils(a)
+    self.assertEqual(serial, d.adb.GetDeviceSerial())
+
+  def testInitWithAndroidCommands(self):
+    serial = '0fedcba987654321'
+    a = android_commands.AndroidCommands(device=serial)
+    d = device_utils.DeviceUtils(a)
+    self.assertEqual(serial, d.adb.GetDeviceSerial())
+
+  def testInitWithMissing_fails(self):
+    with self.assertRaises(ValueError):
+      device_utils.DeviceUtils(None)
+    with self.assertRaises(ValueError):
+      device_utils.DeviceUtils('')
+
+
+class DeviceUtilsGetAVDsTest(mock_calls.TestCase):
+
+  def testGetAVDs(self):
+    with self.assertCall(
+        mock.call.pylib.cmd_helper.GetCmdOutput([mock.ANY, 'list', 'avd']),
+        'Available Android Virtual Devices:\n'
+        '    Name: my_android5.0\n'
+        '    Path: /some/path/to/.android/avd/my_android5.0.avd\n'
+        '  Target: Android 5.0 (API level 21)\n'
+        ' Tag/ABI: default/x86\n'
+        '    Skin: WVGA800\n'):
+      self.assertEquals(['my_android5.0'],
+                        device_utils.GetAVDs())
+
+
+class DeviceUtilsRestartServerTest(mock_calls.TestCase):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testRestartServer_succeeds(self):
+    with self.assertCalls(
+        mock.call.pylib.device.adb_wrapper.AdbWrapper.KillServer(),
+        (mock.call.pylib.cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb']),
+         (1, '')),
+        mock.call.pylib.device.adb_wrapper.AdbWrapper.StartServer(),
+        (mock.call.pylib.cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb']),
+         (1, '')),
+        (mock.call.pylib.cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb']),
+         (0, '123\n'))):
+      device_utils.RestartServer()
+
+
+class MockTempFile(object):
+
+  def __init__(self, name='/tmp/some/file'):
+    self.file = mock.MagicMock(spec=file)
+    self.file.name = name
+    self.file.name_quoted = cmd_helper.SingleQuote(name)
+
+  def __enter__(self):
+    return self.file
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    pass
+
+  @property
+  def name(self):
+    return self.file.name
+
+
+class _PatchedFunction(object):
+  def __init__(self, patched=None, mocked=None):
+    self.patched = patched
+    self.mocked = mocked
+
+
+def _AdbWrapperMock(test_serial):
+  adb = mock.Mock(spec=adb_wrapper.AdbWrapper)
+  adb.__str__ = mock.Mock(return_value=test_serial)
+  adb.GetDeviceSerial.return_value = test_serial
+  return adb
+
+
+class DeviceUtilsTest(mock_calls.TestCase):
+
+  def setUp(self):
+    self.adb = _AdbWrapperMock('0123456789abcdef')
+    self.device = device_utils.DeviceUtils(
+        self.adb, default_timeout=10, default_retries=0)
+    self.watchMethodCalls(self.call.adb, ignore=['GetDeviceSerial'])
+
+  def AdbCommandError(self, args=None, output=None, status=None, msg=None):
+    if args is None:
+      args = ['[unspecified]']
+    return mock.Mock(side_effect=device_errors.AdbCommandFailedError(
+        args, output, status, msg, str(self.device)))
+
+  def CommandError(self, msg=None):
+    if msg is None:
+      msg = 'Command failed'
+    return mock.Mock(side_effect=device_errors.CommandFailedError(
+        msg, str(self.device)))
+
+  def ShellError(self, output=None, status=1):
+    def action(cmd, *args, **kwargs):
+      raise device_errors.AdbShellCommandFailedError(
+          cmd, output, status, str(self.device))
+    if output is None:
+      output = 'Permission denied\n'
+    return action
+
+  def TimeoutError(self, msg=None):
+    if msg is None:
+      msg = 'Operation timed out'
+    return mock.Mock(side_effect=device_errors.CommandTimeoutError(
+        msg, str(self.device)))
+
+
+class DeviceUtilsEqTest(DeviceUtilsTest):
+
+  def testEq_equal_deviceUtils(self):
+    other = device_utils.DeviceUtils(_AdbWrapperMock('0123456789abcdef'))
+    self.assertTrue(self.device == other)
+    self.assertTrue(other == self.device)
+
+  def testEq_equal_adbWrapper(self):
+    other = adb_wrapper.AdbWrapper('0123456789abcdef')
+    self.assertTrue(self.device == other)
+    self.assertTrue(other == self.device)
+
+  def testEq_equal_string(self):
+    other = '0123456789abcdef'
+    self.assertTrue(self.device == other)
+    self.assertTrue(other == self.device)
+
+  def testEq_devicesNotEqual(self):
+    other = device_utils.DeviceUtils(_AdbWrapperMock('0123456789abcdee'))
+    self.assertFalse(self.device == other)
+    self.assertFalse(other == self.device)
+
+  def testEq_identity(self):
+    self.assertTrue(self.device == self.device)
+
+  def testEq_serialInList(self):
+    devices = [self.device]
+    self.assertTrue('0123456789abcdef' in devices)
+
+
+class DeviceUtilsLtTest(DeviceUtilsTest):
+
+  def testLt_lessThan(self):
+    other = device_utils.DeviceUtils(_AdbWrapperMock('ffffffffffffffff'))
+    self.assertTrue(self.device < other)
+    self.assertTrue(other > self.device)
+
+  def testLt_greaterThan_lhs(self):
+    other = device_utils.DeviceUtils(_AdbWrapperMock('0000000000000000'))
+    self.assertFalse(self.device < other)
+    self.assertFalse(other > self.device)
+
+  def testLt_equal(self):
+    other = device_utils.DeviceUtils(_AdbWrapperMock('0123456789abcdef'))
+    self.assertFalse(self.device < other)
+    self.assertFalse(other > self.device)
+
+  def testLt_sorted(self):
+    devices = [
+        device_utils.DeviceUtils(_AdbWrapperMock('ffffffffffffffff')),
+        device_utils.DeviceUtils(_AdbWrapperMock('0000000000000000')),
+    ]
+    sorted_devices = sorted(devices)
+    self.assertEquals('0000000000000000',
+                      sorted_devices[0].adb.GetDeviceSerial())
+    self.assertEquals('ffffffffffffffff',
+                      sorted_devices[1].adb.GetDeviceSerial())
+
+
+class DeviceUtilsStrTest(DeviceUtilsTest):
+
+  def testStr_returnsSerial(self):
+    with self.assertCalls(
+        (self.call.adb.GetDeviceSerial(), '0123456789abcdef')):
+      self.assertEqual('0123456789abcdef', str(self.device))
+
+
+class DeviceUtilsIsOnlineTest(DeviceUtilsTest):
+
+  def testIsOnline_true(self):
+    with self.assertCall(self.call.adb.GetState(), 'device'):
+      self.assertTrue(self.device.IsOnline())
+
+  def testIsOnline_false(self):
+    with self.assertCall(self.call.adb.GetState(), 'offline'):
+      self.assertFalse(self.device.IsOnline())
+
+  def testIsOnline_error(self):
+    with self.assertCall(self.call.adb.GetState(), self.CommandError()):
+      self.assertFalse(self.device.IsOnline())
+
+
+class DeviceUtilsHasRootTest(DeviceUtilsTest):
+
+  def testHasRoot_true(self):
+    with self.assertCall(self.call.adb.Shell('ls /root'), 'foo\n'):
+      self.assertTrue(self.device.HasRoot())
+
+  def testHasRoot_false(self):
+    with self.assertCall(self.call.adb.Shell('ls /root'), self.ShellError()):
+      self.assertFalse(self.device.HasRoot())
+
+
+class DeviceUtilsEnableRootTest(DeviceUtilsTest):
+
+  def testEnableRoot_succeeds(self):
+    with self.assertCalls(
+        (self.call.device.IsUserBuild(), False),
+        self.call.adb.Root(),
+        self.call.device.WaitUntilFullyBooted()):
+      self.device.EnableRoot()
+
+  def testEnableRoot_userBuild(self):
+    with self.assertCalls(
+        (self.call.device.IsUserBuild(), True)):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.EnableRoot()
+
+  def testEnableRoot_rootFails(self):
+    with self.assertCalls(
+        (self.call.device.IsUserBuild(), False),
+        (self.call.adb.Root(), self.CommandError())):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.EnableRoot()
+
+
+class DeviceUtilsIsUserBuildTest(DeviceUtilsTest):
+
+  def testIsUserBuild_yes(self):
+    with self.assertCall(
+        self.call.device.GetProp('ro.build.type', cache=True), 'user'):
+      self.assertTrue(self.device.IsUserBuild())
+
+  def testIsUserBuild_no(self):
+    with self.assertCall(
+        self.call.device.GetProp('ro.build.type', cache=True), 'userdebug'):
+      self.assertFalse(self.device.IsUserBuild())
+
+
+class DeviceUtilsGetExternalStoragePathTest(DeviceUtilsTest):
+
+  def testGetExternalStoragePath_succeeds(self):
+    with self.assertCall(
+        self.call.adb.Shell('echo $EXTERNAL_STORAGE'), '/fake/storage/path\n'):
+      self.assertEquals('/fake/storage/path',
+                        self.device.GetExternalStoragePath())
+
+  def testGetExternalStoragePath_fails(self):
+    with self.assertCall(self.call.adb.Shell('echo $EXTERNAL_STORAGE'), '\n'):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.GetExternalStoragePath()
+
+
+class DeviceUtilsGetApplicationPathsTest(DeviceUtilsTest):
+
+  def testGetApplicationPaths_exists(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '19\n'),
+        (self.call.adb.Shell('pm path android'),
+         'package:/path/to/android.apk\n')):
+      self.assertEquals(['/path/to/android.apk'],
+                        self.device.GetApplicationPaths('android'))
+
+  def testGetApplicationPaths_notExists(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '19\n'),
+        (self.call.adb.Shell('pm path not.installed.app'), '')):
+      self.assertEquals([],
+                        self.device.GetApplicationPaths('not.installed.app'))
+
+  def testGetApplicationPaths_fails(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '19\n'),
+        (self.call.adb.Shell('pm path android'),
+         self.CommandError('ERROR. Is package manager running?\n'))):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.GetApplicationPaths('android')
+
+
+class DeviceUtilsGetApplicationDataDirectoryTest(DeviceUtilsTest):
+
+  def testGetApplicationDataDirectory_exists(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand(
+            'pm dump foo.bar.baz | grep dataDir='),
+        ['dataDir=/data/data/foo.bar.baz']):
+      self.assertEquals(
+          '/data/data/foo.bar.baz',
+          self.device.GetApplicationDataDirectory('foo.bar.baz'))
+
+  def testGetApplicationDataDirectory_notExists(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand(
+            'pm dump foo.bar.baz | grep dataDir='),
+        self.ShellError()):
+      self.assertIsNone(self.device.GetApplicationDataDirectory('foo.bar.baz'))
+
+
+@mock.patch('time.sleep', mock.Mock())
+class DeviceUtilsWaitUntilFullyBootedTest(DeviceUtilsTest):
+
+  def testWaitUntilFullyBooted_succeedsNoWifi(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'),
+         ['package:/some/fake/path']),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '1')):
+      self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_succeedsWithWifi(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'),
+         ['package:/some/fake/path']),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '1'),
+        # wifi_enabled
+        (self.call.adb.Shell('dumpsys wifi'),
+         'stuff\nWi-Fi is enabled\nmore stuff\n')):
+      self.device.WaitUntilFullyBooted(wifi=True)
+
+  def testWaitUntilFullyBooted_deviceNotInitiallyAvailable(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), self.AdbCommandError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), self.AdbCommandError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), self.AdbCommandError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), self.AdbCommandError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'),
+         ['package:/some/fake/path']),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '1')):
+      self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_sdCardReadyFails_noPath(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), self.CommandError())):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_sdCardReadyFails_notExists(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), self.ShellError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), self.ShellError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'),
+         self.TimeoutError())):
+      with self.assertRaises(device_errors.CommandTimeoutError):
+        self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_devicePmFails(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'), self.CommandError()),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'), self.CommandError()),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'), self.TimeoutError())):
+      with self.assertRaises(device_errors.CommandTimeoutError):
+        self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_bootFails(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'),
+         ['package:/some/fake/path']),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '0'),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '0'),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), self.TimeoutError())):
+      with self.assertRaises(device_errors.CommandTimeoutError):
+        self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_wifiFails(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'),
+         ['package:/some/fake/path']),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '1'),
+        # wifi_enabled
+        (self.call.adb.Shell('dumpsys wifi'), 'stuff\nmore stuff\n'),
+        # wifi_enabled
+        (self.call.adb.Shell('dumpsys wifi'), 'stuff\nmore stuff\n'),
+        # wifi_enabled
+        (self.call.adb.Shell('dumpsys wifi'), self.TimeoutError())):
+      with self.assertRaises(device_errors.CommandTimeoutError):
+        self.device.WaitUntilFullyBooted(wifi=True)
+
+
+@mock.patch('time.sleep', mock.Mock())
+class DeviceUtilsRebootTest(DeviceUtilsTest):
+
+  def testReboot_nonBlocking(self):
+    with self.assertCalls(
+        self.call.adb.Reboot(),
+        (self.call.device.IsOnline(), True),
+        (self.call.device.IsOnline(), False)):
+      self.device.Reboot(block=False)
+
+  def testReboot_blocking(self):
+    with self.assertCalls(
+        self.call.adb.Reboot(),
+        (self.call.device.IsOnline(), True),
+        (self.call.device.IsOnline(), False),
+        self.call.device.WaitUntilFullyBooted(wifi=False)):
+      self.device.Reboot(block=True)
+
+  def testReboot_blockUntilWifi(self):
+    with self.assertCalls(
+        self.call.adb.Reboot(),
+        (self.call.device.IsOnline(), True),
+        (self.call.device.IsOnline(), False),
+        self.call.device.WaitUntilFullyBooted(wifi=True)):
+      self.device.Reboot(block=True, wifi=True)
+
+
+class DeviceUtilsInstallTest(DeviceUtilsTest):
+
+  def testInstall_noPriorInstall(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.apk_helper.GetPackageName('/fake/test/app.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'), []),
+        self.call.adb.Install('/fake/test/app.apk', reinstall=False)):
+      self.device.Install('/fake/test/app.apk', retries=0)
+
+  def testInstall_differentPriorInstall(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.apk_helper.GetPackageName('/fake/test/app.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'),
+         ['/fake/data/app/this.is.a.test.package.apk']),
+        (self.call.device._GetChangedAndStaleFiles(
+            '/fake/test/app.apk', '/fake/data/app/this.is.a.test.package.apk'),
+         ([('/fake/test/app.apk', '/fake/data/app/this.is.a.test.package.apk')],
+          [])),
+        self.call.adb.Uninstall('this.is.a.test.package'),
+        self.call.adb.Install('/fake/test/app.apk', reinstall=False)):
+      self.device.Install('/fake/test/app.apk', retries=0)
+
+  def testInstall_differentPriorInstall_reinstall(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.apk_helper.GetPackageName('/fake/test/app.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'),
+         ['/fake/data/app/this.is.a.test.package.apk']),
+        (self.call.device._GetChangedAndStaleFiles(
+            '/fake/test/app.apk', '/fake/data/app/this.is.a.test.package.apk'),
+         ([('/fake/test/app.apk', '/fake/data/app/this.is.a.test.package.apk')],
+          [])),
+        self.call.adb.Install('/fake/test/app.apk', reinstall=True)):
+      self.device.Install('/fake/test/app.apk', reinstall=True, retries=0)
+
+  def testInstall_identicalPriorInstall(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.apk_helper.GetPackageName('/fake/test/app.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'),
+         ['/fake/data/app/this.is.a.test.package.apk']),
+        (self.call.device._GetChangedAndStaleFiles(
+            '/fake/test/app.apk', '/fake/data/app/this.is.a.test.package.apk'),
+         ([], []))):
+      self.device.Install('/fake/test/app.apk', retries=0)
+
+  def testInstall_fails(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.apk_helper.GetPackageName('/fake/test/app.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'), []),
+        (self.call.adb.Install('/fake/test/app.apk', reinstall=False),
+         self.CommandError('Failure\r\n'))):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.Install('/fake/test/app.apk', retries=0)
+
+
+class DeviceUtilsInstallSplitApkTest(DeviceUtilsTest):
+
+  def testInstallSplitApk_noPriorInstall(self):
+    with self.assertCalls(
+        (self.call.device._CheckSdkLevel(21)),
+        (mock.call.pylib.sdk.split_select.SelectSplits(
+            self.device, 'base.apk',
+            ['split1.apk', 'split2.apk', 'split3.apk']),
+         ['split2.apk']),
+        (mock.call.pylib.utils.apk_helper.GetPackageName('base.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'), []),
+        (self.call.adb.InstallMultiple(
+            ['base.apk', 'split2.apk'], partial=None, reinstall=False))):
+      self.device.InstallSplitApk('base.apk',
+          ['split1.apk', 'split2.apk', 'split3.apk'], retries=0)
+
+  def testInstallSplitApk_partialInstall(self):
+    with self.assertCalls(
+        (self.call.device._CheckSdkLevel(21)),
+        (mock.call.pylib.sdk.split_select.SelectSplits(
+            self.device, 'base.apk',
+            ['split1.apk', 'split2.apk', 'split3.apk']),
+         ['split2.apk']),
+        (mock.call.pylib.utils.apk_helper.GetPackageName('base.apk'),
+         'test.package'),
+        (self.call.device.GetApplicationPaths('test.package'),
+         ['base-on-device.apk', 'split2-on-device.apk']),
+        (mock.call.pylib.utils.md5sum.CalculateDeviceMd5Sums(
+            ['base-on-device.apk', 'split2-on-device.apk'], self.device),
+         {'base-on-device.apk': 'AAA', 'split2-on-device.apk': 'BBB'}),
+        (mock.call.pylib.utils.md5sum.CalculateHostMd5Sums(
+            ['base.apk', 'split2.apk']),
+         {'base.apk': 'AAA', 'split2.apk': 'CCC'}),
+        (self.call.adb.InstallMultiple(
+            ['split2.apk'], partial='test.package', reinstall=True))):
+      self.device.InstallSplitApk('base.apk',
+          ['split1.apk', 'split2.apk', 'split3.apk'], reinstall=True, retries=0)
+
+
+class DeviceUtilsRunShellCommandTest(DeviceUtilsTest):
+
+  def setUp(self):
+    super(DeviceUtilsRunShellCommandTest, self).setUp()
+    self.device.NeedsSU = mock.Mock(return_value=False)
+
+  def testRunShellCommand_commandAsList(self):
+    with self.assertCall(self.call.adb.Shell('pm list packages'), ''):
+      self.device.RunShellCommand(['pm', 'list', 'packages'])
+
+  def testRunShellCommand_commandAsListQuoted(self):
+    with self.assertCall(self.call.adb.Shell("echo 'hello world' '$10'"), ''):
+      self.device.RunShellCommand(['echo', 'hello world', '$10'])
+
+  def testRunShellCommand_commandAsString(self):
+    with self.assertCall(self.call.adb.Shell('echo "$VAR"'), ''):
+      self.device.RunShellCommand('echo "$VAR"')
+
+  def testNewRunShellImpl_withEnv(self):
+    with self.assertCall(
+        self.call.adb.Shell('VAR=some_string echo "$VAR"'), ''):
+      self.device.RunShellCommand('echo "$VAR"', env={'VAR': 'some_string'})
+
+  def testNewRunShellImpl_withEnvQuoted(self):
+    with self.assertCall(
+        self.call.adb.Shell('PATH="$PATH:/other/path" run_this'), ''):
+      self.device.RunShellCommand('run_this', env={'PATH': '$PATH:/other/path'})
+
+  def testNewRunShellImpl_withEnv_failure(self):
+    with self.assertRaises(KeyError):
+      self.device.RunShellCommand('some_cmd', env={'INVALID NAME': 'value'})
+
+  def testNewRunShellImpl_withCwd(self):
+    with self.assertCall(self.call.adb.Shell('cd /some/test/path && ls'), ''):
+      self.device.RunShellCommand('ls', cwd='/some/test/path')
+
+  def testNewRunShellImpl_withCwdQuoted(self):
+    with self.assertCall(
+        self.call.adb.Shell("cd '/some test/path with/spaces' && ls"), ''):
+      self.device.RunShellCommand('ls', cwd='/some test/path with/spaces')
+
+  def testRunShellCommand_withHugeCmd(self):
+    payload = 'hi! ' * 1024
+    expected_cmd = "echo '%s'" % payload
+    with self.assertCalls(
+      (mock.call.pylib.utils.device_temp_file.DeviceTempFile(
+          self.adb, suffix='.sh'), MockTempFile('/sdcard/temp-123.sh')),
+      self.call.device._WriteFileWithPush('/sdcard/temp-123.sh', expected_cmd),
+      (self.call.adb.Shell('sh /sdcard/temp-123.sh'), payload + '\n')):
+      self.assertEquals([payload],
+                        self.device.RunShellCommand(['echo', payload]))
+
+  def testRunShellCommand_withHugeCmdAndSU(self):
+    payload = 'hi! ' * 1024
+    expected_cmd = """su -c sh -c 'echo '"'"'%s'"'"''""" % payload
+    with self.assertCalls(
+      (self.call.device.NeedsSU(), True),
+      (mock.call.pylib.utils.device_temp_file.DeviceTempFile(
+          self.adb, suffix='.sh'), MockTempFile('/sdcard/temp-123.sh')),
+      self.call.device._WriteFileWithPush('/sdcard/temp-123.sh', expected_cmd),
+      (self.call.adb.Shell('sh /sdcard/temp-123.sh'), payload + '\n')):
+      self.assertEquals(
+          [payload],
+          self.device.RunShellCommand(['echo', payload], as_root=True))
+
+  def testRunShellCommand_withSu(self):
+    with self.assertCalls(
+        (self.call.device.NeedsSU(), True),
+        (self.call.adb.Shell("su -c sh -c 'setprop service.adb.root 0'"), '')):
+      self.device.RunShellCommand('setprop service.adb.root 0', as_root=True)
+
+  def testRunShellCommand_manyLines(self):
+    cmd = 'ls /some/path'
+    with self.assertCall(self.call.adb.Shell(cmd), 'file1\nfile2\nfile3\n'):
+      self.assertEquals(['file1', 'file2', 'file3'],
+                        self.device.RunShellCommand(cmd))
+
+  def testRunShellCommand_singleLine_success(self):
+    cmd = 'echo $VALUE'
+    with self.assertCall(self.call.adb.Shell(cmd), 'some value\n'):
+      self.assertEquals('some value',
+                        self.device.RunShellCommand(cmd, single_line=True))
+
+  def testRunShellCommand_singleLine_successEmptyLine(self):
+    cmd = 'echo $VALUE'
+    with self.assertCall(self.call.adb.Shell(cmd), '\n'):
+      self.assertEquals('',
+                        self.device.RunShellCommand(cmd, single_line=True))
+
+  def testRunShellCommand_singleLine_successWithoutEndLine(self):
+    cmd = 'echo -n $VALUE'
+    with self.assertCall(self.call.adb.Shell(cmd), 'some value'):
+      self.assertEquals('some value',
+                        self.device.RunShellCommand(cmd, single_line=True))
+
+  def testRunShellCommand_singleLine_successNoOutput(self):
+    cmd = 'echo -n $VALUE'
+    with self.assertCall(self.call.adb.Shell(cmd), ''):
+      self.assertEquals('',
+                        self.device.RunShellCommand(cmd, single_line=True))
+
+  def testRunShellCommand_singleLine_failTooManyLines(self):
+    cmd = 'echo $VALUE'
+    with self.assertCall(self.call.adb.Shell(cmd),
+                         'some value\nanother value\n'):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.RunShellCommand(cmd, single_line=True)
+
+  def testRunShellCommand_checkReturn_success(self):
+    cmd = 'echo $ANDROID_DATA'
+    output = '/data\n'
+    with self.assertCall(self.call.adb.Shell(cmd), output):
+      self.assertEquals([output.rstrip()],
+                        self.device.RunShellCommand(cmd, check_return=True))
+
+  def testRunShellCommand_checkReturn_failure(self):
+    cmd = 'ls /root'
+    output = 'opendir failed, Permission denied\n'
+    with self.assertCall(self.call.adb.Shell(cmd), self.ShellError(output)):
+      with self.assertRaises(device_errors.AdbCommandFailedError):
+        self.device.RunShellCommand(cmd, check_return=True)
+
+  def testRunShellCommand_checkReturn_disabled(self):
+    cmd = 'ls /root'
+    output = 'opendir failed, Permission denied\n'
+    with self.assertCall(self.call.adb.Shell(cmd), self.ShellError(output)):
+      self.assertEquals([output.rstrip()],
+                        self.device.RunShellCommand(cmd, check_return=False))
+
+  def testRunShellCommand_largeOutput_enabled(self):
+    cmd = 'echo $VALUE'
+    temp_file = MockTempFile('/sdcard/temp-123')
+    cmd_redirect = '%s > %s' % (cmd, temp_file.name)
+    with self.assertCalls(
+        (mock.call.pylib.utils.device_temp_file.DeviceTempFile(self.adb),
+            temp_file),
+        (self.call.adb.Shell(cmd_redirect)),
+        (self.call.device.ReadFile(temp_file.name, force_pull=True),
+         'something')):
+      self.assertEquals(
+          ['something'],
+          self.device.RunShellCommand(
+              cmd, large_output=True, check_return=True))
+
+  def testRunShellCommand_largeOutput_disabledNoTrigger(self):
+    cmd = 'something'
+    with self.assertCall(self.call.adb.Shell(cmd), self.ShellError('')):
+      with self.assertRaises(device_errors.AdbCommandFailedError):
+        self.device.RunShellCommand(cmd, check_return=True)
+
+  def testRunShellCommand_largeOutput_disabledTrigger(self):
+    cmd = 'echo $VALUE'
+    temp_file = MockTempFile('/sdcard/temp-123')
+    cmd_redirect = '%s > %s' % (cmd, temp_file.name)
+    with self.assertCalls(
+        (self.call.adb.Shell(cmd), self.ShellError('', None)),
+        (mock.call.pylib.utils.device_temp_file.DeviceTempFile(self.adb),
+            temp_file),
+        (self.call.adb.Shell(cmd_redirect)),
+        (self.call.device.ReadFile(mock.ANY, force_pull=True),
+         'something')):
+      self.assertEquals(['something'],
+                        self.device.RunShellCommand(cmd, check_return=True))
+
+
+class DeviceUtilsRunPipedShellCommandTest(DeviceUtilsTest):
+
+  def testRunPipedShellCommand_success(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            'ps | grep foo; echo "PIPESTATUS: ${PIPESTATUS[@]}"',
+            check_return=True),
+        ['This line contains foo', 'PIPESTATUS: 0 0']):
+      self.assertEquals(['This line contains foo'],
+                        self.device._RunPipedShellCommand('ps | grep foo'))
+
+  def testRunPipedShellCommand_firstCommandFails(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            'ps | grep foo; echo "PIPESTATUS: ${PIPESTATUS[@]}"',
+            check_return=True),
+        ['PIPESTATUS: 1 0']):
+      with self.assertRaises(device_errors.AdbShellCommandFailedError) as ec:
+        self.device._RunPipedShellCommand('ps | grep foo')
+      self.assertEquals([1, 0], ec.exception.status)
+
+  def testRunPipedShellCommand_secondCommandFails(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            'ps | grep foo; echo "PIPESTATUS: ${PIPESTATUS[@]}"',
+            check_return=True),
+        ['PIPESTATUS: 0 1']):
+      with self.assertRaises(device_errors.AdbShellCommandFailedError) as ec:
+        self.device._RunPipedShellCommand('ps | grep foo')
+      self.assertEquals([0, 1], ec.exception.status)
+
+  def testRunPipedShellCommand_outputCutOff(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            'ps | grep foo; echo "PIPESTATUS: ${PIPESTATUS[@]}"',
+            check_return=True),
+        ['foo.bar'] * 256 + ['foo.ba']):
+      with self.assertRaises(device_errors.AdbShellCommandFailedError) as ec:
+        self.device._RunPipedShellCommand('ps | grep foo')
+      self.assertIs(None, ec.exception.status)
+
+
+@mock.patch('time.sleep', mock.Mock())
+class DeviceUtilsKillAllTest(DeviceUtilsTest):
+
+  def testKillAll_noMatchingProcessesFailure(self):
+    with self.assertCall(self.call.device.GetPids('test_process'), {}):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.KillAll('test_process')
+
+  def testKillAll_noMatchingProcessesQuiet(self):
+    with self.assertCall(self.call.device.GetPids('test_process'), {}):
+      self.assertEqual(0, self.device.KillAll('test_process', quiet=True))
+
+  def testKillAll_nonblocking(self):
+    with self.assertCalls(
+        (self.call.device.GetPids('some.process'), {'some.process': '1234'}),
+        (self.call.adb.Shell('kill -9 1234'), '')):
+      self.assertEquals(
+          1, self.device.KillAll('some.process', blocking=False))
+
+  def testKillAll_blocking(self):
+    with self.assertCalls(
+        (self.call.device.GetPids('some.process'), {'some.process': '1234'}),
+        (self.call.adb.Shell('kill -9 1234'), ''),
+        (self.call.device.GetPids('some.process'), {'some.process': '1234'}),
+        (self.call.device.GetPids('some.process'), [])):
+      self.assertEquals(
+          1, self.device.KillAll('some.process', blocking=True))
+
+  def testKillAll_root(self):
+    with self.assertCalls(
+        (self.call.device.GetPids('some.process'), {'some.process': '1234'}),
+        (self.call.device.NeedsSU(), True),
+        (self.call.adb.Shell("su -c sh -c 'kill -9 1234'"), '')):
+      self.assertEquals(
+          1, self.device.KillAll('some.process', as_root=True))
+
+  def testKillAll_sigterm(self):
+    with self.assertCalls(
+        (self.call.device.GetPids('some.process'), {'some.process': '1234'}),
+        (self.call.adb.Shell('kill -15 1234'), '')):
+      self.assertEquals(
+          1, self.device.KillAll('some.process', signum=device_signal.SIGTERM))
+
+
+class DeviceUtilsStartActivityTest(DeviceUtilsTest):
+
+  def testStartActivity_actionOnly(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_success(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_failure(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main'),
+        'Error: Failed to start test activity'):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.StartActivity(test_intent)
+
+  def testStartActivity_blocking(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-W '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent, blocking=True)
+
+  def testStartActivity_withCategory(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                category='android.intent.category.HOME')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-c android.intent.category.HOME '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withMultipleCategories(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                category=['android.intent.category.HOME',
+                                          'android.intent.category.BROWSABLE'])
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-c android.intent.category.HOME '
+                            '-c android.intent.category.BROWSABLE '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withData(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                data='http://www.google.com/')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-d http://www.google.com/ '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withStringExtra(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                extras={'foo': 'test'})
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main '
+                            '--es foo test'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withBoolExtra(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                extras={'foo': True})
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main '
+                            '--ez foo True'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withIntExtra(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                extras={'foo': 123})
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main '
+                            '--ei foo 123'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withTraceFile(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '--start-profiler test_trace_file.out '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent,
+                                trace_file_name='test_trace_file.out')
+
+  def testStartActivity_withForceStop(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-S '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent, force_stop=True)
+
+  def testStartActivity_withFlags(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                flags='0x10000000')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main '
+                            '-f 0x10000000'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+
+class DeviceUtilsStartInstrumentationTest(DeviceUtilsTest):
+
+  def testStartInstrumentation_nothing(self):
+    with self.assertCalls(
+        self.call.device.RunShellCommand(
+            ['am', 'instrument', 'test.package/.TestInstrumentation'],
+            check_return=True, large_output=True)):
+      self.device.StartInstrumentation(
+          'test.package/.TestInstrumentation',
+          finish=False, raw=False, extras=None)
+
+  def testStartInstrumentation_finish(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['am', 'instrument', '-w', 'test.package/.TestInstrumentation'],
+            check_return=True, large_output=True),
+         ['OK (1 test)'])):
+      output = self.device.StartInstrumentation(
+          'test.package/.TestInstrumentation',
+          finish=True, raw=False, extras=None)
+      self.assertEquals(['OK (1 test)'], output)
+
+  def testStartInstrumentation_raw(self):
+    with self.assertCalls(
+        self.call.device.RunShellCommand(
+            ['am', 'instrument', '-r', 'test.package/.TestInstrumentation'],
+            check_return=True, large_output=True)):
+      self.device.StartInstrumentation(
+          'test.package/.TestInstrumentation',
+          finish=False, raw=True, extras=None)
+
+  def testStartInstrumentation_extras(self):
+    with self.assertCalls(
+        self.call.device.RunShellCommand(
+            ['am', 'instrument', '-e', 'foo', 'Foo', '-e', 'bar', 'Bar',
+             'test.package/.TestInstrumentation'],
+            check_return=True, large_output=True)):
+      self.device.StartInstrumentation(
+          'test.package/.TestInstrumentation',
+          finish=False, raw=False, extras={'foo': 'Foo', 'bar': 'Bar'})
+
+
+class DeviceUtilsBroadcastIntentTest(DeviceUtilsTest):
+
+  def testBroadcastIntent_noExtras(self):
+    test_intent = intent.Intent(action='test.package.with.an.INTENT')
+    with self.assertCall(
+        self.call.adb.Shell('am broadcast -a test.package.with.an.INTENT'),
+        'Broadcasting: Intent { act=test.package.with.an.INTENT } '):
+      self.device.BroadcastIntent(test_intent)
+
+  def testBroadcastIntent_withExtra(self):
+    test_intent = intent.Intent(action='test.package.with.an.INTENT',
+                                extras={'foo': 'bar value'})
+    with self.assertCall(
+        self.call.adb.Shell(
+            "am broadcast -a test.package.with.an.INTENT --es foo 'bar value'"),
+        'Broadcasting: Intent { act=test.package.with.an.INTENT } '):
+      self.device.BroadcastIntent(test_intent)
+
+  def testBroadcastIntent_withExtra_noValue(self):
+    test_intent = intent.Intent(action='test.package.with.an.INTENT',
+                                extras={'foo': None})
+    with self.assertCall(
+        self.call.adb.Shell(
+            'am broadcast -a test.package.with.an.INTENT --esn foo'),
+        'Broadcasting: Intent { act=test.package.with.an.INTENT } '):
+      self.device.BroadcastIntent(test_intent)
+
+
+class DeviceUtilsGoHomeTest(DeviceUtilsTest):
+
+  def testGoHome_popupsExist(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['am', 'start', '-W', '-a', 'android.intent.action.MAIN',
+            '-c', 'android.intent.category.HOME'], check_return=True),
+         'Starting: Intent { act=android.intent.action.MAIN }\r\n'),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '66'], check_return=True)),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '4'], check_return=True)),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True),
+         ['mCurrentFocus Launcher'])):
+      self.device.GoHome()
+
+  def testGoHome_willRetry(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['am', 'start', '-W', '-a', 'android.intent.action.MAIN',
+            '-c', 'android.intent.category.HOME'], check_return=True),
+         'Starting: Intent { act=android.intent.action.MAIN }\r\n'),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '66'], check_return=True)),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '4'], check_return=True)),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '66'], check_return=True)),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '4'], check_return=True)),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True),
+         self.TimeoutError())):
+      with self.assertRaises(device_errors.CommandTimeoutError):
+        self.device.GoHome()
+
+  def testGoHome_alreadyFocused(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True),
+        ['mCurrentFocus Launcher']):
+      self.device.GoHome()
+
+  def testGoHome_alreadyFocusedAlternateCase(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True),
+        [' mCurrentFocus .launcher/.']):
+      self.device.GoHome()
+
+  def testGoHome_obtainsFocusAfterGoingHome(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['am', 'start', '-W', '-a', 'android.intent.action.MAIN',
+            '-c', 'android.intent.category.HOME'], check_return=True),
+         'Starting: Intent { act=android.intent.action.MAIN }\r\n'),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True),
+         ['mCurrentFocus Launcher'])):
+      self.device.GoHome()
+
+
+class DeviceUtilsForceStopTest(DeviceUtilsTest):
+
+  def testForceStop(self):
+    with self.assertCall(
+        self.call.adb.Shell('am force-stop this.is.a.test.package'),
+        ''):
+      self.device.ForceStop('this.is.a.test.package')
+
+
+class DeviceUtilsClearApplicationStateTest(DeviceUtilsTest):
+
+  def testClearApplicationState_packageDoesntExist(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '17\n'),
+        (self.call.device.GetApplicationPaths('this.package.does.not.exist'),
+         [])):
+      self.device.ClearApplicationState('this.package.does.not.exist')
+
+  def testClearApplicationState_packageDoesntExistOnAndroidJBMR2OrAbove(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '18\n'),
+        (self.call.adb.Shell('pm clear this.package.does.not.exist'),
+         'Failed\r\n')):
+      self.device.ClearApplicationState('this.package.does.not.exist')
+
+  def testClearApplicationState_packageExists(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '17\n'),
+        (self.call.device.GetApplicationPaths('this.package.exists'),
+         ['/data/app/this.package.exists.apk']),
+        (self.call.adb.Shell('pm clear this.package.exists'),
+         'Success\r\n')):
+      self.device.ClearApplicationState('this.package.exists')
+
+  def testClearApplicationState_packageExistsOnAndroidJBMR2OrAbove(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '18\n'),
+        (self.call.adb.Shell('pm clear this.package.exists'),
+         'Success\r\n')):
+      self.device.ClearApplicationState('this.package.exists')
+
+
+class DeviceUtilsSendKeyEventTest(DeviceUtilsTest):
+
+  def testSendKeyEvent(self):
+    with self.assertCall(self.call.adb.Shell('input keyevent 66'), ''):
+      self.device.SendKeyEvent(66)
+
+
+class DeviceUtilsPushChangedFilesIndividuallyTest(DeviceUtilsTest):
+
+  def testPushChangedFilesIndividually_empty(self):
+    test_files = []
+    with self.assertCalls():
+      self.device._PushChangedFilesIndividually(test_files)
+
+  def testPushChangedFilesIndividually_single(self):
+    test_files = [('/test/host/path', '/test/device/path')]
+    with self.assertCalls(self.call.adb.Push(*test_files[0])):
+      self.device._PushChangedFilesIndividually(test_files)
+
+  def testPushChangedFilesIndividually_multiple(self):
+    test_files = [
+        ('/test/host/path/file1', '/test/device/path/file1'),
+        ('/test/host/path/file2', '/test/device/path/file2')]
+    with self.assertCalls(
+        self.call.adb.Push(*test_files[0]),
+        self.call.adb.Push(*test_files[1])):
+      self.device._PushChangedFilesIndividually(test_files)
+
+
+class DeviceUtilsPushChangedFilesZippedTest(DeviceUtilsTest):
+
+  def testPushChangedFilesZipped_empty(self):
+    test_files = []
+    with self.assertCalls():
+      self.device._PushChangedFilesZipped(test_files)
+
+  def _testPushChangedFilesZipped_spec(self, test_files):
+    mock_zip_temp = mock.mock_open()
+    mock_zip_temp.return_value.name = '/test/temp/file/tmp.zip'
+    with self.assertCalls(
+        (mock.call.tempfile.NamedTemporaryFile(suffix='.zip'), mock_zip_temp),
+        (mock.call.multiprocessing.Process(
+            target=device_utils.DeviceUtils._CreateDeviceZip,
+            args=('/test/temp/file/tmp.zip', test_files)), mock.Mock()),
+        (self.call.device.GetExternalStoragePath(),
+         '/test/device/external_dir'),
+        self.call.adb.Push(
+            '/test/temp/file/tmp.zip', '/test/device/external_dir/tmp.zip'),
+        self.call.device.RunShellCommand(
+            ['unzip', '/test/device/external_dir/tmp.zip'],
+            as_root=True,
+            env={'PATH': '/data/local/tmp/bin:$PATH'},
+            check_return=True),
+        (self.call.device.IsOnline(), True),
+        self.call.device.RunShellCommand(
+            ['rm', '/test/device/external_dir/tmp.zip'], check_return=True)):
+      self.device._PushChangedFilesZipped(test_files)
+
+  def testPushChangedFilesZipped_single(self):
+    self._testPushChangedFilesZipped_spec(
+        [('/test/host/path/file1', '/test/device/path/file1')])
+
+  def testPushChangedFilesZipped_multiple(self):
+    self._testPushChangedFilesZipped_spec(
+        [('/test/host/path/file1', '/test/device/path/file1'),
+         ('/test/host/path/file2', '/test/device/path/file2')])
+
+
+class DeviceUtilsFileExistsTest(DeviceUtilsTest):
+
+  def testFileExists_usingTest_fileExists(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['test', '-e', '/path/file.exists'], check_return=True), ''):
+      self.assertTrue(self.device.FileExists('/path/file.exists'))
+
+  def testFileExists_usingTest_fileDoesntExist(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['test', '-e', '/does/not/exist'], check_return=True),
+        self.ShellError('', 1)):
+      self.assertFalse(self.device.FileExists('/does/not/exist'))
+
+
+class DeviceUtilsPullFileTest(DeviceUtilsTest):
+
+  def testPullFile_existsOnDevice(self):
+    with mock.patch('os.path.exists', return_value=True):
+      with self.assertCall(
+          self.call.adb.Pull('/data/app/test.file.exists',
+                             '/test/file/host/path')):
+        self.device.PullFile('/data/app/test.file.exists',
+                             '/test/file/host/path')
+
+  def testPullFile_doesntExistOnDevice(self):
+    with mock.patch('os.path.exists', return_value=True):
+      with self.assertCall(
+          self.call.adb.Pull('/data/app/test.file.does.not.exist',
+                             '/test/file/host/path'),
+          self.CommandError('remote object does not exist')):
+        with self.assertRaises(device_errors.CommandFailedError):
+          self.device.PullFile('/data/app/test.file.does.not.exist',
+                               '/test/file/host/path')
+
+
+class DeviceUtilsReadFileTest(DeviceUtilsTest):
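+  # ReadFile 'cat's small files over the shell and pulls large ones (or
+  # always, with force_pull=True); the size check comes from 'ls -l'.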
+
+  def testReadFileWithPull_success(self):
+    tmp_host_dir = '/tmp/dir/on.host/'
+    tmp_host = MockTempFile('/tmp/dir/on.host/tmp_ReadFileWithPull')
+    tmp_host.file.read.return_value = 'some interesting contents'
+    with self.assertCalls(
+        (mock.call.tempfile.mkdtemp(), tmp_host_dir),
+        (self.call.adb.Pull('/path/to/device/file', mock.ANY)),
+        (mock.call.__builtin__.open(mock.ANY, 'r'), tmp_host),
+        (mock.call.os.path.exists(tmp_host_dir), True),
+        (mock.call.shutil.rmtree(tmp_host_dir), None)):
+      self.assertEquals('some interesting contents',
+                        self.device._ReadFileWithPull('/path/to/device/file'))
+    tmp_host.file.read.assert_called_once_with()
+
+  def testReadFileWithPull_rejected(self):
+    tmp_host_dir = '/tmp/dir/on.host/'
+    with self.assertCalls(
+        (mock.call.tempfile.mkdtemp(), tmp_host_dir),
+        (self.call.adb.Pull('/path/to/device/file', mock.ANY),
+         self.CommandError()),
+        (mock.call.os.path.exists(tmp_host_dir), True),
+        (mock.call.shutil.rmtree(tmp_host_dir), None)):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device._ReadFileWithPull('/path/to/device/file')
+
+  def testReadFile_exists(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['ls', '-l', '/read/this/test/file'],
+            as_root=False, check_return=True),
+         ['-rw-rw---- root foo 256 1970-01-01 00:00 file']),
+        (self.call.device.RunShellCommand(
+            ['cat', '/read/this/test/file'],
+            as_root=False, check_return=True),
+         ['this is a test file'])):
+      self.assertEqual('this is a test file\n',
+                       self.device.ReadFile('/read/this/test/file'))
+
+  def testReadFile_doesNotExist(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['ls', '-l', '/this/file/does.not.exist'],
+            as_root=False, check_return=True),
+        self.CommandError('File does not exist')):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.ReadFile('/this/file/does.not.exist')
+
+  def testReadFile_zeroSize(self):
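+    # 'ls -l' reports a zero size for pseudo-files (e.g. under /proc) even
+    # when they have contents, so ReadFile falls back to pulling the file.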
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['ls', '-l', '/this/file/has/zero/size'],
+            as_root=False, check_return=True),
+         ['-r--r--r-- root foo 0 1970-01-01 00:00 zero_size_file']),
+        (self.call.device._ReadFileWithPull('/this/file/has/zero/size'),
+         'but it has contents\n')):
+      self.assertEqual('but it has contents\n',
+                       self.device.ReadFile('/this/file/has/zero/size'))
+
+  def testReadFile_withSU(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['ls', '-l', '/this/file/can.be.read.with.su'],
+            as_root=True, check_return=True),
+         ['-rw------- root root 256 1970-01-01 00:00 can.be.read.with.su']),
+        (self.call.device.RunShellCommand(
+            ['cat', '/this/file/can.be.read.with.su'],
+            as_root=True, check_return=True),
+         ['this is a test file', 'read with su'])):
+      self.assertEqual(
+          'this is a test file\nread with su\n',
+          self.device.ReadFile('/this/file/can.be.read.with.su',
+                               as_root=True))
+
+  def testReadFile_withPull(self):
+    contents = 'a' * 123456
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['ls', '-l', '/read/this/big/test/file'],
+            as_root=False, check_return=True),
+         ['-rw-rw---- root foo 123456 1970-01-01 00:00 file']),
+        (self.call.device._ReadFileWithPull('/read/this/big/test/file'),
+         contents)):
+      self.assertEqual(
+          contents, self.device.ReadFile('/read/this/big/test/file'))
+
+  def testReadFile_withPullAndSU(self):
+    contents = 'b' * 123456
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['ls', '-l', '/this/big/file/can.be.read.with.su'],
+            as_root=True, check_return=True),
+         ['-rw------- root root 123456 1970-01-01 00:00 can.be.read.with.su']),
+        (self.call.device.NeedsSU(), True),
+        (mock.call.pylib.utils.device_temp_file.DeviceTempFile(self.adb),
+         MockTempFile('/sdcard/tmp/on.device')),
+        self.call.device.RunShellCommand(
+            ['cp', '/this/big/file/can.be.read.with.su',
+             '/sdcard/tmp/on.device'],
+            as_root=True, check_return=True),
+        (self.call.device._ReadFileWithPull('/sdcard/tmp/on.device'),
+         contents)):
+      self.assertEqual(
+          contents,
+          self.device.ReadFile('/this/big/file/can.be.read.with.su',
+                               as_root=True))
+
+  def testReadFile_forcePull(self):
+    contents = 'a' * 123456
+    with self.assertCall(
+        self.call.device._ReadFileWithPull('/read/this/big/test/file'),
+        contents):
+      self.assertEqual(
+          contents,
+          self.device.ReadFile('/read/this/big/test/file', force_pull=True))
+
+
+class DeviceUtilsWriteFileTest(DeviceUtilsTest):
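+  # WriteFile writes short contents in place with 'echo -n ... >' and
+  # pushes a temporary file for large contents or when force_push=True.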
+
+  def testWriteFileWithPush_success(self):
+    tmp_host = MockTempFile('/tmp/file/on.host')
+    contents = 'some interesting contents'
+    with self.assertCalls(
+        (mock.call.tempfile.NamedTemporaryFile(), tmp_host),
+        self.call.adb.Push('/tmp/file/on.host', '/path/to/device/file')):
+      self.device._WriteFileWithPush('/path/to/device/file', contents)
+    tmp_host.file.write.assert_called_once_with(contents)
+
+  def testWriteFileWithPush_rejected(self):
+    tmp_host = MockTempFile('/tmp/file/on.host')
+    contents = 'some interesting contents'
+    with self.assertCalls(
+        (mock.call.tempfile.NamedTemporaryFile(), tmp_host),
+        (self.call.adb.Push('/tmp/file/on.host', '/path/to/device/file'),
+         self.CommandError())):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device._WriteFileWithPush('/path/to/device/file', contents)
+
+  def testWriteFile_withPush(self):
+    contents = 'some large contents ' * 26  # 20 * 26 = 520 chars
+    with self.assertCalls(
+        self.call.device._WriteFileWithPush('/path/to/device/file', contents)):
+      self.device.WriteFile('/path/to/device/file', contents)
+
+  def testWriteFile_withPushForced(self):
+    contents = 'tiny contents'
+    with self.assertCalls(
+        self.call.device._WriteFileWithPush('/path/to/device/file', contents)):
+      self.device.WriteFile('/path/to/device/file', contents, force_push=True)
+
+  def testWriteFile_withPushAndSU(self):
+    contents = 'some large contents ' * 26  # 20 * 26 = 520 chars
+    with self.assertCalls(
+        (self.call.device.NeedsSU(), True),
+        (mock.call.pylib.utils.device_temp_file.DeviceTempFile(self.adb),
+         MockTempFile('/sdcard/tmp/on.device')),
+        self.call.device._WriteFileWithPush('/sdcard/tmp/on.device', contents),
+        self.call.device.RunShellCommand(
+            ['cp', '/sdcard/tmp/on.device', '/path/to/device/file'],
+            as_root=True, check_return=True)):
+      self.device.WriteFile('/path/to/device/file', contents, as_root=True)
+
+  def testWriteFile_withEcho(self):
+    with self.assertCall(self.call.adb.Shell(
+        "echo -n the.contents > /test/file/to.write"), ''):
+      self.device.WriteFile('/test/file/to.write', 'the.contents')
+
+  def testWriteFile_withEchoAndQuotes(self):
+    with self.assertCall(self.call.adb.Shell(
+        "echo -n 'the contents' > '/test/file/to write'"), ''):
+      self.device.WriteFile('/test/file/to write', 'the contents')
+
+  def testWriteFile_withEchoAndSU(self):
+    with self.assertCalls(
+        (self.call.device.NeedsSU(), True),
+        (self.call.adb.Shell("su -c sh -c 'echo -n contents > /test/file'"),
+         '')):
+      self.device.WriteFile('/test/file', 'contents', as_root=True)
+
+
+class DeviceUtilsLsTest(DeviceUtilsTest):
+
+  def testLs_directory(self):
+    result = [('.', adb_wrapper.DeviceStat(16889, 4096, 1417436123)),
+              ('..', adb_wrapper.DeviceStat(16873, 4096, 12382237)),
+              ('testfile.txt', adb_wrapper.DeviceStat(33206, 3, 1417436122))]
+    with self.assertCalls(
+        (self.call.adb.Ls('/data/local/tmp'), result)):
+      self.assertEquals(result,
+                        self.device.Ls('/data/local/tmp'))
+
+  def testLs_nothing(self):
+    with self.assertCalls(
+        (self.call.adb.Ls('/data/local/tmp/testfile.txt'), [])):
+      self.assertEquals([],
+                        self.device.Ls('/data/local/tmp/testfile.txt'))
+
+
+class DeviceUtilsStatTest(DeviceUtilsTest):
+
+  def testStat_file(self):
+    result = [('.', adb_wrapper.DeviceStat(16889, 4096, 1417436123)),
+              ('..', adb_wrapper.DeviceStat(16873, 4096, 12382237)),
+              ('testfile.txt', adb_wrapper.DeviceStat(33206, 3, 1417436122))]
+    with self.assertCalls(
+        (self.call.adb.Ls('/data/local/tmp'), result)):
+      self.assertEquals(adb_wrapper.DeviceStat(33206, 3, 1417436122),
+                        self.device.Stat('/data/local/tmp/testfile.txt'))
+
+  def testStat_directory(self):
+    result = [('.', adb_wrapper.DeviceStat(16873, 4096, 12382237)),
+              ('..', adb_wrapper.DeviceStat(16873, 4096, 12382237)),
+              ('tmp', adb_wrapper.DeviceStat(16889, 4096, 1417436123))]
+    with self.assertCalls(
+        (self.call.adb.Ls('/data/local'), result)):
+      self.assertEquals(adb_wrapper.DeviceStat(16889, 4096, 1417436123),
+                        self.device.Stat('/data/local/tmp'))
+
+  def testStat_doesNotExist(self):
+    result = [('.', adb_wrapper.DeviceStat(16889, 4096, 1417436123)),
+              ('..', adb_wrapper.DeviceStat(16873, 4096, 12382237)),
+              ('testfile.txt', adb_wrapper.DeviceStat(33206, 3, 1417436122))]
+    with self.assertCalls(
+        (self.call.adb.Ls('/data/local/tmp'), result)):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.Stat('/data/local/tmp/does.not.exist.txt')
+
+
+class DeviceUtilsSetJavaAssertsTest(DeviceUtilsTest):
+
+  def testSetJavaAsserts_enable(self):
+    with self.assertCalls(
+        (self.call.device.ReadFile(constants.DEVICE_LOCAL_PROPERTIES_PATH),
+         'some.example.prop=with an example value\n'
+         'some.other.prop=value_ok\n'),
+        self.call.device.WriteFile(
+            constants.DEVICE_LOCAL_PROPERTIES_PATH,
+            'some.example.prop=with an example value\n'
+            'some.other.prop=value_ok\n'
+            'dalvik.vm.enableassertions=all\n'),
+        (self.call.device.GetProp('dalvik.vm.enableassertions'), ''),
+        self.call.device.SetProp('dalvik.vm.enableassertions', 'all')):
+      self.assertTrue(self.device.SetJavaAsserts(True))
+
+  def testSetJavaAsserts_disable(self):
+    with self.assertCalls(
+        (self.call.device.ReadFile(constants.DEVICE_LOCAL_PROPERTIES_PATH),
+         'some.example.prop=with an example value\n'
+         'dalvik.vm.enableassertions=all\n'
+         'some.other.prop=value_ok\n'),
+        self.call.device.WriteFile(
+            constants.DEVICE_LOCAL_PROPERTIES_PATH,
+            'some.example.prop=with an example value\n'
+            'some.other.prop=value_ok\n'),
+        (self.call.device.GetProp('dalvik.vm.enableassertions'), 'all'),
+        self.call.device.SetProp('dalvik.vm.enableassertions', '')):
+      self.assertTrue(self.device.SetJavaAsserts(False))
+
+  def testSetJavaAsserts_alreadyEnabled(self):
+    with self.assertCalls(
+        (self.call.device.ReadFile(constants.DEVICE_LOCAL_PROPERTIES_PATH),
+         'some.example.prop=with an example value\n'
+         'dalvik.vm.enableassertions=all\n'
+         'some.other.prop=value_ok\n'),
+        (self.call.device.GetProp('dalvik.vm.enableassertions'), 'all')):
+      self.assertFalse(self.device.SetJavaAsserts(True))
+
+
+class DeviceUtilsGetPropTest(DeviceUtilsTest):
+
+  def testGetProp_exists(self):
+    with self.assertCall(
+        self.call.adb.Shell('getprop test.property'), 'property_value\n'):
+      self.assertEqual('property_value',
+                       self.device.GetProp('test.property'))
+
+  def testGetProp_doesNotExist(self):
+    with self.assertCall(
+        self.call.adb.Shell('getprop property.does.not.exist'), '\n'):
+      self.assertEqual('', self.device.GetProp('property.does.not.exist'))
+
+  def testGetProp_cachedRoProp(self):
+    with self.assertCall(
+        self.call.adb.Shell('getprop ro.build.type'), 'userdebug\n'):
+      self.assertEqual('userdebug',
+                       self.device.GetProp('ro.build.type', cache=True))
+      self.assertEqual('userdebug',
+                       self.device.GetProp('ro.build.type', cache=True))
+
+  def testGetProp_retryAndCache(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.type'), self.ShellError()),
+        (self.call.adb.Shell('getprop ro.build.type'), self.ShellError()),
+        (self.call.adb.Shell('getprop ro.build.type'), 'userdebug\n')):
+      self.assertEqual('userdebug',
+                       self.device.GetProp('ro.build.type',
+                                           cache=True, retries=3))
+      self.assertEqual('userdebug',
+                       self.device.GetProp('ro.build.type',
+                                           cache=True, retries=3))
+
+
+class DeviceUtilsSetPropTest(DeviceUtilsTest):
+
+  def testSetProp(self):
+    with self.assertCall(
+        self.call.adb.Shell("setprop test.property 'test value'"), ''):
+      self.device.SetProp('test.property', 'test value')
+
+  def testSetProp_check_succeeds(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('setprop test.property new_value'), ''),
+        (self.call.adb.Shell('getprop test.property'), 'new_value')):
+      self.device.SetProp('test.property', 'new_value', check=True)
+
+  def testSetProp_check_fails(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('setprop test.property new_value'), ''),
+        (self.call.adb.Shell('getprop test.property'), 'old_value')):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.SetProp('test.property', 'new_value', check=True)
+
+
+class DeviceUtilsGetPidsTest(DeviceUtilsTest):
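+  # GetPids matches process names with 'ps | grep -F <name>'; -F makes grep
+  # treat the name as a fixed string, and shell quoting protects
+  # metacharacters in the name (see testGetPids_quotable).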
+
+  def testGetPids_noMatches(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand('ps | grep -F does.not.match'),
+        []):
+      self.assertEqual({}, self.device.GetPids('does.not.match'))
+
+  def testGetPids_oneMatch(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand('ps | grep -F one.match'),
+        ['user  1001    100   1024 1024   ffffffff 00000000 one.match']):
+      self.assertEqual({'one.match': '1001'}, self.device.GetPids('one.match'))
+
+  def testGetPids_multipleMatches(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand('ps | grep -F match'),
+        ['user  1001    100   1024 1024   ffffffff 00000000 one.match',
+         'user  1002    100   1024 1024   ffffffff 00000000 two.match',
+         'user  1003    100   1024 1024   ffffffff 00000000 three.match']):
+      self.assertEqual(
+          {'one.match': '1001', 'two.match': '1002', 'three.match': '1003'},
+          self.device.GetPids('match'))
+
+  def testGetPids_exactMatch(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand('ps | grep -F exact.match'),
+        ['user  1000    100   1024 1024   ffffffff 00000000 not.exact.match',
+         'user  1234    100   1024 1024   ffffffff 00000000 exact.match']):
+      self.assertEqual(
+          {'not.exact.match': '1000', 'exact.match': '1234'},
+          self.device.GetPids('exact.match'))
+
+  def testGetPids_quotable(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand("ps | grep -F 'my$process'"),
+        ['user  1234    100   1024 1024   ffffffff 00000000 my$process']):
+      self.assertEqual(
+          {'my$process': '1234'}, self.device.GetPids('my$process'))
+
+
+class DeviceUtilsTakeScreenshotTest(DeviceUtilsTest):
+
+  def testTakeScreenshot_fileNameProvided(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.device_temp_file.DeviceTempFile(
+            self.adb, suffix='.png'),
+         MockTempFile('/tmp/path/temp-123.png')),
+        (self.call.adb.Shell('/system/bin/screencap -p /tmp/path/temp-123.png'),
+         ''),
+        self.call.device.PullFile('/tmp/path/temp-123.png',
+                                  '/test/host/screenshot.png')):
+      self.device.TakeScreenshot('/test/host/screenshot.png')
+
+
+class DeviceUtilsGetMemoryUsageForPidTest(DeviceUtilsTest):
+
+  def setUp(self):
+    super(DeviceUtilsGetMemoryUsageForPidTest, self).setUp()
+
+  def testGetMemoryUsageForPid_validPid(self):
+    with self.assertCalls(
+        (self.call.device._RunPipedShellCommand(
+            'showmap 1234 | grep TOTAL', as_root=True),
+         ['100 101 102 103 104 105 106 107 TOTAL']),
+        (self.call.device.ReadFile('/proc/1234/status', as_root=True),
+         'VmHWM: 1024 kB\n')):
+      self.assertEqual(
+          {
+            'Size': 100,
+            'Rss': 101,
+            'Pss': 102,
+            'Shared_Clean': 103,
+            'Shared_Dirty': 104,
+            'Private_Clean': 105,
+            'Private_Dirty': 106,
+            'VmHWM': 1024
+          },
+          self.device.GetMemoryUsageForPid(1234))
+
+  def testGetMemoryUsageForPid_noSmaps(self):
+    with self.assertCalls(
+        (self.call.device._RunPipedShellCommand(
+            'showmap 4321 | grep TOTAL', as_root=True),
+         ['cannot open /proc/4321/smaps: No such file or directory']),
+        (self.call.device.ReadFile('/proc/4321/status', as_root=True),
+         'VmHWM: 1024 kB\n')):
+      self.assertEquals({'VmHWM': 1024}, self.device.GetMemoryUsageForPid(4321))
+
+  def testGetMemoryUsageForPid_noStatus(self):
+    with self.assertCalls(
+        (self.call.device._RunPipedShellCommand(
+            'showmap 4321 | grep TOTAL', as_root=True),
+         ['100 101 102 103 104 105 106 107 TOTAL']),
+        (self.call.device.ReadFile('/proc/4321/status', as_root=True),
+         self.CommandError())):
+      self.assertEquals(
+          {
+            'Size': 100,
+            'Rss': 101,
+            'Pss': 102,
+            'Shared_Clean': 103,
+            'Shared_Dirty': 104,
+            'Private_Clean': 105,
+            'Private_Dirty': 106,
+          },
+          self.device.GetMemoryUsageForPid(4321))
+
+
+class DeviceUtilsClientCacheTest(DeviceUtilsTest):
+
+  def testClientCache_twoCaches(self):
+    self.device._cache['test'] = 0
+    client_cache_one = self.device.GetClientCache('ClientOne')
+    client_cache_one['test'] = 1
+    client_cache_two = self.device.GetClientCache('ClientTwo')
+    client_cache_two['test'] = 2
+    self.assertEqual(self.device._cache, {'test': 0})
+    self.assertEqual(client_cache_one, {'test': 1})
+    self.assertEqual(client_cache_two, {'test': 2})
+    self.device._ClearCache()
+    self.assertEqual(self.device._cache, {})
+    self.assertEqual(client_cache_one, {})
+    self.assertEqual(client_cache_two, {})
+
+  def testClientCache_multipleInstances(self):
+    client_cache_one = self.device.GetClientCache('ClientOne')
+    client_cache_one['test'] = 1
+    client_cache_two = self.device.GetClientCache('ClientOne')
+    self.assertEqual(client_cache_one, {'test': 1})
+    self.assertEqual(client_cache_two, {'test': 1})
+    self.device._ClearCache()
+    self.assertEqual(client_cache_one, {})
+    self.assertEqual(client_cache_two, {})
+
+
+class DeviceUtilsParallelTest(mock_calls.TestCase):
+
+  def testParallel_default(self):
+    test_serials = ['0123456789abcdef', 'fedcba9876543210']
+    with self.assertCall(
+        mock.call.pylib.device.device_utils.DeviceUtils.HealthyDevices(),
+        [device_utils.DeviceUtils(s) for s in test_serials]):
+      parallel_devices = device_utils.DeviceUtils.parallel()
+    for serial, device in zip(test_serials, parallel_devices.pGet(None)):
+      self.assertTrue(isinstance(device, device_utils.DeviceUtils))
+      self.assertEquals(serial, device.adb.GetDeviceSerial())
+
+  def testParallel_noDevices(self):
+    with self.assertCall(
+        mock.call.pylib.device.device_utils.DeviceUtils.HealthyDevices(), []):
+      with self.assertRaises(device_errors.NoDevicesError):
+        device_utils.DeviceUtils.parallel()
+
+
+class DeviceUtilsHealthyDevicesTest(mock_calls.TestCase):
+
+  def _createAdbWrapperMock(self, serial, is_ready=True):
+    adb = _AdbWrapperMock(serial)
+    adb.is_ready = is_ready
+    return adb
+
+  def testHealthyDevices_default(self):
+    test_serials = ['0123456789abcdef', 'fedcba9876543210']
+    with self.assertCalls(
+        (mock.call.pylib.device.device_blacklist.ReadBlacklist(), []),
+        (mock.call.pylib.device.adb_wrapper.AdbWrapper.Devices(),
+         [self._createAdbWrapperMock(s) for s in test_serials])):
+      devices = device_utils.DeviceUtils.HealthyDevices()
+    for serial, device in zip(test_serials, devices):
+      self.assertTrue(isinstance(device, device_utils.DeviceUtils))
+      self.assertEquals(serial, device.adb.GetDeviceSerial())
+
+  def testHealthyDevices_blacklisted(self):
+    test_serials = ['0123456789abcdef', 'fedcba9876543210']
+    with self.assertCalls(
+        (mock.call.pylib.device.device_blacklist.ReadBlacklist(),
+         ['fedcba9876543210']),
+        (mock.call.pylib.device.adb_wrapper.AdbWrapper.Devices(),
+         [self._createAdbWrapperMock(s) for s in test_serials])):
+      devices = device_utils.DeviceUtils.HealthyDevices()
+    self.assertEquals(1, len(devices))
+    self.assertTrue(isinstance(devices[0], device_utils.DeviceUtils))
+    self.assertEquals('0123456789abcdef', devices[0].adb.GetDeviceSerial())
+
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/device/intent.py b/build/android/pylib/device/intent.py
new file mode 100644
index 0000000..333b9f1
--- /dev/null
+++ b/build/android/pylib/device/intent.py
@@ -0,0 +1,113 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manages intents and associated information.
+
+This is generally intended to be used with functions that call Android's
+'am' command.
+"""
+
+
+class Intent(object):
+
+  def __init__(self, action='android.intent.action.VIEW', activity=None,
+               category=None, component=None, data=None, extras=None,
+               flags=None, package=None):
+    """Creates an Intent.
+
+    Args:
+      action: A string containing the action.
+      activity: A string that, with |package|, can be used to specify the
+                component.
+      category: A string or list containing any categories.
+      component: A string that specifies the component to send the intent to.
+      data: A string containing a data URI.
+      extras: A dict containing extra parameters to be passed along with the
+              intent.
+      flags: A string containing flags to pass.
+      package: A string that, with activity, can be used to specify the
+               component.
+    """
+    self._action = action
+    self._activity = activity
+    if isinstance(category, list) or category is None:
+      self._category = category
+    else:
+      self._category = [category]
+    self._component = component
+    self._data = data
+    self._extras = extras
+    self._flags = flags
+    self._package = package
+
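+    # Keep (package, activity) and component consistent: a full component
+    # string implies both, and both together imply the component.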
+    if self._component and '/' in component:
+      self._package, self._activity = component.split('/', 1)
+    elif self._package and self._activity:
+      self._component = '%s/%s' % (package, activity)
+
+  @property
+  def action(self):
+    return self._action
+
+  @property
+  def activity(self):
+    return self._activity
+
+  @property
+  def category(self):
+    return self._category
+
+  @property
+  def component(self):
+    return self._component
+
+  @property
+  def data(self):
+    return self._data
+
+  @property
+  def extras(self):
+    return self._extras
+
+  @property
+  def flags(self):
+    return self._flags
+
+  @property
+  def package(self):
+    return self._package
+
+  @property
+  def am_args(self):
+    """Returns the intent as a list of arguments for the activity manager.
+
+    For details refer to the specification at:
+    - http://developer.android.com/tools/help/adb.html#IntentSpec
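+
+    Example (illustrative):
+      Intent(action='android.intent.action.VIEW',
+             package='com.example.app', activity='.Main').am_args
+      => ['-a', 'android.intent.action.VIEW', '-n', 'com.example.app/.Main']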
+    """
+    args = []
+    if self.action:
+      args.extend(['-a', self.action])
+    if self.data:
+      args.extend(['-d', self.data])
+    if self.category:
+      args.extend(arg for cat in self.category for arg in ('-c', cat))
+    if self.component:
+      args.extend(['-n', self.component])
+    if self.flags:
+      args.extend(['-f', self.flags])
+    if self.extras:
+      for key, value in self.extras.iteritems():
+        if value is None:
+          args.extend(['--esn', key])
+        elif isinstance(value, str):
+          args.extend(['--es', key, value])
+        elif isinstance(value, bool):
+          args.extend(['--ez', key, str(value)])
+        elif isinstance(value, int):
+          args.extend(['--ei', key, str(value)])
+        elif isinstance(value, float):
+          args.extend(['--ef', key, str(value)])
+        else:
+          raise NotImplementedError(
+              'Intent does not know how to pass %s extras' % type(value))
+    return args
diff --git a/build/android/pylib/device/logcat_monitor.py b/build/android/pylib/device/logcat_monitor.py
new file mode 100644
index 0000000..2eebc2d
--- /dev/null
+++ b/build/android/pylib/device/logcat_monitor.py
@@ -0,0 +1,139 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=unused-argument
+
+import collections
+import itertools
+import logging
+import re
+import subprocess
+import tempfile
+import time
+
+from pylib.device import adb_wrapper
+from pylib.device import decorators
+from pylib.device import device_errors
+
+
+class LogcatMonitor(object):
+
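+  # Matches a single line of 'threadtime'-format logcat output, e.g.
+  # (illustrative): '01-01 12:34:56.789  1234  5678 I chromium: message'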
+  _THREADTIME_RE_FORMAT = (
+      r'(?P<date>\S*) +(?P<time>\S*) +(?P<proc_id>%s) +(?P<thread_id>%s) +'
+      r'(?P<log_level>%s) +(?P<component>%s) *: +(?P<message>%s)$')
+
+  def __init__(self, adb, clear=True, filter_specs=None):
+    """Create a LogcatMonitor instance.
+
+    Args:
+      adb: An instance of adb_wrapper.AdbWrapper.
+      clear: If True, clear the logcat when monitoring starts.
+      filter_specs: An optional list of '<tag>[:priority]' strings.
+    """
+    if isinstance(adb, adb_wrapper.AdbWrapper):
+      self._adb = adb
+    else:
+      raise ValueError('Unsupported type passed for argument "adb"')
+    self._clear = clear
+    self._filter_specs = filter_specs
+    self._logcat_out = None
+    self._logcat_out_file = None
+    self._logcat_proc = None
+
+  @decorators.WithTimeoutAndRetriesDefaults(10, 0)
+  def WaitFor(self, success_regex, failure_regex=None, timeout=None,
+              retries=None):
+    """Wait for a matching logcat line or until a timeout occurs.
+
+    This will attempt to match lines in the logcat against both |success_regex|
+    and |failure_regex| (if provided). Note that this calls re.search on each
+    logcat line, not re.match, so the provided regular expressions don't have
+    to match an entire line.
+
+    Args:
+      success_regex: The regular expression to search for.
+      failure_regex: An optional regular expression that, if hit, causes this
+        to stop looking for a match. Can be None.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A match object if |success_regex| matches a part of a logcat line, or
+      None if |failure_regex| matches a part of a logcat line.
+    Raises:
+      CommandFailedError on logcat failure (NOT on a |failure_regex| match).
+      CommandTimeoutError if no logcat line matching either |success_regex| or
+        |failure_regex| is found in |timeout| seconds.
+      DeviceUnreachableError if the device becomes unreachable.
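+
+    Example (illustrative):
+      m = monitor.WaitFor(r'Test (passed|failed)')
+      outcome = m.group(1) if m else None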
+    """
+    if isinstance(success_regex, basestring):
+      success_regex = re.compile(success_regex)
+    if isinstance(failure_regex, basestring):
+      failure_regex = re.compile(failure_regex)
+
+    logging.debug('Waiting %d seconds for "%s"', timeout, success_regex.pattern)
+
+    # NOTE This will continue looping until:
+    #  - success_regex matches a line, in which case the match object is
+    #    returned.
+    #  - failure_regex matches a line, in which case None is returned
+    #  - the timeout is hit, in which case a CommandTimeoutError is raised.
+    for l in self._adb.Logcat(filter_specs=self._filter_specs):
+      m = success_regex.search(l)
+      if m:
+        return m
+      if failure_regex and failure_regex.search(l):
+        return None
+
+  def FindAll(self, message_regex, proc_id=None, thread_id=None, log_level=None,
+              component=None):
+    """Finds all lines in the logcat that match the provided constraints.
+
+    Args:
+      message_regex: The regular expression that the <message> section must
+        match.
+      proc_id: The process ID to match. If None, matches any process ID.
+      thread_id: The thread ID to match. If None, matches any thread ID.
+      log_level: The log level to match. If None, matches any log level.
+      component: The component to match. If None, matches any component.
+
+    Yields:
+      A match object for each matching line in the logcat. The match object
+      will always contain, in addition to groups defined in |message_regex|,
+      the following named groups: 'date', 'time', 'proc_id', 'thread_id',
+      'log_level', 'component', and 'message'.
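+
+    Example (illustrative):
+      for m in monitor.FindAll(r'took (\d+) ms', component='chromium'):
+        print m.group(1)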
+    """
+    if proc_id is None:
+      proc_id = r'\d+'
+    if thread_id is None:
+      thread_id = r'\d+'
+    if log_level is None:
+      log_level = r'[VDIWEF]'
+    if component is None:
+      component = r'[^\s:]+'
+    threadtime_re = re.compile(
+        type(self)._THREADTIME_RE_FORMAT % (
+            proc_id, thread_id, log_level, component, message_regex))
+
+    for line in self._adb.Logcat(dump=True, logcat_format='threadtime'):
+      m = threadtime_re.match(line)
+      if m:
+        yield m
+
+  def Start(self):
+    """Starts the logcat monitor.
+
+    Clears the logcat if |clear| was set in |__init__|.
+    """
+    if self._clear:
+      self._adb.Logcat(clear=True)
+
+  def __enter__(self):
+    """Starts the logcat monitor."""
+    self.Start()
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    """Stops the logcat monitor."""
+    pass
diff --git a/build/android/pylib/device/logcat_monitor_test.py b/build/android/pylib/device/logcat_monitor_test.py
new file mode 100755
index 0000000..db397e57
--- /dev/null
+++ b/build/android/pylib/device/logcat_monitor_test.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import itertools
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.device import adb_wrapper
+from pylib.device import decorators
+from pylib.device import logcat_monitor
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock  # pylint: disable=F0401
+
+
+class LogcatMonitorTest(unittest.TestCase):
+
+  _TEST_THREADTIME_LOGCAT_DATA = [
+        '01-01 01:02:03.456  7890  0987 V LogcatMonitorTest: '
+            'verbose logcat monitor test message 1',
+        '01-01 01:02:03.457  8901  1098 D LogcatMonitorTest: '
+            'debug logcat monitor test message 2',
+        '01-01 01:02:03.458  9012  2109 I LogcatMonitorTest: '
+            'info logcat monitor test message 3',
+        '01-01 01:02:03.459  0123  3210 W LogcatMonitorTest: '
+            'warning logcat monitor test message 4',
+        '01-01 01:02:03.460  1234  4321 E LogcatMonitorTest: '
+            'error logcat monitor test message 5',
+        '01-01 01:02:03.461  2345  5432 F LogcatMonitorTest: '
+            'fatal logcat monitor test message 6',
+        '01-01 01:02:03.462  3456  6543 D LogcatMonitorTest: '
+            'ignore me',]
+
+  def _createTestLog(self, raw_logcat=None):
+    test_adb = adb_wrapper.AdbWrapper('0123456789abcdef')
+    test_adb.Logcat = mock.Mock(return_value=(l for l in raw_logcat))
+    test_log = logcat_monitor.LogcatMonitor(test_adb, clear=False)
+    return test_log
+
+  def assertIterEqual(self, expected_iter, actual_iter):
+    for expected, actual in itertools.izip_longest(expected_iter, actual_iter):
+      self.assertIsNotNone(
+          expected,
+          msg='actual has unexpected elements starting with %s' % str(actual))
+      self.assertIsNotNone(
+          actual,
+          msg='actual is missing elements starting with %s' % str(expected))
+      self.assertEqual(actual.group('proc_id'), expected[0])
+      self.assertEqual(actual.group('thread_id'), expected[1])
+      self.assertEqual(actual.group('log_level'), expected[2])
+      self.assertEqual(actual.group('component'), expected[3])
+      self.assertEqual(actual.group('message'), expected[4])
+
+    with self.assertRaises(StopIteration):
+      next(actual_iter)
+    with self.assertRaises(StopIteration):
+      next(expected_iter)
+
+  def testWaitFor_success(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_match = test_log.WaitFor(r'.*(fatal|error) logcat monitor.*', None)
+    self.assertTrue(actual_match)
+    self.assertEqual(
+        '01-01 01:02:03.460  1234  4321 E LogcatMonitorTest: '
+            'error logcat monitor test message 5',
+        actual_match.group(0))
+    self.assertEqual('error', actual_match.group(1))
+
+  def testWaitFor_failure(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_match = test_log.WaitFor(
+        r'.*My Success Regex.*', r'.*(fatal|error) logcat monitor.*')
+    self.assertIsNone(actual_match)
+
+  def testFindAll_defaults(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    expected_results = [
+        ('7890', '0987', 'V', 'LogcatMonitorTest',
+         'verbose logcat monitor test message 1'),
+        ('8901', '1098', 'D', 'LogcatMonitorTest',
+         'debug logcat monitor test message 2'),
+        ('9012', '2109', 'I', 'LogcatMonitorTest',
+         'info logcat monitor test message 3'),
+        ('0123', '3210', 'W', 'LogcatMonitorTest',
+         'warning logcat monitor test message 4'),
+        ('1234', '4321', 'E', 'LogcatMonitorTest',
+         'error logcat monitor test message 5'),
+        ('2345', '5432', 'F', 'LogcatMonitorTest',
+         'fatal logcat monitor test message 6')]
+    actual_results = test_log.FindAll(r'\S* logcat monitor test message \d')
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+  def testFindAll_defaults_miss(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    expected_results = []
+    actual_results = test_log.FindAll(r'\S* nothing should match this \d')
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+  def testFindAll_filterProcId(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_results = test_log.FindAll(
+        r'\S* logcat monitor test message \d', proc_id=1234)
+    expected_results = [
+        ('1234', '4321', 'E', 'LogcatMonitorTest',
+         'error logcat monitor test message 5')]
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+  def testFindAll_filterThreadId(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_results = test_log.FindAll(
+        r'\S* logcat monitor test message \d', thread_id=2109)
+    expected_results = [
+        ('9012', '2109', 'I', 'LogcatMonitorTest',
+         'info logcat monitor test message 3')]
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+  def testFindAll_filterLogLevel(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_results = test_log.FindAll(
+        r'\S* logcat monitor test message \d', log_level=r'[DW]')
+    expected_results = [
+        ('8901', '1098', 'D', 'LogcatMonitorTest',
+         'debug logcat monitor test message 2'),
+        ('0123', '3210', 'W', 'LogcatMonitorTest',
+         'warning logcat monitor test message 4'),]
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+  def testFindAll_filterComponent(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_results = test_log.FindAll(r'.*', component='LogcatMonitorTest')
+    expected_results = [
+        ('7890', '0987', 'V', 'LogcatMonitorTest',
+         'verbose logcat monitor test message 1'),
+        ('8901', '1098', 'D', 'LogcatMonitorTest',
+         'debug logcat monitor test message 2'),
+        ('9012', '2109', 'I', 'LogcatMonitorTest',
+         'info logcat monitor test message 3'),
+        ('0123', '3210', 'W', 'LogcatMonitorTest',
+         'warning logcat monitor test message 4'),
+        ('1234', '4321', 'E', 'LogcatMonitorTest',
+         'error logcat monitor test message 5'),
+        ('2345', '5432', 'F', 'LogcatMonitorTest',
+         'fatal logcat monitor test message 6'),
+        ('3456', '6543', 'D', 'LogcatMonitorTest',
+         'ignore me'),]
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/device/shared_prefs.py b/build/android/pylib/device/shared_prefs.py
new file mode 100644
index 0000000..32cef4b
--- /dev/null
+++ b/build/android/pylib/device/shared_prefs.py
@@ -0,0 +1,391 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper object to read and modify Shared Preferences from Android apps.
+
+See e.g.:
+  http://developer.android.com/reference/android/content/SharedPreferences.html
+"""
+
+import collections
+import logging
+import posixpath
+
+from xml.etree import ElementTree
+
+
+_XML_DECLARATION = "<?xml version='1.0' encoding='utf-8' standalone='yes' ?>\n"
+
+
+class BasePref(object):
+  """Base class for getting/setting the value of a specific preference type.
+
+  Should not be instantiated directly. The SharedPrefs collection will
+  instantiate the appropriate subclasses, which directly manipulate the
+  underlying xml document, to parse and serialize values according to their
+  type.
+
+  Args:
+    elem: An xml ElementTree object holding the preference data.
+
+  Properties:
+    tag_name: A string with the tag that must be used for this preference type.
+  """
+  tag_name = None
+
+  def __init__(self, elem):
+    if elem.tag != type(self).tag_name:
+      raise TypeError('Property %r has type %r, but trying to access as %r' %
+                      (elem.get('name'), elem.tag, type(self).tag_name))
+    self._elem = elem
+
+  def __str__(self):
+    """Get the underlying xml element as a string."""
+    return ElementTree.tostring(self._elem)
+
+  def get(self):
+    """Get the value of this preference."""
+    return self._elem.get('value')
+
+  def set(self, value):
+    """Set from a value casted as a string."""
+    self._elem.set('value', str(value))
+
+  @property
+  def has_value(self):
+    """Check whether the element has a value."""
+    return self._elem.get('value') is not None
+
+
+class BooleanPref(BasePref):
+  """Class for getting/setting a preference with a boolean value.
+
+  The underlying xml element has the form, e.g.:
+      <boolean name="featureEnabled" value="false" />
+  """
+  tag_name = 'boolean'
+  VALUES = {'true': True, 'false': False}
+
+  def get(self):
+    """Get the value as a Python bool."""
+    return type(self).VALUES[super(BooleanPref, self).get()]
+
+  def set(self, value):
+    """Set from a value casted as a bool."""
+    super(BooleanPref, self).set('true' if value else 'false')
+
+
+class FloatPref(BasePref):
+  """Class for getting/setting a preference with a float value.
+
+  The underlying xml element has the form, e.g.:
+      <float name="someMetric" value="4.7" />
+  """
+  tag_name = 'float'
+
+  def get(self):
+    """Get the value as a Python float."""
+    return float(super(FloatPref, self).get())
+
+
+class IntPref(BasePref):
+  """Class for getting/setting a preference with an int value.
+
+  The underlying xml element has the form, e.g.:
+      <int name="aCounter" value="1234" />
+  """
+  tag_name = 'int'
+
+  def get(self):
+    """Get the value as a Python int."""
+    return int(super(IntPref, self).get())
+
+
+class LongPref(IntPref):
+  """Class for getting/setting a preference with a long value.
+
+  The underlying xml element has the form, e.g.:
+      <long name="aLongCounter" value="1234" />
+
+  We use the same implementation from IntPref.
+  """
+  tag_name = 'long'
+
+
+class StringPref(BasePref):
+  """Class for getting/setting a preference with a string value.
+
+  The underlying xml element has the form, e.g.:
+      <string name="someHashValue">249b3e5af13d4db2</string>
+  """
+  tag_name = 'string'
+
+  def get(self):
+    """Get the value as a Python string."""
+    return self._elem.text
+
+  def set(self, value):
+    """Set from a value casted as a string."""
+    self._elem.text = str(value)
+
+
+class StringSetPref(StringPref):
+  """Class for getting/setting a preference with a set of string values.
+
+  The underlying xml element has the form, e.g.:
+      <set name="managed_apps">
+          <string>com.mine.app1</string>
+          <string>com.mine.app2</string>
+          <string>com.mine.app3</string>
+      </set>
+  """
+  tag_name = 'set'
+
+  def get(self):
+    """Get a list with the string values contained."""
+    value = []
+    for child in self._elem:
+      assert child.tag == 'string'
+      value.append(child.text)
+    return value
+
+  def set(self, value):
+    """Set from a sequence of values, each casted as a string."""
+    for child in list(self._elem):
+      self._elem.remove(child)
+    for item in value:
+      ElementTree.SubElement(self._elem, 'string').text = str(item)
+
+
+_PREF_TYPES = {c.tag_name: c for c in [BooleanPref, FloatPref, IntPref,
+                                       LongPref, StringPref, StringSetPref]}
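+
+# For example (a sketch): an element parsed from <int name="aCounter"
+# value="1234" /> is wrapped as _PREF_TYPES[elem.tag](elem), i.e.
+# IntPref(elem), whose get() returns the Python int 1234.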
+
+
+class SharedPrefs(object):
+  def __init__(self, device, package, filename):
+    """Helper object to read and update "Shared Prefs" of Android apps.
+
+    Such files typically look like, e.g.:
+
+        <?xml version='1.0' encoding='utf-8' standalone='yes' ?>
+        <map>
+          <int name="databaseVersion" value="107" />
+          <boolean name="featureEnabled" value="false" />
+          <string name="someHashValue">249b3e5af13d4db2</string>
+        </map>
+
+    Example usage:
+
+        prefs = shared_prefs.SharedPrefs(device, 'com.my.app', 'my_prefs.xml')
+        prefs.Load()
+        prefs.GetString('someHashValue') # => '249b3e5af13d4db2'
+        prefs.SetInt('databaseVersion', 42)
+        prefs.Remove('featureEnabled')
+        prefs.Commit()
+
+    The object may also be used as a context manager to automatically load and
+    commit, respectively, upon entering and leaving the context.
+
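+    For example (a sketch; Load() is called on entry and Commit() on a clean
+    exit, so the write is skipped if the block raises):
+
+        with shared_prefs.SharedPrefs(device, 'com.my.app', 'my_prefs.xml') as p:
+          p.SetBoolean('featureEnabled', True)
+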
+    Args:
+      device: A DeviceUtils object.
+      package: A string with the package name of the app that owns the shared
+        preferences file.
+      filename: A string with the name of the preferences file to read/write.
+    """
+    self._device = device
+    self._xml = None
+    self._package = package
+    self._filename = filename
+    self._path = '/data/data/%s/shared_prefs/%s' % (package, filename)
+    self._changed = False
+
+  def __repr__(self):
+    """Get a useful printable representation of the object."""
+    return '<{cls} file {filename} for {package} on {device}>'.format(
+      cls=type(self).__name__, filename=self.filename, package=self.package,
+      device=str(self._device))
+
+  def __str__(self):
+    """Get the underlying xml document as a string."""
+    return _XML_DECLARATION + ElementTree.tostring(self.xml)
+
+  @property
+  def package(self):
+    """Get the package name of the app that owns the shared preferences."""
+    return self._package
+
+  @property
+  def filename(self):
+    """Get the filename of the shared preferences file."""
+    return self._filename
+
+  @property
+  def path(self):
+    """Get the full path to the shared preferences file on the device."""
+    return self._path
+
+  @property
+  def changed(self):
+    """True if properties have changed and a commit would be needed."""
+    return self._changed
+
+  @property
+  def xml(self):
+    """Get the underlying xml document as an ElementTree object."""
+    if self._xml is None:
+      self._xml = ElementTree.Element('map')
+    return self._xml
+
+  def Load(self):
+    """Load the shared preferences file from the device.
+
+    An empty xml document, which may be modified and saved on |Commit|, is
+    created if the file does not already exist.
+    """
+    if self._device.FileExists(self.path):
+      self._xml = ElementTree.fromstring(
+          self._device.ReadFile(self.path, as_root=True))
+      assert self._xml.tag == 'map'
+    else:
+      self._xml = None
+    self._changed = False
+
+  def Clear(self):
+    """Clear all of the preferences contained in this object."""
+    if self._xml is not None and len(self): # only clear if not already empty
+      self._xml = None
+      self._changed = True
+
+  def Commit(self):
+    """Save the current set of preferences to the device.
+
+    Only actually saves if some preferences have been modified.
+    """
+    if not self.changed:
+      return
+    self._device.RunShellCommand(
+        ['mkdir', '-p', posixpath.dirname(self.path)],
+        as_root=True, check_return=True)
+    self._device.WriteFile(self.path, str(self), as_root=True)
+    self._device.KillAll(self.package, as_root=True, quiet=True)
+    self._changed = False
+
+  def __len__(self):
+    """Get the number of preferences in this collection."""
+    return len(self.xml)
+
+  def PropertyType(self, key):
+    """Get the type (i.e. tag name) of a property in the collection."""
+    return self._GetChild(key).tag
+
+  def HasProperty(self, key):
+    """Check whether the collection has a property with the given key."""
+    try:
+      self._GetChild(key)
+      return True
+    except KeyError:
+      return False
+
+  def GetBoolean(self, key):
+    """Get a boolean property."""
+    return BooleanPref(self._GetChild(key)).get()
+
+  def SetBoolean(self, key, value):
+    """Set a boolean property."""
+    self._SetPrefValue(key, value, BooleanPref)
+
+  def GetFloat(self, key):
+    """Get a float property."""
+    return FloatPref(self._GetChild(key)).get()
+
+  def SetFloat(self, key, value):
+    """Set a float property."""
+    self._SetPrefValue(key, value, FloatPref)
+
+  def GetInt(self, key):
+    """Get an int property."""
+    return IntPref(self._GetChild(key)).get()
+
+  def SetInt(self, key, value):
+    """Set an int property."""
+    self._SetPrefValue(key, value, IntPref)
+
+  def GetLong(self, key):
+    """Get a long property."""
+    return LongPref(self._GetChild(key)).get()
+
+  def SetLong(self, key, value):
+    """Set a long property."""
+    self._SetPrefValue(key, value, LongPref)
+
+  def GetString(self, key):
+    """Get a string property."""
+    return StringPref(self._GetChild(key)).get()
+
+  def SetString(self, key, value):
+    """Set a string property."""
+    self._SetPrefValue(key, value, StringPref)
+
+  def GetStringSet(self, key):
+    """Get a string set property."""
+    return StringSetPref(self._GetChild(key)).get()
+
+  def SetStringSet(self, key, value):
+    """Set a string set property."""
+    self._SetPrefValue(key, value, StringSetPref)
+
+  def Remove(self, key):
+    """Remove a preference from the collection."""
+    self.xml.remove(self._GetChild(key))
+
+  def AsDict(self):
+    """Return the properties and their values as a dictionary."""
+    d = {}
+    for child in self.xml:
+      pref = _PREF_TYPES[child.tag](child)
+      d[child.get('name')] = pref.get()
+    return d
+
+  def __enter__(self):
+    """Load preferences file from the device when entering a context."""
+    self.Load()
+    return self
+
+  def __exit__(self, exc_type, _exc_value, _traceback):
+    """Save preferences file to the device when leaving a context."""
+    if not exc_type:
+      self.Commit()
+
+  def _GetChild(self, key):
+    """Get the underlying xml node that holds the property of a given key.
+
+    Raises:
+      KeyError when the key is not found in the collection.
+    """
+    for child in self.xml:
+      if child.get('name') == key:
+        return child
+    raise KeyError(key)
+
+  def _SetPrefValue(self, key, value, pref_cls):
+    """Set the value of a property.
+
+    Args:
+      key: The key of the property to set.
+      value: The new value of the property.
+      pref_cls: A subclass of BasePref used to access the property.
+
+    Raises:
+      TypeError when the key already exists but with a different type.
+    """
+    try:
+      pref = pref_cls(self._GetChild(key))
+      old_value = pref.get()
+    except KeyError:
+      pref = pref_cls(ElementTree.SubElement(
+          self.xml, pref_cls.tag_name, {'name': key}))
+      old_value = None
+    if old_value != value:
+      pref.set(value)
+      self._changed = True
+      logging.info('Setting property: %s', pref)
diff --git a/build/android/pylib/device/shared_prefs_test.py b/build/android/pylib/device/shared_prefs_test.py
new file mode 100755
index 0000000..c5f0ec3
--- /dev/null
+++ b/build/android/pylib/device/shared_prefs_test.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of shared_prefs.py (mostly SharedPrefs).
+"""
+
+import logging
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.device import device_utils
+from pylib.device import shared_prefs
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock
+
+
+def MockDeviceWithFiles(files=None):
+  if files is None:
+    files = {}
+
+  def file_exists(path):
+    return path in files
+
+  def write_file(path, contents, **_kwargs):
+    files[path] = contents
+
+  def read_file(path, **_kwargs):
+    return files[path]
+
+  device = mock.MagicMock(spec=device_utils.DeviceUtils)
+  device.FileExists = mock.Mock(side_effect=file_exists)
+  device.WriteFile = mock.Mock(side_effect=write_file)
+  device.ReadFile = mock.Mock(side_effect=read_file)
+  return device
+
+
+class SharedPrefsTest(unittest.TestCase):
+
+  def setUp(self):
+    self.device = MockDeviceWithFiles({
+      '/data/data/com.some.package/shared_prefs/prefs.xml':
+          "<?xml version='1.0' encoding='utf-8' standalone='yes' ?>\n"
+          '<map>\n'
+          '  <int name="databaseVersion" value="107" />\n'
+          '  <boolean name="featureEnabled" value="false" />\n'
+          '  <string name="someHashValue">249b3e5af13d4db2</string>\n'
+          '</map>'})
+    self.expected_data = {'databaseVersion': 107,
+                          'featureEnabled': False,
+                          'someHashValue': '249b3e5af13d4db2'}
+
+  def testPropertyLifetime(self):
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml')
+    self.assertEquals(len(prefs), 0) # collection is empty before loading
+    prefs.SetInt('myValue', 444)
+    self.assertEquals(len(prefs), 1)
+    self.assertEquals(prefs.GetInt('myValue'), 444)
+    self.assertTrue(prefs.HasProperty('myValue'))
+    prefs.Remove('myValue')
+    self.assertEquals(len(prefs), 0)
+    self.assertFalse(prefs.HasProperty('myValue'))
+    with self.assertRaises(KeyError):
+      prefs.GetInt('myValue')
+
+  def testPropertyType(self):
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml')
+    prefs.SetInt('myValue', 444)
+    self.assertEquals(prefs.PropertyType('myValue'), 'int')
+    with self.assertRaises(TypeError):
+      prefs.GetString('myValue')
+    with self.assertRaises(TypeError):
+      prefs.SetString('myValue', 'hello')
+
+  def testLoad(self):
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml')
+    self.assertEquals(len(prefs), 0) # collection is empty before loading
+    prefs.Load()
+    self.assertEquals(len(prefs), len(self.expected_data))
+    self.assertEquals(prefs.AsDict(), self.expected_data)
+    self.assertFalse(prefs.changed)
+
+  def testClear(self):
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml')
+    prefs.Load()
+    self.assertEquals(prefs.AsDict(), self.expected_data)
+    self.assertFalse(prefs.changed)
+    prefs.Clear()
+    self.assertEquals(len(prefs), 0) # collection is empty now
+    self.assertTrue(prefs.changed)
+
+  def testCommit(self):
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'other_prefs.xml')
+    self.assertFalse(self.device.FileExists(prefs.path)) # file does not exist
+    prefs.Load()
+    self.assertEquals(len(prefs), 0) # file did not exist, collection is empty
+    prefs.SetInt('magicNumber', 42)
+    prefs.SetFloat('myMetric', 3.14)
+    prefs.SetLong('bigNumber', 6000000000)
+    prefs.SetStringSet('apps', ['gmail', 'chrome', 'music'])
+    self.assertFalse(self.device.FileExists(prefs.path)) # still does not exist
+    self.assertTrue(prefs.changed)
+    prefs.Commit()
+    self.assertTrue(self.device.FileExists(prefs.path)) # should exist now
+    self.device.KillAll.assert_called_once_with(prefs.package, as_root=True,
+                                                quiet=True)
+    self.assertFalse(prefs.changed)
+
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'other_prefs.xml')
+    self.assertEquals(len(prefs), 0) # collection is empty before loading
+    prefs.Load()
+    self.assertEquals(prefs.AsDict(), {
+        'magicNumber': 42,
+        'myMetric': 3.14,
+        'bigNumber': 6000000000,
+        'apps': ['gmail', 'chrome', 'music']}) # data survived roundtrip
+
+  def testAsContextManager_onlyReads(self):
+    with shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml') as prefs:
+      self.assertEquals(prefs.AsDict(), self.expected_data) # loaded and ready
+    self.assertEquals(self.device.WriteFile.call_args_list, []) # did not write
+
+  def testAsContextManager_readAndWrite(self):
+    with shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml') as prefs:
+      prefs.SetBoolean('featureEnabled', True)
+      prefs.Remove('someHashValue')
+      prefs.SetString('newString', 'hello')
+
+    self.assertTrue(self.device.WriteFile.called) # did write
+    with shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml') as prefs:
+      # changes persisted
+      self.assertTrue(prefs.GetBoolean('featureEnabled'))
+      self.assertFalse(prefs.HasProperty('someHashValue'))
+      self.assertEquals(prefs.GetString('newString'), 'hello')
+      self.assertTrue(prefs.HasProperty('databaseVersion')) # still there
+
+  def testAsContextManager_commitAborted(self):
+    with self.assertRaises(TypeError):
+      with shared_prefs.SharedPrefs(
+          self.device, 'com.some.package', 'prefs.xml') as prefs:
+        prefs.SetBoolean('featureEnabled', True)
+        prefs.Remove('someHashValue')
+        prefs.SetString('newString', 'hello')
+        prefs.SetInt('newString', 123) # oops!
+
+    self.assertEquals(self.device.WriteFile.call_args_list, []) # did not write
+    with shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml') as prefs:
+      # contents were not modified
+      self.assertEquals(prefs.AsDict(), self.expected_data)
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
diff --git a/build/android/pylib/device_settings.py b/build/android/pylib/device_settings.py
new file mode 100644
index 0000000..beabcff
--- /dev/null
+++ b/build/android/pylib/device_settings.py
@@ -0,0 +1,198 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from pylib import constants
+from pylib import content_settings
+from pylib.device import device_errors
+
+_LOCK_SCREEN_SETTINGS_PATH = '/data/system/locksettings.db'
+_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH = (
+    '/data/data/com.android.providers.settings/databases/settings.db')
+PASSWORD_QUALITY_UNSPECIFIED = '0'
+
+
+def ConfigureContentSettings(device, desired_settings):
+  """Configures device content setings from a list.
+
+  Many settings are documented at:
+    http://developer.android.com/reference/android/provider/Settings.Global.html
+    http://developer.android.com/reference/android/provider/Settings.Secure.html
+    http://developer.android.com/reference/android/provider/Settings.System.html
+
+  Many others are undocumented.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+    desired_settings: A list of (table, [(key, value), ...]) for all
+        settings to configure.
+  """
+  if device.build_type == 'userdebug':
+    for table, key_value in desired_settings:
+      settings = content_settings.ContentSettings(table, device)
+      for key, value in key_value:
+        settings[key] = value
+      logging.info('\n%s %s', table, (80 - len(table)) * '-')
+      for key, value in sorted(settings.iteritems()):
+        logging.info('\t%s: %s', key, value)
+
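+# For example (a sketch; applies one of the canned setting lists defined
+# later in this module):
+#
+#   device_settings.ConfigureContentSettings(
+#       device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS)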
+
+def SetLockScreenSettings(device):
+  """Sets lock screen settings on the device.
+
+  On certain device/Android configurations we need to disable the lock screen in
+  a different database. Additionally, the password type must be set to
+  DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED.
+  Lock screen settings are stored in sqlite on the device in:
+      /data/system/locksettings.db
+
+  IMPORTANT: The first column is used as a primary key so that all rows with the
+  same value for that column are removed from the table prior to inserting the
+  new values.
+
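+  For example (a sketch), for the 'locksettings' table the command built
+  below would be:
+
+      begin transaction;
+      delete from 'locksettings' where name='lockscreen.disabled';
+      insert into 'locksettings' (name, user, value)
+          values ('lockscreen.disabled', '0', '1');
+      commit transaction;
+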
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+
+  Raises:
+    Exception if the setting was not properly set.
+  """
+  if device.build_type != 'userdebug':
+    logging.warning('Unable to disable lockscreen on user builds.')
+    return
+
+  def get_lock_settings(table):
+    return [(table, 'lockscreen.disabled', '1'),
+            (table, 'lockscreen.password_type', PASSWORD_QUALITY_UNSPECIFIED),
+            (table, 'lockscreen.password_type_alternate',
+             PASSWORD_QUALITY_UNSPECIFIED)]
+
+  if device.FileExists(_LOCK_SCREEN_SETTINGS_PATH):
+    db = _LOCK_SCREEN_SETTINGS_PATH
+    locksettings = get_lock_settings('locksettings')
+    columns = ['name', 'user', 'value']
+    generate_values = lambda k, v: [k, '0', v]
+  elif device.FileExists(_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH):
+    db = _ALTERNATE_LOCK_SCREEN_SETTINGS_PATH
+    locksettings = get_lock_settings('secure') + get_lock_settings('system')
+    columns = ['name', 'value']
+    generate_values = lambda k, v: [k, v]
+  else:
+    logging.warning('Unable to find database file to set lock screen settings.')
+    return
+
+  for table, key, value in locksettings:
+    # Set the lockscreen setting for default user '0'
+    values = generate_values(key, value)
+
+    cmd = """begin transaction;
+delete from '%(table)s' where %(primary_key)s='%(primary_value)s';
+insert into '%(table)s' (%(columns)s) values (%(values)s);
+commit transaction;""" % {
+      'table': table,
+      'primary_key': columns[0],
+      'primary_value': values[0],
+      'columns': ', '.join(columns),
+      'values': ', '.join(["'%s'" % value for value in values])
+    }
+    output_msg = device.RunShellCommand('sqlite3 %s "%s"' % (db, cmd),
+                                        as_root=True)
+    if output_msg:
+      logging.info(' '.join(output_msg))
+
+
+ENABLE_LOCATION_SETTINGS = [
+  # Note that these settings must be applied in this order for all of them
+  # to take effect and persist through a reboot.
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 1),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is enabled and allowed for tests.
+    ('location_providers_allowed', 'gps,network'),
+  ]),
+  ('com.google.settings/partner', [
+    ('network_location_opt_in', 1),
+  ])
+]
+
+DISABLE_LOCATION_SETTINGS = [
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 0),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is disabled.
+    ('location_providers_allowed', ''),
+  ]),
+]
+
+ENABLE_MOCK_LOCATION_SETTINGS = [
+  ('settings/secure', [
+    ('mock_location', 1),
+  ]),
+]
+
+DISABLE_MOCK_LOCATION_SETTINGS = [
+  ('settings/secure', [
+    ('mock_location', 0),
+  ]),
+]
+
+DETERMINISTIC_DEVICE_SETTINGS = [
+  ('settings/global', [
+    ('assisted_gps_enabled', 0),
+
+    # Disable "auto time" and "auto time zone" to avoid network-provided time
+    # to overwrite the device's datetime and timezone synchronized from host
+    # when running tests later. See b/6569849.
+    ('auto_time', 0),
+    ('auto_time_zone', 0),
+
+    ('development_settings_enabled', 1),
+
+    # Flag for allowing ActivityManagerService to send ACTION_APP_ERROR intents
+    # on application crashes and ANRs. If this is disabled, the crash/ANR dialog
+    # will never display the "Report" button.
+    # Type: int ( 0 = disallow, 1 = allow )
+    ('send_action_app_error', 0),
+
+    ('stay_on_while_plugged_in', 3),
+
+    ('verifier_verify_adb_installs', 0),
+  ]),
+  ('settings/secure', [
+    ('allowed_geolocation_origins',
+        'http://www.google.co.uk http://www.google.com'),
+
+    # Ensure that we never get random dialogs like "Unfortunately the process
+    # android.process.acore has stopped", which steal the focus and make our
+    # automation fail (the dialog mistakenly receives the injected user input
+    # events).
+    ('anr_show_background', 0),
+
+    ('lockscreen.disabled', 1),
+
+    ('screensaver_enabled', 0),
+  ]),
+  ('settings/system', [
+    # Don't want devices to accidentally rotate the screen as that could
+    # affect performance measurements.
+    ('accelerometer_rotation', 0),
+
+    ('lockscreen.disabled', 1),
+
+    # Turn down brightness and disable auto-adjust so that devices run cooler.
+    ('screen_brightness', 5),
+    ('screen_brightness_mode', 0),
+
+    ('user_rotation', 0),
+  ]),
+]
+
+NETWORK_DISABLED_SETTINGS = [
+  ('settings/global', [
+    ('airplane_mode_on', 1),
+    ('wifi_on', 0),
+  ]),
+]
diff --git a/build/android/pylib/device_signal.py b/build/android/pylib/device_signal.py
new file mode 100644
index 0000000..6a5b709
--- /dev/null
+++ b/build/android/pylib/device_signal.py
@@ -0,0 +1,41 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines constants for signals that should be supported on devices.
+
+Note: Obtained by running `kill -l` on a user device.
+"""
+
+
+SIGHUP = 1 # Hangup
+SIGINT = 2 # Interrupt
+SIGQUIT = 3 # Quit
+SIGILL = 4 # Illegal instruction
+SIGTRAP = 5 # Trap
+SIGABRT = 6 # Aborted
+SIGBUS = 7 # Bus error
+SIGFPE = 8 # Floating point exception
+SIGKILL = 9 # Killed
+SIGUSR1 = 10 # User signal 1
+SIGSEGV = 11 # Segmentation fault
+SIGUSR2 = 12 # User signal 2
+SIGPIPE = 13 # Broken pipe
+SIGALRM = 14 # Alarm clock
+SIGTERM = 15 # Terminated
+SIGSTKFLT = 16 # Stack fault
+SIGCHLD = 17 # Child exited
+SIGCONT = 18 # Continue
+SIGSTOP = 19 # Stopped (signal)
+SIGTSTP = 20 # Stopped
+SIGTTIN = 21 # Stopped (tty input)
+SIGTTOU = 22 # Stopped (tty output)
+SIGURG = 23 # Urgent I/O condition
+SIGXCPU = 24 # CPU time limit exceeded
+SIGXFSZ = 25 # File size limit exceeded
+SIGVTALRM = 26 # Virtual timer expired
+SIGPROF = 27 # Profiling timer expired
+SIGWINCH = 28 # Window size changed
+SIGIO = 29 # I/O possible
+SIGPWR = 30 # Power failure
+SIGSYS = 31 # Bad system call
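+
+# Example (a sketch; assumes a DeviceUtils-style KillAll that accepts a
+# signal number):
+#
+#   from pylib import device_signal
+#   device.KillAll('com.some.package', signum=device_signal.SIGTERM)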
diff --git a/build/android/pylib/efficient_android_directory_copy.sh b/build/android/pylib/efficient_android_directory_copy.sh
new file mode 100755
index 0000000..7021109
--- /dev/null
+++ b/build/android/pylib/efficient_android_directory_copy.sh
@@ -0,0 +1,78 @@
+#!/system/bin/sh
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android shell script to make the destination directory identical with the
+# source directory, without doing unnecessary copies. This assumes that the
+# destination directory was originally a copy of the source directory, and
+# has since been modified.
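+#
+# Usage (a sketch; both paths are on-device):
+#   sh efficient_android_directory_copy.sh /data/local/tmp/src /data/local/tmp/dst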
+
+source=$1
+dest=$2
+echo copying $source to $dest
+
+delete_extra() {
+  # Don't delete symbolic links, since doing so deletes the vital lib link.
+  if [ ! -L "$1" ]
+  then
+    if [ ! -e "$source/$1" ]
+    then
+      echo rm -rf "$dest/$1"
+      rm -rf "$dest/$1"
+    elif [ -d "$1" ]
+    then
+      for f in "$1"/*
+      do
+        delete_extra "$f"
+      done
+    fi
+  fi
+}
+
+copy_if_older() {
+  if [ -d "$1" ] && [ -e "$dest/$1" ]
+  then
+    for f in "$1"/*
+    do
+      copy_if_older "$f"
+    done
+  elif [ ! -e "$dest/$1" ] || [ "$1" -ot "$dest/$1" ] || [ "$1" -nt "$dest/$1" ]
+  then
+    # The dates differ, so either the destination or the source has changed.
+    echo cp -a "$1" "$dest/$1"
+    cp -a "$1" "$dest/$1"
+  fi
+}
+
+if [ -e "$dest" ]
+then
+  echo cd "$dest"
+  cd "$dest"
+  for f in ./*
+  do
+    if [ -e "$f" ]
+    then
+      delete_extra "$f"
+    fi
+  done
+else
+  echo mkdir "$dest"
+  mkdir "$dest"
+fi
+echo cd "$source"
+cd "$source"
+for f in ./*
+do
+  if [ -e "$f" ]
+  then
+    copy_if_older "$f"
+  fi
+done
diff --git a/build/android/pylib/flag_changer.py b/build/android/pylib/flag_changer.py
new file mode 100644
index 0000000..718bc39
--- /dev/null
+++ b/build/android/pylib/flag_changer.py
@@ -0,0 +1,166 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+import pylib.android_commands
+import pylib.device.device_utils
+
+from pylib.device import device_errors
+
+
+class FlagChanger(object):
+  """Changes the flags Chrome runs with.
+
+  There are two different use cases for this file:
+  * Flags are permanently set by calling Set().
+  * Flags can be temporarily set for a particular set of unit tests.  These
+    tests should call Restore() to revert the flags to their original state
+    once the tests have completed.
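+
+  For example (a sketch; the cmdline_file path is hypothetical):
+
+      changer = FlagChanger(device, '/data/local/tmp/chrome-command-line')
+      changer.AddFlags(['--enable-test-intents'])
+      try:
+        ...  # run the tests
+      finally:
+        changer.Restore()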
+  """
+
+  def __init__(self, device, cmdline_file):
+    """Initializes the FlagChanger and records the original arguments.
+
+    Args:
+      device: A DeviceUtils instance.
+      cmdline_file: Path to the command line file on the device.
+    """
+    # TODO(jbudorick) Remove once telemetry switches over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    self._device = device
+    self._cmdline_file = cmdline_file
+
+    # Save the original flags.
+    try:
+      self._orig_line = self._device.ReadFile(self._cmdline_file).strip()
+    except device_errors.CommandFailedError:
+      self._orig_line = ''
+
+    # Parse out the flags into a list to facilitate adding and removing flags.
+    self._current_flags = self._TokenizeFlags(self._orig_line)
+
+  def Get(self):
+    """Returns list of current flags."""
+    return self._current_flags
+
+  def Set(self, flags):
+    """Replaces all flags on the current command line with the flags given.
+
+    Args:
+      flags: A list of flags to set, eg. ['--single-process'].
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    self._current_flags = flags
+    self._UpdateCommandLineFile()
+
+  def AddFlags(self, flags):
+    """Appends flags to the command line if they aren't already there.
+
+    Args:
+      flags: A list of flags to add on, eg. ['--single-process'].
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    # Avoid appending flags that are already present.
+    for flag in flags:
+      if flag not in self._current_flags:
+        self._current_flags.append(flag)
+    self._UpdateCommandLineFile()
+
+  def RemoveFlags(self, flags):
+    """Removes flags from the command line, if they exist.
+
+    Args:
+      flags: A list of flags to remove, eg. ['--single-process'].  Note that we
+             expect a complete match when removing flags; if you want to remove
+             a switch with a value, you must use the exact string used to add
+             it in the first place.
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    for flag in flags:
+      if flag in self._current_flags:
+        self._current_flags.remove(flag)
+    self._UpdateCommandLineFile()
+
+  def Restore(self):
+    """Restores the flags to their original state."""
+    self._current_flags = self._TokenizeFlags(self._orig_line)
+    self._UpdateCommandLineFile()
+
+  def _UpdateCommandLineFile(self):
+    """Writes out the command line to the file, or removes it if empty."""
+    logging.info('Current flags: %s', self._current_flags)
+    # Root is not required to write to /data/local/tmp/.
+    use_root = '/data/local/tmp/' not in self._cmdline_file
+    if self._current_flags:
+      # The first command line argument doesn't matter as we are not actually
+      # launching the chrome executable using this command line.
+      cmd_line = ' '.join(['_'] + self._current_flags)
+      self._device.WriteFile(
+          self._cmdline_file, cmd_line, as_root=use_root)
+      file_contents = self._device.ReadFile(
+          self._cmdline_file, as_root=use_root).rstrip()
+      assert file_contents == cmd_line, (
+          'Failed to set the command line file at %s' % self._cmdline_file)
+    else:
+      self._device.RunShellCommand('rm ' + self._cmdline_file,
+                                   as_root=use_root)
+      assert not self._device.FileExists(self._cmdline_file), (
+          'Failed to remove the command line file at %s' % self._cmdline_file)
+
+  @staticmethod
+  def _TokenizeFlags(line):
+    """Changes the string containing the command line into a list of flags.
+
+    Follows similar logic to CommandLine.java::tokenizeQuotedArguments:
+    * Flags are split using whitespace, unless the whitespace is within a
+      pair of quotation marks.
+    * Unlike the Java version, we keep the quotation marks around switch
+      values since we need them to re-create the file when new flags are
+      appended.
+
+    Args:
+      line: A string containing the entire command line.  The first token is
+            assumed to be the program name.
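+
+    For example (a sketch; quotes are preserved and the program name is
+    dropped):
+
+        _TokenizeFlags('chrome --foo --bar="a b"')
+        # => ['--foo', '--bar="a b"']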
+    """
+    if not line:
+      return []
+
+    tokenized_flags = []
+    current_flag = ""
+    within_quotations = False
+
+    # Move through the string character by character and build up each flag
+    # along the way.
+    for c in line.strip():
+      if c is '"':
+        if len(current_flag) > 0 and current_flag[-1] == '\\':
+          # Last char was a backslash; pop it, and treat this " as a literal.
+          current_flag = current_flag[0:-1] + '"'
+        else:
+          within_quotations = not within_quotations
+          current_flag += c
+      elif not within_quotations and (c == ' ' or c == '\t'):
+        if current_flag != "":
+          tokenized_flags.append(current_flag)
+          current_flag = ""
+      else:
+        current_flag += c
+
+    # Tack on the last flag.
+    if not current_flag:
+      if within_quotations:
+        logging.warn('Unterminated quoted argument: ' + line)
+    else:
+      tokenized_flags.append(current_flag)
+
+    # Return everything but the program name.
+    return tokenized_flags[1:]
diff --git a/build/android/pylib/forwarder.py b/build/android/pylib/forwarder.py
new file mode 100644
index 0000000..c8c47d6
--- /dev/null
+++ b/build/android/pylib/forwarder.py
@@ -0,0 +1,331 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=W0212
+
+import fcntl
+import logging
+import os
+import psutil
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import valgrind_tools
+
+# TODO(jbudorick) Remove once telemetry gets switched over.
+import pylib.android_commands
+import pylib.device.device_utils
+
+
+def _GetProcessStartTime(pid):
+  return psutil.Process(pid).create_time
+
+
+class _FileLock(object):
+  """With statement-aware implementation of a file lock.
+
+  File locks are needed for cross-process synchronization when the
+  multiprocessing Python module is used.
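+
+  Example (a sketch):
+
+      with _FileLock('/tmp/my.lock'):
+        pass  # critical section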
+  """
+  def __init__(self, path):
+    self._fd = -1
+    self._path = path
+
+  def __enter__(self):
+    self._fd = os.open(self._path, os.O_RDONLY | os.O_CREAT)
+    if self._fd < 0:
+      raise Exception('Could not open file %s for reading' % self._path)
+    fcntl.flock(self._fd, fcntl.LOCK_EX)
+
+  def __exit__(self, _exception_type, _exception_value, traceback):
+    fcntl.flock(self._fd, fcntl.LOCK_UN)
+    os.close(self._fd)
+
+
+class Forwarder(object):
+  """Thread-safe class to manage port forwards from the device to the host."""
+
+  _DEVICE_FORWARDER_FOLDER = (constants.TEST_EXECUTABLE_DIR +
+                              '/forwarder/')
+  _DEVICE_FORWARDER_PATH = (constants.TEST_EXECUTABLE_DIR +
+                            '/forwarder/device_forwarder')
+  _LOCK_PATH = '/tmp/chrome.forwarder.lock'
+  # Defined in host_forwarder_main.cc
+  _HOST_FORWARDER_LOG = '/tmp/host_forwarder_log'
+
+  _instance = None
+
+  @staticmethod
+  def Map(port_pairs, device, tool=None):
+    """Runs the forwarder.
+
+    Args:
+      port_pairs: A list of tuples (device_port, host_port) to forward. Note
+                 that you can specify 0 as a device_port, in which case a
+                 port will by dynamically assigned on the device. You can
+                 get the number of the assigned port using the
+                 DevicePortForHostPort method.
+      device: A DeviceUtils instance.
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+
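+    For example (a sketch; forwards a dynamically assigned device port to
+    host port 8000, then looks up the assigned device port):
+
+        Forwarder.Map([(0, 8000)], device)
+        device_port = Forwarder.DevicePortForHostPort(8000)
+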
+    Raises:
+      Exception on failure to forward the port.
+    """
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    if not tool:
+      tool = valgrind_tools.CreateTool(None, device)
+    with _FileLock(Forwarder._LOCK_PATH):
+      instance = Forwarder._GetInstanceLocked(tool)
+      instance._InitDeviceLocked(device, tool)
+
+      device_serial = str(device)
+      redirection_commands = [
+          ['--adb=' + constants.GetAdbPath(),
+           '--serial-id=' + device_serial,
+           '--map', str(device_port), str(host_port)]
+          for device_port, host_port in port_pairs]
+      logging.info('Forwarding using commands: %s', redirection_commands)
+
+      for redirection_command in redirection_commands:
+        try:
+          (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+              [instance._host_forwarder_path] + redirection_command)
+        except OSError as e:
+          if e.errno == 2:
+            raise Exception('Unable to start host forwarder. Make sure you have'
+                            ' built host_forwarder.')
+          else:
+            raise
+        if exit_code != 0:
+          Forwarder._KillDeviceLocked(device, tool)
+          raise Exception('%s exited with %d:\n%s' % (
+              instance._host_forwarder_path, exit_code, '\n'.join(output)))
+        tokens = output.split(':')
+        if len(tokens) != 2:
+          raise Exception('Unexpected host forwarder output "%s", '
+                          'expected "device_port:host_port"' % output)
+        device_port = int(tokens[0])
+        host_port = int(tokens[1])
+        serial_with_port = (device_serial, device_port)
+        instance._device_to_host_port_map[serial_with_port] = host_port
+        instance._host_to_device_port_map[host_port] = serial_with_port
+        logging.info('Forwarding device port: %d to host port: %d.',
+                     device_port, host_port)
+
+  @staticmethod
+  def UnmapDevicePort(device_port, device):
+    """Unmaps a previously forwarded device port.
+
+    Args:
+      device: A DeviceUtils instance.
+      device_port: A previously forwarded port (through Map()).
+    """
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    with _FileLock(Forwarder._LOCK_PATH):
+      Forwarder._UnmapDevicePortLocked(device_port, device)
+
+  @staticmethod
+  def UnmapAllDevicePorts(device):
+    """Unmaps all the previously forwarded ports for the provided device.
+
+    Args:
+      device: A DeviceUtils instance.
+    """
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    with _FileLock(Forwarder._LOCK_PATH):
+      if not Forwarder._instance:
+        return
+      adb_serial = str(device)
+      if adb_serial not in Forwarder._instance._initialized_devices:
+        return
+      port_map = Forwarder._GetInstanceLocked(
+          None)._device_to_host_port_map
+      for (device_serial, device_port) in port_map.keys():
+        if adb_serial == device_serial:
+          Forwarder._UnmapDevicePortLocked(device_port, device)
+      # There are no more ports mapped, kill the device_forwarder.
+      tool = valgrind_tools.CreateTool(None, device)
+      Forwarder._KillDeviceLocked(device, tool)
+
+  @staticmethod
+  def DevicePortForHostPort(host_port):
+    """Returns the device port that corresponds to a given host port."""
+    with _FileLock(Forwarder._LOCK_PATH):
+      (_device_serial, device_port) = Forwarder._GetInstanceLocked(
+          None)._host_to_device_port_map.get(host_port)
+      return device_port
+
+  @staticmethod
+  def RemoveHostLog():
+    if os.path.exists(Forwarder._HOST_FORWARDER_LOG):
+      os.unlink(Forwarder._HOST_FORWARDER_LOG)
+
+  @staticmethod
+  def GetHostLog():
+    if not os.path.exists(Forwarder._HOST_FORWARDER_LOG):
+      return ''
+    with open(Forwarder._HOST_FORWARDER_LOG, 'r') as f:
+      return f.read()
+
+  @staticmethod
+  def _GetInstanceLocked(tool):
+    """Returns the singleton instance.
+
+    Note that the global lock must be acquired before calling this method.
+
+    Args:
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+    """
+    if not Forwarder._instance:
+      Forwarder._instance = Forwarder(tool)
+    return Forwarder._instance
+
+  def __init__(self, tool):
+    """Constructs a new instance of Forwarder.
+
+    Note that Forwarder is a singleton therefore this constructor should be
+    called only once.
+
+    Args:
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+    """
+    assert not Forwarder._instance
+    self._tool = tool
+    self._initialized_devices = set()
+    self._device_to_host_port_map = dict()
+    self._host_to_device_port_map = dict()
+    self._host_forwarder_path = os.path.join(
+        constants.GetOutDirectory(), 'host_forwarder')
+    assert os.path.exists(self._host_forwarder_path), 'Please build forwarder2'
+    self._device_forwarder_path_on_host = os.path.join(
+        constants.GetOutDirectory(), 'forwarder_dist')
+    self._InitHostLocked()
+
+  @staticmethod
+  def _UnmapDevicePortLocked(device_port, device):
+    """Internal method used by UnmapDevicePort().
+
+    Note that the global lock must be acquired before calling this method.
+    """
+    instance = Forwarder._GetInstanceLocked(None)
+    serial = str(device)
+    serial_with_port = (serial, device_port)
+    if not serial_with_port in instance._device_to_host_port_map:
+      logging.error('Trying to unmap non-forwarded port %d', device_port)
+      return
+    redirection_command = ['--adb=' + constants.GetAdbPath(),
+                           '--serial-id=' + serial,
+                           '--unmap', str(device_port)]
+    (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+        [instance._host_forwarder_path] + redirection_command)
+    if exit_code != 0:
+      logging.error('%s exited with %d:\n%s' % (
+          instance._host_forwarder_path, exit_code, '\n'.join(output)))
+    host_port = instance._device_to_host_port_map[serial_with_port]
+    del instance._device_to_host_port_map[serial_with_port]
+    del instance._host_to_device_port_map[host_port]
+
+  @staticmethod
+  def _GetPidForLock():
+    """Returns the PID used for host_forwarder initialization.
+
+    The PID of the "sharder" is used to handle multiprocessing. The "sharder"
+    is the initial process that forks that is the parent process.
+    """
+    return os.getpgrp()
+
+  def _InitHostLocked(self):
+    """Initializes the host forwarder daemon.
+
+    Note that the global lock must be acquired before calling this method. This
+    method kills any existing host_forwarder process that could be stale.
+    """
+    # See if the host_forwarder daemon was already initialized by a concurrent
+    # process or thread (in case multi-process sharding is not used).
+    pid_for_lock = Forwarder._GetPidForLock()
+    fd = os.open(Forwarder._LOCK_PATH, os.O_RDWR | os.O_CREAT)
+    with os.fdopen(fd, 'r+') as pid_file:
+      pid_with_start_time = pid_file.readline()
+      if pid_with_start_time:
+        (pid, process_start_time) = pid_with_start_time.split(':')
+        if pid == str(pid_for_lock):
+          if process_start_time == str(_GetProcessStartTime(pid_for_lock)):
+            return
+      self._KillHostLocked()
+      pid_file.seek(0)
+      pid_file.write(
+          '%s:%s' % (pid_for_lock, str(_GetProcessStartTime(pid_for_lock))))
+      pid_file.truncate()
+
+  def _InitDeviceLocked(self, device, tool):
+    """Initializes the device_forwarder daemon for a specific device (once).
+
+    Note that the global lock must be acquired before calling this method. This
+    method kills any existing device_forwarder daemon on the device that could
+    be stale, pushes the latest version of the daemon (to the device) and starts
+    it.
+
+    Args:
+      device: A DeviceUtils instance.
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+    """
+    device_serial = str(device)
+    if device_serial in self._initialized_devices:
+      return
+    Forwarder._KillDeviceLocked(device, tool)
+    device.PushChangedFiles([(
+        self._device_forwarder_path_on_host,
+        Forwarder._DEVICE_FORWARDER_FOLDER)])
+    cmd = '%s %s' % (tool.GetUtilWrapper(), Forwarder._DEVICE_FORWARDER_PATH)
+    device.RunShellCommand(
+        cmd, env={'LD_LIBRARY_PATH': Forwarder._DEVICE_FORWARDER_FOLDER},
+        check_return=True)
+    self._initialized_devices.add(device_serial)
+
+  def _KillHostLocked(self):
+    """Kills the forwarder process running on the host.
+
+    Note that the global lock must be acquired before calling this method.
+    """
+    logging.info('Killing host_forwarder.')
+    (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+        [self._host_forwarder_path, '--kill-server'])
+    if exit_code != 0:
+      (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+          ['pkill', '-9', 'host_forwarder'])
+      if exit_code != 0:
+        raise Exception('%s exited with %d:\n%s' % (
+              self._host_forwarder_path, exit_code, '\n'.join(output)))
+
+  @staticmethod
+  def _KillDeviceLocked(device, tool):
+    """Kills the forwarder process running on the device.
+
+    Note that the global lock must be acquired before calling this method.
+
+    Args:
+      device: Instance of DeviceUtils for talking to the device.
+      tool: Wrapper tool (e.g. valgrind) that can be used to execute the device
+            forwarder (see valgrind_tools.py).
+    """
+    logging.info('Killing device_forwarder.')
+    Forwarder._instance._initialized_devices.discard(str(device))
+    if not device.FileExists(Forwarder._DEVICE_FORWARDER_PATH):
+      return
+
+    cmd = '%s %s --kill-server' % (tool.GetUtilWrapper(),
+                                   Forwarder._DEVICE_FORWARDER_PATH)
+    device.RunShellCommand(
+        cmd, env={'LD_LIBRARY_PATH': Forwarder._DEVICE_FORWARDER_FOLDER},
+        check_return=True)
diff --git a/build/android/pylib/gtest/__init__.py b/build/android/pylib/gtest/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/gtest/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/gtest/filter/OWNERS b/build/android/pylib/gtest/filter/OWNERS
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/build/android/pylib/gtest/filter/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/build/android/pylib/gtest/filter/base_unittests_disabled b/build/android/pylib/gtest/filter/base_unittests_disabled
new file mode 100644
index 0000000..bf2311d
--- /dev/null
+++ b/build/android/pylib/gtest/filter/base_unittests_disabled
@@ -0,0 +1,28 @@
+# List of suppressions
+
+# Android will not support StackTrace.
+StackTrace.*
+#
+# Sometimes this is automatically generated by run_tests.py
+VerifyPathControlledByUserTest.Symlinks
+
+# http://crbug.com/138845
+MessagePumpLibeventTest.TestWatchingFromBadThread
+
+StringPrintfTest.StringPrintfMisc
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringPrintfBounds
+ProcessUtilTest.GetAppOutputRestrictedSIGPIPE
+# TODO(jrg): Fails on bots.  Works locally.  Figure out why.  2/6/12
+FieldTrialTest.*
+# Flaky?
+ScopedJavaRefTest.RefCounts
+# Death tests are not supported with apks.
+*DeathTest*
+FileTest.MemoryCorruption
+MessagePumpLibeventTest.QuitOutsideOfRun
+ScopedFD.ScopedFDCrashesOnCloseFailure
+
+# http://crbug.com/245043
+StackContainer.BufferAlignment
diff --git a/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled b/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
new file mode 100644
index 0000000..85e8fd6
--- /dev/null
+++ b/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
@@ -0,0 +1,10 @@
+# Additional list of suppressions from emulator
+#
+# Automatically generated by run_tests.py
+PathServiceTest.Get
+SharedMemoryTest.OpenClose
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringPrintfBounds
+StringPrintfTest.StringPrintfMisc
+VerifyPathControlledByUserTest.Symlinks
diff --git a/build/android/pylib/gtest/filter/blink_heap_unittests_disabled b/build/android/pylib/gtest/filter/blink_heap_unittests_disabled
new file mode 100644
index 0000000..7a43fb1
--- /dev/null
+++ b/build/android/pylib/gtest/filter/blink_heap_unittests_disabled
@@ -0,0 +1,2 @@
+# List of suppressions
+
diff --git a/build/android/pylib/gtest/filter/breakpad_unittests_disabled b/build/android/pylib/gtest/filter/breakpad_unittests_disabled
new file mode 100644
index 0000000..cefc64f
--- /dev/null
+++ b/build/android/pylib/gtest/filter/breakpad_unittests_disabled
@@ -0,0 +1,9 @@
+FileIDStripTest.StripSelf
+# crbug.com/303960
+ExceptionHandlerTest.InstructionPointerMemoryNullPointer
+# crbug.com/171419
+MinidumpWriterTest.MappingInfoContained
+# crbug.com/310088
+MinidumpWriterTest.MinidumpSizeLimit
+# crbug.com/375838
+ElfCoreDumpTest.ValidCoreFile
diff --git a/build/android/pylib/gtest/filter/cc_unittests_disabled b/build/android/pylib/gtest/filter/cc_unittests_disabled
new file mode 100644
index 0000000..b49d2c6
--- /dev/null
+++ b/build/android/pylib/gtest/filter/cc_unittests_disabled
@@ -0,0 +1,5 @@
+# Death tests are not supported with apks.
+BeginFrameObserverBaseTest.OnBeginFrameImplementation
+BeginFrameSourceBaseTest.ObserverManipulation
+BeginFrameSourceMultiplexerTest.SourcesManipulation
+BeginFrameSourceMultiplexerTest.MinimumIntervalNegativeFails
diff --git a/build/android/pylib/gtest/filter/content_browsertests_disabled b/build/android/pylib/gtest/filter/content_browsertests_disabled
new file mode 100644
index 0000000..dcad240
--- /dev/null
+++ b/build/android/pylib/gtest/filter/content_browsertests_disabled
@@ -0,0 +1,62 @@
+# List of suppressions
+# Timeouts
+Http/MediaTest.*
+File/MediaTest.*
+MediaTest.*
+DatabaseTest.*
+
+# Crashes
+RenderFrameHostManagerTest.IgnoreRendererDebugURLsWhenCrashed
+
+# Plugins are not supported.
+BrowserPluginThreadedCompositorPixelTest.*
+BrowserPluginHostTest.*
+BrowserPluginTest.*
+PluginTest.*
+
+# http://crbug.com/463740
+CrossPlatformAccessibilityBrowserTest.SelectedEditableTextAccessibility
+
+# http://crbug.com/297230
+DumpAccessibilityTreeTest.AccessibilityAriaLevel
+DumpAccessibilityTreeTest.AccessibilityAriaProgressbar
+DumpAccessibilityTreeTest.AccessibilityListMarkers
+DumpAccessibilityTreeTest.AccessibilityUl
+DumpAccessibilityTreeTest.AccessibilityCanvas
+RendererAccessibilityTest.DetachAccessibilityObject
+DumpAccessibilityTreeTest.AccessibilityDialog
+DumpAccessibilityTreeTest.AccessibilityModalDialogClosed
+DumpAccessibilityTreeTest.AccessibilityModalDialogInIframeOpened
+RendererAccessibilityTest.EventOnObjectNotInTree
+
+# http://crbug.com/187500
+RenderViewImplTest.*
+RendererAccessibilityTest.SendFullAccessibilityTreeOnReload
+RendererAccessibilityTest.HideAccessibilityObject
+RendererAccessibilityTest.ShowAccessibilityObject
+
+# http://crbug.com/215894
+DownloadContentTest.CancelInterruptedDownload
+DownloadContentTest.CancelResumingDownload
+DownloadContentTest.RemoveDownload
+DownloadContentTest.RemoveResumingDownload
+DownloadContentTest.ResumeInterruptedDownload
+DownloadContentTest.ResumeInterruptedDownloadNoRange
+DownloadContentTest.ResumeInterruptedDownloadNoVerifiers
+DownloadContentTest.ResumeInterruptedDownloadBadPrecondition
+DownloadContentTest.ResumeWithDeletedFile
+
+# http://crbug.com/386227
+IndexedDBBrowserTest.VersionChangeCrashResilience
+
+# http://crbug.com/233118
+IndexedDBBrowserTest.NullKeyPathPersistence
+
+# http://crbug.com/342525
+IndexedDBBrowserTestSingleProcess.RenderThreadShutdownTest
+
+# http://crbug.com/338421
+GinBrowserTest.GinAndGarbageCollection
+
+# http://crbug.com/343604
+MSE_ClearKey/EncryptedMediaTest.ConfigChangeVideo/0
diff --git a/build/android/pylib/gtest/filter/content_unittests_disabled b/build/android/pylib/gtest/filter/content_unittests_disabled
new file mode 100644
index 0000000..925a7d1
--- /dev/null
+++ b/build/android/pylib/gtest/filter/content_unittests_disabled
@@ -0,0 +1,13 @@
+# List of suppressions
+
+# crbug.com/139095
+RenderWidgetTest.OnMsgPaintAtSize
+# crbug.com/147549
+GamepadProviderTest.PollingAccess
+PepperGamepadHostTest.WaitForReply
+# crbug.com/159234
+WebContentsVideoCaptureDeviceTest.*
+# crbug.com/167045
+ContentViewPopupZoomerTest.testPopupZoomerShowsUp
+# crbug.com/254034
+PageStateSerializationTest.BackwardsCompat_v11
diff --git a/build/android/pylib/gtest/filter/gfx_unittests_disabled b/build/android/pylib/gtest/filter/gfx_unittests_disabled
new file mode 100644
index 0000000..b9aec9e
--- /dev/null
+++ b/build/android/pylib/gtest/filter/gfx_unittests_disabled
@@ -0,0 +1,10 @@
+CanvasTest.StringSizeEmptyString
+CanvasTest.StringWidth
+FontTest.Ascent
+FontTest.AvgWidths
+FontTest.CapHeight
+FontTest.GetActualFontNameForTesting
+FontTest.Height
+FontTest.LoadArial
+FontTest.LoadArialBold
+TextUtilsTest.GetStringWidth
diff --git a/build/android/pylib/gtest/filter/ipc_tests_disabled b/build/android/pylib/gtest/filter/ipc_tests_disabled
new file mode 100644
index 0000000..e8d0691
--- /dev/null
+++ b/build/android/pylib/gtest/filter/ipc_tests_disabled
@@ -0,0 +1,18 @@
+# Times out
+IPCSyncChannelTest.ChattyServer
+
+# MultiProcessTest related failures. These tests fail if DCHECK is enabled.
+IPCChannelPosixTest.AdvancedConnected
+IPCChannelPosixTest.ResetState
+IPCChannelPosixTest.MultiConnection
+IPCFuzzingTest.SanityTest
+IPCFuzzingTest.MsgBadPayloadArgs
+IPCFuzzingTest.MsgBadPayloadShort
+IPCSendFdsTest.DescriptorTest
+IPCChannelProxyTest.MessageClassFilters
+IPCChannelProxyTest.GlobalAndMessageClassFilters
+IPCChannelProxyTest.FilterRemoval
+IPCChannelTest.ChannelTest
+IPCChannelTest.ChannelProxyTest
+IPCChannelTest.SendMessageInChannelConnected
+SyncSocketTest.SanityTest
diff --git a/build/android/pylib/gtest/filter/media_unittests_disabled b/build/android/pylib/gtest/filter/media_unittests_disabled
new file mode 100644
index 0000000..ed3b9aa
--- /dev/null
+++ b/build/android/pylib/gtest/filter/media_unittests_disabled
@@ -0,0 +1,8 @@
+# List of suppressions
+
+# Death tests are not supported with APKs
+# http://crbug.com/138855
+CompositeFilterDeathTest.*
+
+# http://crbug.com/138833
+AesDecryptorTest.*
diff --git a/build/android/pylib/gtest/filter/net_unittests_disabled b/build/android/pylib/gtest/filter/net_unittests_disabled
new file mode 100644
index 0000000..75a1c86
--- /dev/null
+++ b/build/android/pylib/gtest/filter/net_unittests_disabled
@@ -0,0 +1,41 @@
+# List of suppressions.
+
+PythonUtils.PythonRunTime
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/0
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/1
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/2
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/0
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/1
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/2
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/0
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/1
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/2
+
+# Can't spin up more than one SpawnedTestServer on Android.
+URLRequestTestReferrerPolicy.HTTPToCrossOriginHTTP
+URLRequestTestReferrerPolicy.HTTPSToCrossOriginHTTPS
+URLRequestTestReferrerPolicy.HTTPToHTTPS
+URLRequestTestReferrerPolicy.HTTPSToHTTP
+
+# Fail only on bots.
+HttpCache.RangeGET_Cancel
+HttpCache.RangeGET_Cancel2
+HttpCache.RangeGET_OK
+HttpCache.RangeGET_Previous200
+HttpCache.RangeGET_Revalidate2
+HttpCache.RangeGET_SyncOK
+HttpCache.TypicalGET_ConditionalRequest
+# Death tests are not supported with APKs.
+*DeathTest*
+# These are death tests and thus also disabled.
+PrioritizedDispatcherTest.CancelNull
+PrioritizedDispatcherTest.CancelMissing
diff --git a/build/android/pylib/gtest/filter/sync_unit_tests_disabled b/build/android/pylib/gtest/filter/sync_unit_tests_disabled
new file mode 100644
index 0000000..cc4b72d
--- /dev/null
+++ b/build/android/pylib/gtest/filter/sync_unit_tests_disabled
@@ -0,0 +1,4 @@
+SyncHttpBridgeTest.*
+
+# crbug.com/144422
+OnDiskSyncableDirectory.FailInitialWrite
diff --git a/build/android/pylib/gtest/filter/unit_tests_disabled b/build/android/pylib/gtest/filter/unit_tests_disabled
new file mode 100644
index 0000000..c7851fd
--- /dev/null
+++ b/build/android/pylib/gtest/filter/unit_tests_disabled
@@ -0,0 +1,119 @@
+# List of suppressions
+
+# The UDP-related tests currently do not work on Android because
+# we lack a UDP forwarder tool.
+NetworkStatsTestUDP.*
+
+# Missing 16MB test resource.
+HistoryProfileTest.TypicalProfileVersion
+
+# crbug.com/139408
+SQLitePersistentCookieStoreTest.TestDontLoadOldSessionCookies
+SQLitePersistentCookieStoreTest.PersistIsPersistent
+
+# crbug.com/139433
+AutofillTableTest.AutofillProfile*
+AutofillTableTest.UpdateAutofillProfile
+
+# crbug.com/139400
+AutofillProfileTest.*
+CreditCardTest.SetInfoExpirationMonth
+
+# crbug.com/139398
+DownloadItemModelTest.InterruptTooltip
+
+# Tests crashing in the APK
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+DownloadItemModelTest.InterruptStatus
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+WebsiteSettingsTest.OnSiteDataAccessed
+
+# crbug.com/139423
+ValueStoreFrontendTest.GetExistingData
+
+# crbug.com/139421
+ChromeSelectFilePolicyTest.ExpectAsynchronousListenerCall
+
+# http://crbug.com/139033
+ChromeDownloadManagerDelegateTest.StartDownload_PromptAlways
+
+# Extension support is limited on Android.
+# Some of these can be enabled if we register extension-related prefs in
+# browser_prefs.cc
+ExtensionTest.*
+ExtensionAPI.*
+ExtensionFileUtilTest.*
+ExtensionPermissionsTest.*
+ExtensionUnpackerTest.*
+ActiveTabTest.*
+ExtensionAppsPromo.*
+ComponentLoaderTest.*
+ExtensionFromUserScript.*
+ExtensionFromWebApp.*
+ExtensionIconManagerTest.*
+ExtensionServiceTest.*
+ExtensionServiceTestSimple.*
+ExtensionSourcePriorityTest.*
+ExtensionSpecialStoragePolicyTest.*
+ExternalPolicyProviderTest.*
+ExternalProviderImplTest.*
+MenuManagerTest.*
+PageActionControllerTest.*
+PermissionsUpdaterTest.*
+ImageLoaderTest.*
+ImageLoadingTrackerTest.*
+ExtensionSettingsFrontendTest.*
+ExtensionSettingsSyncTest.*
+ExtensionUpdaterTest.*
+UserScriptListenerTest.*
+WebApplicationTest.GetShortcutInfoForTab
+ExtensionActionIconFactoryTest.*
+
+# crbug.com/139411
+AutocompleteProviderTest.*
+HistoryContentsProviderBodyOnlyTest.*
+HistoryContentsProviderTest.*
+HQPOrderingTest.*
+SearchProviderTest.*
+
+ProtocolHandlerRegistryTest.TestOSRegistrationFailure
+
+# crbug.com/139418
+SQLiteServerBoundCertStoreTest.TestUpgradeV1
+SQLiteServerBoundCertStoreTest.TestUpgradeV2
+
+ProfileSyncComponentsFactoryImplTest.*
+PermissionsTest.GetWarningMessages_Plugins
+ImageOperations.ResizeShouldAverageColors
+
+# crbug.com/138275
+PrerenderTest.*
+RenderWidgetTest.OnMsgPaintAtSize
+
+# crbug.com/139643
+VariationsUtilTest.DisableAfterInitialization
+VariationsUtilTest.AssociateGoogleVariationID
+VariationsUtilTest.NoAssociation
+
+# crbug.com/141473
+AutofillManagerTest.UpdatePasswordSyncState
+AutofillManagerTest.UpdatePasswordGenerationState
+
+# crbug.com/144227
+ExtensionIconImageTest.*
+
+# crbug.com/145843
+EntropyProviderTest.UseOneTimeRandomizationSHA1
+EntropyProviderTest.UseOneTimeRandomizationPermuted
+
+# crbug.com/147500
+ManifestTest.RestrictedKeys
+
+# crbug.com/152599
+SyncSearchEngineDataTypeControllerTest.*
+
+# crbug.com/256259
+DiagnosticsModelTest.RunAll
+
+# Death tests are not supported with APKs.
+*DeathTest*
diff --git a/build/android/pylib/gtest/filter/webkit_unit_tests_disabled b/build/android/pylib/gtest/filter/webkit_unit_tests_disabled
new file mode 100644
index 0000000..1ffa325
--- /dev/null
+++ b/build/android/pylib/gtest/filter/webkit_unit_tests_disabled
@@ -0,0 +1,25 @@
+# List of suppressions
+
+# crbug.com/159935
+WebCompositorInputHandlerImplTest.gestureFlingAnimates
+WebCompositorInputHandlerImplTest.gestureFlingTransferResets
+WebPageSerializerTest.HTMLNodes
+
+# crbug.com/241730
+ScrollAnimatorNoneTest.CurveMathQuartic
+ScrollAnimatorNoneTest.ScrollDownToBumper
+ScrollAnimatorNoneTest.ScrollQuadraticSmoothed
+ScrollAnimatorNoneTest.ScrollTwiceCubic
+ScrollAnimatorNoneTest.VaryingInputsEquivalencyCoastSteep
+WebViewTest.VisitedLinkCrash
+
+# Disabled until blink roll r151682
+DeferredImageDecoderTest.drawScaledIntoSkPicture
+
+# Disabled until blink roll r173540
+DeferredImageDecoderTest.decodeOnOtherThread
+DeferredImageDecoderTest.drawIntoSkPicture
+DeferredImageDecoderTest.drawIntoSkPictureProgressive
+
+# crbug.com/320005
+CoreAnimationCompositorAnimationsTest.ConvertTimingForCompositorIterationCount
diff --git a/build/android/pylib/gtest/gtest_config.py b/build/android/pylib/gtest/gtest_config.py
new file mode 100644
index 0000000..76e0f50
--- /dev/null
+++ b/build/android/pylib/gtest/gtest_config.py
@@ -0,0 +1,54 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration file for android gtest suites."""
+
+# Add new suites here before upgrading them to the stable list below.
+EXPERIMENTAL_TEST_SUITES = [
+    'components_browsertests',
+    'content_gl_tests',
+    'heap_profiler_unittests',
+    'devtools_bridge_tests',
+]
+
+TELEMETRY_EXPERIMENTAL_TEST_SUITES = [
+    'telemetry_unittests',
+]
+
+# Do not modify this list without approval of an android owner.
+# This list determines which suites are run by default, both for local
+# testing and on android trybots running on commit-queue.
+STABLE_TEST_SUITES = [
+    'android_webview_unittests',
+    'base_unittests',
+    'breakpad_unittests',
+    'cc_unittests',
+    'components_unittests',
+    'content_browsertests',
+    'content_unittests',
+    'events_unittests',
+    'gl_tests',
+    'gl_unittests',
+    'gpu_unittests',
+    'ipc_tests',
+    'media_unittests',
+    'midi_unittests',
+    'net_unittests',
+    'sandbox_linux_unittests',
+    'skia_unittests',
+    'sql_unittests',
+    'sync_unit_tests',
+    'ui_android_unittests',
+    'ui_base_unittests',
+    'ui_touch_selection_unittests',
+    'unit_tests',
+    'webkit_unit_tests',
+]
+
+# Tests fail in component=shared_library build, which is required for ASan.
+# http://crbug.com/344868
+ASAN_EXCLUDED_TEST_SUITES = [
+    'breakpad_unittests',
+    'sandbox_linux_unittests'
+]
diff --git a/build/android/pylib/gtest/gtest_test_instance.py b/build/android/pylib/gtest/gtest_test_instance.py
new file mode 100644
index 0000000..3285e0b
--- /dev/null
+++ b/build/android/pylib/gtest/gtest_test_instance.py
@@ -0,0 +1,329 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import shutil
+import sys
+import tempfile
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_instance
+from pylib.utils import apk_helper
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common'))
+import unittest_util
+
+
+BROWSER_TEST_SUITES = [
+  'components_browsertests',
+  'content_browsertests',
+]
+
+
+_DEFAULT_ISOLATE_FILE_PATHS = {
+    'base_unittests': 'base/base_unittests.isolate',
+    'blink_heap_unittests':
+      'third_party/WebKit/Source/platform/heap/BlinkHeapUnitTests.isolate',
+    'breakpad_unittests': 'breakpad/breakpad_unittests.isolate',
+    'cc_perftests': 'cc/cc_perftests.isolate',
+    'components_browsertests': 'components/components_browsertests.isolate',
+    'components_unittests': 'components/components_unittests.isolate',
+    'content_browsertests': 'content/content_browsertests.isolate',
+    'content_unittests': 'content/content_unittests.isolate',
+    'media_perftests': 'media/media_perftests.isolate',
+    'media_unittests': 'media/media_unittests.isolate',
+    'midi_unittests': 'media/midi/midi_unittests.isolate',
+    'net_unittests': 'net/net_unittests.isolate',
+    'sql_unittests': 'sql/sql_unittests.isolate',
+    'sync_unit_tests': 'sync/sync_unit_tests.isolate',
+    'ui_base_unittests': 'ui/base/ui_base_tests.isolate',
+    'unit_tests': 'chrome/unit_tests.isolate',
+    'webkit_unit_tests':
+      'third_party/WebKit/Source/web/WebKitUnitTests.isolate',
+}
+
+
+# Used for filtering large data deps at a finer grain than what's allowed in
+# isolate files since pushing deps to devices is expensive.
+# Wildcards are allowed.
+_DEPS_EXCLUSION_LIST = [
+    'chrome/test/data/extensions/api_test',
+    'chrome/test/data/extensions/secure_shell',
+    'chrome/test/data/firefox*',
+    'chrome/test/data/gpu',
+    'chrome/test/data/image_decoding',
+    'chrome/test/data/import',
+    'chrome/test/data/page_cycler',
+    'chrome/test/data/perf',
+    'chrome/test/data/pyauto_private',
+    'chrome/test/data/safari_import',
+    'chrome/test/data/scroll',
+    'chrome/test/data/third_party',
+    'third_party/hunspell_dictionaries/*.dic',
+    # crbug.com/258690
+    'webkit/data/bmp_decoder',
+    'webkit/data/ico_decoder',
+]
+
+
+_EXTRA_NATIVE_TEST_ACTIVITY = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+        'NativeTestActivity')
+_EXTRA_SHARD_SIZE_LIMIT = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+        'ShardSizeLimit')
+
+# TODO(jbudorick): Remove these once we're no longer parsing stdout to generate
+# results.
+_RE_TEST_STATUS = re.compile(
+    r'\[ +((?:RUN)|(?:FAILED)|(?:OK)) +\] ?([^ ]+)(?: \((\d+) ms\))?$')
+_RE_TEST_RUN_STATUS = re.compile(
+    r'\[ +(PASSED|RUNNER_FAILED|CRASHED) \] ?[^ ]+')
+
+
+# TODO(jbudorick): Make this a class method of GtestTestInstance once
+# test_package_apk and test_package_exe are gone.
+def ParseGTestListTests(raw_list):
+  """Parses a raw test list as provided by --gtest_list_tests.
+
+  Args:
+    raw_list: The raw test listing with the following format:
+
+    IPCChannelTest.
+      SendMessageInChannelConnected
+    IPCSyncChannelTest.
+      Simple
+      DISABLED_SendWithTimeoutMixedOKAndTimeout
+
+  Returns:
+    A list of all tests. For the above raw listing:
+
+    [IPCChannelTest.SendMessageInChannelConnected, IPCSyncChannelTest.Simple,
+     IPCSyncChannelTest.DISABLED_SendWithTimeoutMixedOKAndTimeout]
+  """
+  ret = []
+  current = ''
+  for test in raw_list:
+    if not test:
+      continue
+    if test[0] != ' ':
+      test_case = test.split()[0]
+      if test_case.endswith('.'):
+        current = test_case
+    elif 'YOU HAVE' not in test:
+      test_name = test.split()[0]
+      ret += [current + test_name]
+  return ret
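+
+# Illustrative example (not part of the original change): given the raw
+# listing ['IPCChannelTest.', '  SendMessageInChannelConnected'],
+# ParseGTestListTests returns ['IPCChannelTest.SendMessageInChannelConnected'].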
+
+
+class GtestTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, isolate_delegate, error_func):
+    super(GtestTestInstance, self).__init__()
+    # TODO(jbudorick): Support multiple test suites.
+    if len(args.suite_name) > 1:
+      raise ValueError('Platform mode currently supports only 1 gtest suite')
+    self._suite = args.suite_name[0]
+
+    self._apk_path = os.path.join(
+        constants.GetOutDirectory(), '%s_apk' % self._suite,
+        '%s-debug.apk' % self._suite)
+    self._exe_path = os.path.join(constants.GetOutDirectory(),
+                                  self._suite)
+    if not os.path.exists(self._apk_path):
+      self._apk_path = None
+      self._activity = None
+      self._package = None
+      self._runner = None
+    else:
+      helper = apk_helper.ApkHelper(self._apk_path)
+      self._activity = helper.GetActivityName()
+      self._package = helper.GetPackageName()
+      self._runner = helper.GetInstrumentationName()
+      self._extras = {
+        _EXTRA_NATIVE_TEST_ACTIVITY: self._activity,
+      }
+      if self._suite in BROWSER_TEST_SUITES:
+        self._extras[_EXTRA_SHARD_SIZE_LIMIT] = 1
+
+    if not os.path.exists(self._exe_path):
+      self._exe_path = None
+    if not self._apk_path and not self._exe_path:
+      error_func('Could not find apk or executable for %s' % self._suite)
+
+    self._data_deps = []
+    if args.test_filter:
+      self._gtest_filter = args.test_filter
+    elif args.test_filter_file:
+      with open(args.test_filter_file, 'r') as f:
+        self._gtest_filter = ':'.join(l.strip() for l in f)
+    else:
+      self._gtest_filter = None
+
+    if not args.isolate_file_path:
+      default_isolate_file_path = _DEFAULT_ISOLATE_FILE_PATHS.get(self._suite)
+      if default_isolate_file_path:
+        args.isolate_file_path = os.path.join(
+            constants.DIR_SOURCE_ROOT, default_isolate_file_path)
+
+    if args.isolate_file_path:
+      self._isolate_abs_path = os.path.abspath(args.isolate_file_path)
+      self._isolate_delegate = isolate_delegate
+      self._isolated_abs_path = os.path.join(
+          constants.GetOutDirectory(), '%s.isolated' % self._suite)
+    else:
+      logging.warning('No isolate file provided. No data deps will be pushed.')
+      self._isolate_delegate = None
+
+    if args.app_data_files:
+      self._app_data_files = args.app_data_files
+      if args.app_data_file_dir:
+        self._app_data_file_dir = args.app_data_file_dir
+      else:
+        self._app_data_file_dir = tempfile.mkdtemp()
+        logging.critical('Saving app files to %s', self._app_data_file_dir)
+    else:
+      self._app_data_files = None
+      self._app_data_file_dir = None
+
+  #override
+  def TestType(self):
+    return 'gtest'
+
+  #override
+  def SetUp(self):
+    """Map data dependencies via isolate."""
+    if self._isolate_delegate:
+      self._isolate_delegate.Remap(
+          self._isolate_abs_path, self._isolated_abs_path)
+      self._isolate_delegate.PurgeExcluded(_DEPS_EXCLUSION_LIST)
+      self._isolate_delegate.MoveOutputDeps()
+      dest_dir = None
+      if self._suite == 'breakpad_unittests':
+        dest_dir = '/data/local/tmp/'
+      self._data_deps.extend([(constants.ISOLATE_DEPS_DIR, dest_dir)])
+
+
+  def GetDataDependencies(self):
+    """Returns the test suite's data dependencies.
+
+    Returns:
+      A list of (host_path, device_path) tuples to push. If device_path is
+      None, the client is responsible for determining where to push the file.
+    """
+    return self._data_deps
+
+  def FilterTests(self, test_list, disabled_prefixes=None):
+    """Filters |test_list| based on prefixes and, if present, a filter string.
+
+    Args:
+      test_list: The list of tests to filter.
+      disabled_prefixes: A list of test prefixes to filter. Defaults to
+        DISABLED_, FLAKY_, FAILS_, PRE_, and MANUAL_
+    Returns:
+      A filtered list of tests to run.
+    """
+    gtest_filter_strings = [
+        self._GenerateDisabledFilterString(disabled_prefixes)]
+    if self._gtest_filter:
+      gtest_filter_strings.append(self._gtest_filter)
+
+    filtered_test_list = test_list
+    for gtest_filter_string in gtest_filter_strings:
+      logging.debug('Filtering tests using: %s', gtest_filter_string)
+      filtered_test_list = unittest_util.FilterTestNames(
+          filtered_test_list, gtest_filter_string)
+    return filtered_test_list
+
+  def _GenerateDisabledFilterString(self, disabled_prefixes):
+    disabled_filter_items = []
+
+    if disabled_prefixes is None:
+      disabled_prefixes = ['DISABLED_', 'FLAKY_', 'FAILS_', 'PRE_', 'MANUAL_']
+    disabled_filter_items += ['%s*' % dp for dp in disabled_prefixes]
+    disabled_filter_items += ['*.%s*' % dp for dp in disabled_prefixes]
+
+    disabled_tests_file_path = os.path.join(
+        constants.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'gtest',
+        'filter', '%s_disabled' % self._suite)
+    if os.path.exists(disabled_tests_file_path):
+      with open(disabled_tests_file_path) as disabled_tests_file:
+        disabled_filter_items += [
+            l for l in (line.strip() for line in disabled_tests_file)
+            if l and not l.startswith('#')]
+
+    return '*-%s' % ':'.join(disabled_filter_items)
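+
+  # Illustrative example (not part of the original change): with the default
+  # prefixes and a filter file containing only the hypothetical entry
+  # 'FooTest.Bar', the returned filter is '*-' followed by
+  # 'DISABLED_*:FLAKY_*:FAILS_*:PRE_*:MANUAL_*', the matching '*.<prefix>*'
+  # items, and 'FooTest.Bar', all joined with ':'.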
+
+  def ParseGTestOutput(self, output):
+    """Parses raw gtest output and returns a list of results.
+
+    Args:
+      output: A list of output lines.
+    Returns:
+      A list of base_test_result.BaseTestResults.
+    """
+    results = []
+    for l in output:
+      matcher = _RE_TEST_STATUS.match(l)
+      if matcher:
+        result_type = None
+        if matcher.group(1) == 'OK':
+          result_type = base_test_result.ResultType.PASS
+        elif matcher.group(1) == 'FAILED':
+          result_type = base_test_result.ResultType.FAIL
+
+        if result_type:
+          test_name = matcher.group(2)
+          duration = int(matcher.group(3)) if matcher.group(3) else 0
+          results.append(base_test_result.BaseTestResult(
+              test_name, result_type, duration))
+      logging.info(l)
+    return results
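+
+  # Illustrative example (not part of the original change): the line
+  # '[       OK ] FooTest.Bar (12 ms)' yields a PASS result for the
+  # hypothetical test 'FooTest.Bar' with a duration of 12 ms.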
+
+  #override
+  def TearDown(self):
+    """Clear the mappings created by SetUp."""
+    if self._isolate_delegate:
+      self._isolate_delegate.Clear()
+
+  @property
+  def activity(self):
+    return self._activity
+
+  @property
+  def apk(self):
+    return self._apk_path
+
+  @property
+  def app_file_dir(self):
+    return self._app_data_file_dir
+
+  @property
+  def app_files(self):
+    return self._app_data_files
+
+  @property
+  def exe(self):
+    return self._exe_path
+
+  @property
+  def extras(self):
+    return self._extras
+
+  @property
+  def package(self):
+    return self._package
+
+  @property
+  def runner(self):
+    return self._runner
+
+  @property
+  def suite(self):
+    return self._suite
+
diff --git a/build/android/pylib/gtest/gtest_test_instance_test.py b/build/android/pylib/gtest/gtest_test_instance_test.py
new file mode 100755
index 0000000..c52b235
--- /dev/null
+++ b/build/android/pylib/gtest/gtest_test_instance_test.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.gtest import gtest_test_instance
+
+
+class GtestTestInstanceTests(unittest.TestCase):
+
+  def testParseGTestListTests_simple(self):
+    raw_output = [
+      'TestCaseOne.',
+      '  testOne',
+      '  testTwo',
+      'TestCaseTwo.',
+      '  testThree',
+      '  testFour',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TestCaseOne.testOne',
+      'TestCaseOne.testTwo',
+      'TestCaseTwo.testThree',
+      'TestCaseTwo.testFour',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_typeParameterized_old(self):
+    raw_output = [
+      'TPTestCase/WithTypeParam/0.',
+      '  testOne',
+      '  testTwo',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TPTestCase/WithTypeParam/0.testOne',
+      'TPTestCase/WithTypeParam/0.testTwo',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_typeParameterized_new(self):
+    raw_output = [
+      'TPTestCase/WithTypeParam/0.  # TypeParam = TypeParam0',
+      '  testOne',
+      '  testTwo',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TPTestCase/WithTypeParam/0.testOne',
+      'TPTestCase/WithTypeParam/0.testTwo',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_valueParameterized_old(self):
+    raw_output = [
+      'VPTestCase.',
+      '  testWithValueParam/0',
+      '  testWithValueParam/1',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'VPTestCase.testWithValueParam/0',
+      'VPTestCase.testWithValueParam/1',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_valueParameterized_new(self):
+    raw_output = [
+      'VPTestCase.',
+      '  testWithValueParam/0  # GetParam() = 0',
+      '  testWithValueParam/1  # GetParam() = 1',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'VPTestCase.testWithValueParam/0',
+      'VPTestCase.testWithValueParam/1',
+    ]
+    self.assertEqual(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/gtest/local_device_gtest_run.py b/build/android/pylib/gtest/local_device_gtest_run.py
new file mode 100644
index 0000000..f1cea4e
--- /dev/null
+++ b/build/android/pylib/gtest/local_device_gtest_run.py
@@ -0,0 +1,241 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import itertools
+import logging
+import os
+import posixpath
+
+from pylib import constants
+from pylib import ports
+from pylib.base import test_run
+from pylib.device import device_errors
+from pylib.gtest import gtest_test_instance
+
+from pylib.local import local_test_server_spawner
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from pylib.utils import device_temp_file
+
+_COMMAND_LINE_FLAGS_SUPPORTED = True
+
+_EXTRA_COMMAND_LINE_FILE = (
+    'org.chromium.native_test.NativeTestActivity.CommandLineFile')
+_EXTRA_COMMAND_LINE_FLAGS = (
+    'org.chromium.native_test.NativeTestActivity.CommandLineFlags')
+_EXTRA_TEST_LIST = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+        '.TestList')
+
+_MAX_SHARD_SIZE = 256
+
+# TODO(jbudorick): Move this up to the test instance if the net test server is
+# handled outside of the APK for the remote_device environment.
+_SUITE_REQUIRES_TEST_SERVER_SPAWNER = [
+  'components_browsertests', 'content_unittests', 'content_browsertests',
+  'net_unittests', 'unit_tests'
+]
+
+# TODO(jbudorick): Move this inside _ApkDelegate once TestPackageApk is gone.
+def PullAppFilesImpl(device, package, files, directory):
+  device_dir = device.GetApplicationDataDirectory(package)
+  host_dir = os.path.join(directory, str(device))
+  for f in files:
+    device_file = posixpath.join(device_dir, f)
+    host_file = os.path.join(host_dir, *f.split(posixpath.sep))
+    host_file_base, ext = os.path.splitext(host_file)
+    for i in itertools.count():
+      host_file = '%s_%d%s' % (host_file_base, i, ext)
+      if not os.path.exists(host_file):
+        break
+    device.PullFile(device_file, host_file)
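+
+# Illustrative example (not part of the original change): pulling
+# 'logs/test.log' twice for the same device lands in
+# '<directory>/<device>/logs/test_0.log' and then '.../logs/test_1.log';
+# the counter avoids clobbering files from earlier pulls.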
+
+class _ApkDelegate(object):
+  def __init__(self, test_instance):
+    self._activity = test_instance.activity
+    self._apk = test_instance.apk
+    self._package = test_instance.package
+    self._runner = test_instance.runner
+
+    self._component = '%s/%s' % (self._package, self._runner)
+    self._extras = test_instance.extras
+
+  def Install(self, device):
+    device.Install(self._apk)
+
+  def Run(self, test, device, flags=None, **kwargs):
+    extras = dict(self._extras)
+
+    with device_temp_file.DeviceTempFile(device.adb) as command_line_file:
+      device.WriteFile(command_line_file.name, '_ %s' % flags if flags else '_')
+      extras[_EXTRA_COMMAND_LINE_FILE] = command_line_file.name
+
+      with device_temp_file.DeviceTempFile(device.adb) as test_list_file:
+        if test:
+          device.WriteFile(test_list_file.name, '\n'.join(test))
+          extras[_EXTRA_TEST_LIST] = test_list_file.name
+
+        return device.StartInstrumentation(
+            self._component, extras=extras, raw=False, **kwargs)
+
+  def PullAppFiles(self, device, files, directory):
+    PullAppFilesImpl(device, self._package, files, directory)
+
+  def Clear(self, device):
+    device.ClearApplicationState(self._package)
+
+
+class _ExeDelegate(object):
+  def __init__(self, tr, exe):
+    self._exe_host_path = exe
+    self._exe_file_name = os.path.split(exe)[-1]
+    self._exe_device_path = '%s/%s' % (
+        constants.TEST_EXECUTABLE_DIR, self._exe_file_name)
+    deps_host_path = self._exe_host_path + '_deps'
+    if os.path.exists(deps_host_path):
+      self._deps_host_path = deps_host_path
+      self._deps_device_path = self._exe_device_path + '_deps'
+    else:
+      self._deps_host_path = None
+    self._test_run = tr
+
+  def Install(self, device):
+    # TODO(jbudorick): Look into merging this with normal data deps pushing if
+    # executables become supported on nonlocal environments.
+    host_device_tuples = [(self._exe_host_path, self._exe_device_path)]
+    if self._deps_host_path:
+      host_device_tuples.append((self._deps_host_path, self._deps_device_path))
+    device.PushChangedFiles(host_device_tuples)
+
+  def Run(self, test, device, flags=None, **kwargs):
+    cmd = [
+        self._test_run.GetTool(device).GetTestWrapper(),
+        self._exe_device_path,
+    ]
+    if test:
+      cmd.append('--gtest_filter=%s' % ':'.join(test))
+    if flags:
+      cmd.append(flags)
+    cwd = constants.TEST_EXECUTABLE_DIR
+
+    env = {
+      'LD_LIBRARY_PATH':
+          '%s/%s_deps' % (constants.TEST_EXECUTABLE_DIR, self._exe_file_name),
+    }
+    try:
+      gcov_strip_depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
+      external = device.GetExternalStoragePath()
+      env['GCOV_PREFIX'] = '%s/gcov' % external
+      env['GCOV_PREFIX_STRIP'] = gcov_strip_depth
+    except (device_errors.CommandFailedError, KeyError):
+      pass
+
+    # TODO(jbudorick): Switch to just RunShellCommand once perezju@'s CL
+    # for long shell commands lands.
+    with device_temp_file.DeviceTempFile(device.adb) as script_file:
+      script_contents = ' '.join(cmd)
+      logging.info('script contents: %r', script_contents)
+      device.WriteFile(script_file.name, script_contents)
+      output = device.RunShellCommand(['sh', script_file.name], cwd=cwd,
+                                      env=env, **kwargs)
+    return output
+
+  def PullAppFiles(self, device, files, directory):
+    pass
+
+  def Clear(self, device):
+    device.KillAll(self._exe_file_name, blocking=True, timeout=30, quiet=True)
+
+
+class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
+
+  def __init__(self, env, test_instance):
+    assert isinstance(env, local_device_environment.LocalDeviceEnvironment)
+    assert isinstance(test_instance, gtest_test_instance.GtestTestInstance)
+    super(LocalDeviceGtestRun, self).__init__(env, test_instance)
+
+    if self._test_instance.apk:
+      self._delegate = _ApkDelegate(self._test_instance)
+    elif self._test_instance.exe:
+      self._delegate = _ExeDelegate(self, self._test_instance.exe)
+
+    self._servers = {}
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.suite
+
+  #override
+  def SetUp(self):
+
+    def individual_device_set_up(dev, host_device_tuples):
+      # Install test APK.
+      self._delegate.Install(dev)
+
+      # Push data dependencies.
+      external_storage = dev.GetExternalStoragePath()
+      host_device_tuples = [
+          (h, d if d is not None else external_storage)
+          for h, d in host_device_tuples]
+      dev.PushChangedFiles(host_device_tuples)
+
+      self._servers[str(dev)] = []
+      if self.TestPackage() in _SUITE_REQUIRES_TEST_SERVER_SPAWNER:
+        self._servers[str(dev)].append(
+            local_test_server_spawner.LocalTestServerSpawner(
+                ports.AllocateTestServerPort(), dev, self.GetTool(dev)))
+
+      for s in self._servers[str(dev)]:
+        s.SetUp()
+
+    self._env.parallel_devices.pMap(individual_device_set_up,
+                                    self._test_instance.GetDataDependencies())
+
+  #override
+  def _ShouldShard(self):
+    return True
+
+  #override
+  def _CreateShards(self, tests):
+    device_count = len(self._env.devices)
+    shards = []
+    for i in xrange(0, device_count):
+      unbounded_shard = tests[i::device_count]
+      shards += [unbounded_shard[j:j+_MAX_SHARD_SIZE]
+                 for j in xrange(0, len(unbounded_shard), _MAX_SHARD_SIZE)]
+    return shards
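+
+  # Illustrative example (not part of the original change): with two devices
+  # and tests [t0, t1, t2, t3, t4, t5], the interleaved shards are
+  # [t0, t2, t4] and [t1, t3, t5], each further split into chunks of at most
+  # _MAX_SHARD_SIZE tests.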
+
+  #override
+  def _GetTests(self):
+    tests = self._delegate.Run(
+        None, self._env.devices[0], flags='--gtest_list_tests')
+    tests = gtest_test_instance.ParseGTestListTests(tests)
+    tests = self._test_instance.FilterTests(tests)
+    return tests
+
+  #override
+  def _RunTest(self, device, test):
+    # Run the test.
+    output = self._delegate.Run(
+        test, device, timeout=900, retries=0)
+    for s in self._servers[str(device)]:
+      s.Reset()
+    if self._test_instance.app_files:
+      self._delegate.PullAppFiles(device, self._test_instance.app_files,
+                                  self._test_instance.app_file_dir)
+    self._delegate.Clear(device)
+
+    # Parse the output.
+    # TODO(jbudorick): Transition test scripts away from parsing stdout.
+    results = self._test_instance.ParseGTestOutput(output)
+    return results
+
+  #override
+  def TearDown(self):
+    def individual_device_tear_down(dev):
+      for s in self._servers[str(dev)]:
+        s.TearDown()
+
+    self._env.parallel_devices.pMap(individual_device_tear_down)
+
diff --git a/build/android/pylib/gtest/setup.py b/build/android/pylib/gtest/setup.py
new file mode 100644
index 0000000..f563ccf
--- /dev/null
+++ b/build/android/pylib/gtest/setup.py
@@ -0,0 +1,230 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for GTests."""
+# pylint: disable=W0212
+
+import logging
+import os
+import sys
+
+from pylib import constants
+
+from pylib.base import base_setup
+from pylib.base import base_test_result
+from pylib.base import test_dispatcher
+from pylib.device import device_utils
+from pylib.gtest import gtest_test_instance
+from pylib.gtest import test_package_apk
+from pylib.gtest import test_package_exe
+from pylib.gtest import test_runner
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                             'common'))
+import unittest_util # pylint: disable=F0401
+
+
+ISOLATE_FILE_PATHS = gtest_test_instance._DEFAULT_ISOLATE_FILE_PATHS
+
+
+# Used for filtering large data deps at a finer grain than what's allowed in
+# isolate files since pushing deps to devices is expensive.
+# Wildcards are allowed.
+DEPS_EXCLUSION_LIST = [
+    'chrome/test/data/extensions/api_test',
+    'chrome/test/data/extensions/secure_shell',
+    'chrome/test/data/firefox*',
+    'chrome/test/data/gpu',
+    'chrome/test/data/image_decoding',
+    'chrome/test/data/import',
+    'chrome/test/data/page_cycler',
+    'chrome/test/data/perf',
+    'chrome/test/data/pyauto_private',
+    'chrome/test/data/safari_import',
+    'chrome/test/data/scroll',
+    'chrome/test/data/third_party',
+    'third_party/hunspell_dictionaries/*.dic',
+    # crbug.com/258690
+    'webkit/data/bmp_decoder',
+    'webkit/data/ico_decoder',
+]
+
+
+def _GetDisabledTestsFilterFromFile(suite_name):
+  """Returns a gtest filter based on the *_disabled file.
+
+  Args:
+    suite_name: Name of the test suite (e.g. base_unittests).
+
+  Returns:
+    A gtest filter which excludes disabled tests.
+    Example: '*-StackTrace.*:StringPrintfTest.StringPrintfMisc'
+  """
+  filter_file_path = os.path.join(
+      os.path.abspath(os.path.dirname(__file__)),
+      'filter', '%s_disabled' % suite_name)
+
+  if not os.path.exists(filter_file_path):
+    logging.info('No filter file found at %s', filter_file_path)
+    return '*'
+
+  filters = [x for x in [x.strip() for x in open(filter_file_path).readlines()]
+             if x and x[0] != '#']
+  disabled_filter = '*-%s' % ':'.join(filters)
+  logging.info('Applying filter "%s" obtained from %s',
+               disabled_filter, filter_file_path)
+  return disabled_filter
+
+
+def _GetTests(test_options, test_package, devices):
+  """Get a list of tests.
+
+  Args:
+    test_options: A GTestOptions object.
+    test_package: A TestPackageApk object.
+    devices: A list of attached devices.
+
+  Returns:
+    A list of all the tests in the test suite.
+  """
+  class TestListResult(base_test_result.BaseTestResult):
+    def __init__(self):
+      super(TestListResult, self).__init__(
+          'gtest_list_tests', base_test_result.ResultType.PASS)
+      self.test_list = []
+
+  def TestListerRunnerFactory(device, _shard_index):
+    class TestListerRunner(test_runner.TestRunner):
+      def RunTest(self, _test):
+        result = TestListResult()
+        self.test_package.Install(self.device)
+        result.test_list = self.test_package.GetAllTests(self.device)
+        results = base_test_result.TestRunResults()
+        results.AddResult(result)
+        return results, None
+    return TestListerRunner(test_options, device, test_package)
+
+  results, _no_retry = test_dispatcher.RunTests(
+      ['gtest_list_tests'], TestListerRunnerFactory, devices)
+  tests = []
+  for r in results.GetAll():
+    tests.extend(r.test_list)
+  return tests
+
+
+def _FilterTestsUsingPrefixes(all_tests, pre=False, manual=False):
+  """Removes tests with disabled prefixes.
+
+  Args:
+    all_tests: List of tests to filter.
+    pre: If True, include tests with PRE_ prefix.
+    manual: If True, include tests with MANUAL_ prefix.
+
+  Returns:
+    List of tests remaining.
+  """
+  filtered_tests = []
+  filter_prefixes = ['DISABLED_', 'FLAKY_', 'FAILS_']
+
+  if not pre:
+    filter_prefixes.append('PRE_')
+
+  if not manual:
+    filter_prefixes.append('MANUAL_')
+
+  for t in all_tests:
+    test_case, test = t.split('.', 1)
+    if not any([test_case.startswith(prefix) or test.startswith(prefix) for
+                prefix in filter_prefixes]):
+      filtered_tests.append(t)
+  return filtered_tests
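+
+# Illustrative example (not part of the original change): with pre=False and
+# manual=False, ['A.Foo', 'A.DISABLED_Bar', 'PRE_B.Baz'] is filtered down to
+# ['A.Foo'].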
+
+
+def _FilterDisabledTests(tests, suite_name, has_gtest_filter):
+  """Removes disabled tests from |tests|.
+
+  Applies the following filters in order:
+    1. Remove tests with disabled prefixes.
+    2. Remove tests specified in the *_disabled files in the 'filter' dir
+
+  Args:
+    tests: List of tests.
+    suite_name: Name of the test suite (e.g. base_unittests).
+    has_gtest_filter: Whether a gtest_filter is provided.
+
+  Returns:
+    List of tests remaining.
+  """
+  tests = _FilterTestsUsingPrefixes(
+      tests, has_gtest_filter, has_gtest_filter)
+  tests = unittest_util.FilterTestNames(
+      tests, _GetDisabledTestsFilterFromFile(suite_name))
+
+  return tests
+
+
+def Setup(test_options, devices):
+  """Create the test runner factory and tests.
+
+  Args:
+    test_options: A GTestOptions object.
+    devices: A list of attached devices.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  test_package = test_package_apk.TestPackageApk(test_options.suite_name)
+  if not os.path.exists(test_package.suite_path):
+    exe_test_package = test_package_exe.TestPackageExecutable(
+        test_options.suite_name)
+    if not os.path.exists(exe_test_package.suite_path):
+      raise Exception(
+          'Did not find %s target. Ensure it has been built.\n'
+          '(not found at %s or %s)'
+          % (test_options.suite_name,
+             test_package.suite_path,
+             exe_test_package.suite_path))
+    test_package = exe_test_package
+  logging.warning('Found target %s', test_package.suite_path)
+
+  i = base_setup.GenerateDepsDirUsingIsolate(
+      test_options.suite_name, test_options.isolate_file_path,
+      ISOLATE_FILE_PATHS, DEPS_EXCLUSION_LIST)
+  def push_data_deps_to_device_dir(device):
+    device_dir = (constants.TEST_EXECUTABLE_DIR
+        if test_package.suite_name == 'breakpad_unittests'
+        else device.GetExternalStoragePath())
+    base_setup.PushDataDeps(device, device_dir, test_options)
+  device_utils.DeviceUtils.parallel(devices).pMap(push_data_deps_to_device_dir)
+  if i:
+    i.Clear()
+
+  tests = _GetTests(test_options, test_package, devices)
+
+  # Constructs a new TestRunner with the current options.
+  def TestRunnerFactory(device, _shard_index):
+    return test_runner.TestRunner(
+        test_options,
+        device,
+        test_package)
+
+  if test_options.run_disabled:
+    test_options = test_options._replace(
+        test_arguments=('%s --gtest_also_run_disabled_tests' %
+                        test_options.test_arguments))
+  else:
+    tests = _FilterDisabledTests(tests, test_options.suite_name,
+                                 bool(test_options.gtest_filter))
+  if test_options.gtest_filter:
+    tests = unittest_util.FilterTestNames(tests, test_options.gtest_filter)
+
+  # Coalesce unit tests into a single test per device
+  if test_options.suite_name not in gtest_test_instance.BROWSER_TEST_SUITES:
+    num_devices = len(devices)
+    tests = [':'.join(tests[i::num_devices]) for i in xrange(num_devices)]
+    tests = [t for t in tests if t]
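+    # Illustrative example (not part of the original change): with two
+    # devices and tests ['A.a', 'A.b', 'B.c', 'B.d'], this yields the
+    # per-device filters ['A.a:B.c', 'A.b:B.d'].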
+
+  return (TestRunnerFactory, tests)
diff --git a/build/android/pylib/gtest/test_options.py b/build/android/pylib/gtest/test_options.py
new file mode 100644
index 0000000..8bc6996
--- /dev/null
+++ b/build/android/pylib/gtest/test_options.py
@@ -0,0 +1,19 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the GTestOptions named tuple."""
+
+import collections
+
+GTestOptions = collections.namedtuple('GTestOptions', [
+    'tool',
+    'gtest_filter',
+    'run_disabled',
+    'test_arguments',
+    'timeout',
+    'isolate_file_path',
+    'suite_name',
+    'app_data_files',
+    'app_data_file_dir',
+    'delete_stale_data'])
diff --git a/build/android/pylib/gtest/test_package.py b/build/android/pylib/gtest/test_package.py
new file mode 100644
index 0000000..4042a98
--- /dev/null
+++ b/build/android/pylib/gtest/test_package.py
@@ -0,0 +1,76 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class representing GTest test packages."""
+# pylint: disable=R0201
+
+
+class TestPackage(object):
+
+  """A helper base class for both APK and stand-alone executables.
+
+  Args:
+    suite_name: Name of the test suite (e.g. base_unittests).
+  """
+  def __init__(self, suite_name):
+    self.suite_name = suite_name
+
+  def ClearApplicationState(self, device):
+    """Clears the application state.
+
+    Args:
+      device: Instance of DeviceUtils.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def CreateCommandLineFileOnDevice(self, device, test_filter, test_arguments):
+    """Creates a test runner script and pushes to the device.
+
+    Args:
+      device: Instance of DeviceUtils.
+      test_filter: A test_filter flag.
+      test_arguments: Additional arguments to pass to the test binary.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def GetAllTests(self, device):
+    """Returns a list of all tests available in the test suite.
+
+    Args:
+      device: Instance of DeviceUtils.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def GetGTestReturnCode(self, _device):
+    return None
+
+  def SpawnTestProcess(self, device):
+    """Spawn the test process.
+
+    Args:
+      device: Instance of DeviceUtils.
+
+    Returns:
+      An instance of pexpect spawn class.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def Install(self, device):
+    """Install the test package to the device.
+
+    Args:
+      device: Instance of DeviceUtils.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def PullAppFiles(self, device, files, directory):
+    """Pull application data from the device.
+
+    Args:
+      device: Instance of DeviceUtils.
+      files: A list of paths relative to the application data directory to
+        retrieve from the device.
+      directory: The host directory to which files should be pulled.
+    """
+    raise NotImplementedError('Method must be overridden.')
diff --git a/build/android/pylib/gtest/test_package_apk.py b/build/android/pylib/gtest/test_package_apk.py
new file mode 100644
index 0000000..a679b03
--- /dev/null
+++ b/build/android/pylib/gtest/test_package_apk.py
@@ -0,0 +1,157 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines TestPackageApk to help run APK-based native tests."""
+# pylint: disable=W0212
+
+import itertools
+import logging
+import os
+import posixpath
+import shlex
+import sys
+import tempfile
+import time
+
+from pylib import android_commands
+from pylib import constants
+from pylib import pexpect
+from pylib.device import device_errors
+from pylib.device import intent
+from pylib.gtest import gtest_test_instance
+from pylib.gtest import local_device_gtest_run
+from pylib.gtest.test_package import TestPackage
+
+
+class TestPackageApk(TestPackage):
+  """A helper class for running APK-based native tests."""
+
+  def __init__(self, suite_name):
+    """
+    Args:
+      suite_name: Name of the test suite (e.g. base_unittests).
+    """
+    TestPackage.__init__(self, suite_name)
+    self.suite_path = os.path.join(
+        constants.GetOutDirectory(), '%s_apk' % suite_name,
+        '%s-debug.apk' % suite_name)
+    if suite_name == 'content_browsertests':
+      self._package_info = constants.PACKAGE_INFO['content_browsertests']
+    elif suite_name == 'components_browsertests':
+      self._package_info = constants.PACKAGE_INFO['components_browsertests']
+    else:
+      self._package_info = constants.PACKAGE_INFO['gtest']
+
+    if suite_name == 'net_unittests':
+      self._extras = {'RunInSubThread': ''}
+    else:
+      self._extras = {}
+
+  def _CreateCommandLineFileOnDevice(self, device, options):
+    device.WriteFile(self._package_info.cmdline_file,
+                     self.suite_name + ' ' + options)
+
+  def _GetFifo(self):
+    # The test.fifo path is determined by:
+    # testing/android/native_test/java/src/org/chromium/native_test/
+    #     NativeTestActivity.java and
+    # testing/android/native_test_launcher.cc
+    return '/data/data/' + self._package_info.package + '/files/test.fifo'
+
+  def _ClearFifo(self, device):
+    device.RunShellCommand('rm -f ' + self._GetFifo())
+
+  def _WatchFifo(self, device, timeout, logfile=None):
+    for i in range(100):
+      if device.FileExists(self._GetFifo()):
+        logging.info('Fifo created. Slept for %f secs', i * 0.5)
+        break
+      time.sleep(0.5)
+    else:
+      raise device_errors.DeviceUnreachableError(
+          'Unable to find fifo on device: %s' % self._GetFifo())
+    args = shlex.split(device.old_interface.Adb()._target_arg)
+    args += ['shell', 'cat', self._GetFifo()]
+    return pexpect.spawn('adb', args, timeout=timeout, logfile=logfile)
+
+  def _StartActivity(self, device, force_stop=True):
+    device.StartActivity(
+        intent.Intent(package=self._package_info.package,
+                      activity=self._package_info.activity,
+                      action='android.intent.action.MAIN',
+                      extras=self._extras),
+        # No wait since the runner waits for FIFO creation anyway.
+        blocking=False,
+        force_stop=force_stop)
+
+  #override
+  def ClearApplicationState(self, device):
+    device.ClearApplicationState(self._package_info.package)
+    # Content shell creates a profile on the sdcard that accumulates cache
+    # files over time.
+    if self.suite_name == 'content_browsertests':
+      try:
+        device.RunShellCommand(
+            'rm -r %s/content_shell' % device.GetExternalStoragePath(),
+            timeout=60 * 2)
+      except device_errors.CommandFailedError:
+        # TODO(jbudorick) Handle this exception appropriately once the
+        #                 conversions are done.
+        pass
+    elif self.suite_name == 'components_browsertests':
+      try:
+        device.RunShellCommand(
+            'rm -r %s/components_shell' % device.GetExternalStoragePath(),
+            timeout=60 * 2)
+      except device_errors.CommandFailedError:
+        # TODO(jbudorick) Handle this exception appropriately once the
+        #                 conversions are done.
+        pass
+
+  #override
+  def CreateCommandLineFileOnDevice(self, device, test_filter, test_arguments):
+    self._CreateCommandLineFileOnDevice(
+        device, '--gtest_filter=%s %s' % (test_filter, test_arguments))
+
+  #override
+  def GetAllTests(self, device):
+    self._CreateCommandLineFileOnDevice(device, '--gtest_list_tests')
+    try:
+      self.tool.SetupEnvironment()
+      # Clear and start monitoring logcat.
+      self._ClearFifo(device)
+      self._StartActivity(device)
+      # Wait for native test to complete.
+      p = self._WatchFifo(device, timeout=30 * self.tool.GetTimeoutScale())
+      p.expect('<<ScopedMainEntryLogger')
+      p.close()
+    finally:
+      self.tool.CleanUpEnvironment()
+    # Strip the trailing newline from each line.
+    content = [line.rstrip() for line in p.before.splitlines()]
+    return gtest_test_instance.ParseGTestListTests(content)
+
+  #override
+  def SpawnTestProcess(self, device):
+    try:
+      self.tool.SetupEnvironment()
+      self._ClearFifo(device)
+      # We don't need to stop the Activity because ClearApplicationState() is
+      # always called before this call, so the Activity is already stopped at
+      # this point.
+      self._StartActivity(device, force_stop=False)
+    finally:
+      self.tool.CleanUpEnvironment()
+    logfile = android_commands.NewLineNormalizer(sys.stdout)
+    return self._WatchFifo(device, timeout=10, logfile=logfile)
+
+  #override
+  def Install(self, device):
+    self.tool.CopyFiles(device)
+    device.Install(self.suite_path)
+
+  #override
+  def PullAppFiles(self, device, files, directory):
+    local_device_gtest_run.PullAppFilesImpl(
+        device, self._package_info.package, files, directory)
diff --git a/build/android/pylib/gtest/test_package_exe.py b/build/android/pylib/gtest/test_package_exe.py
new file mode 100644
index 0000000..87071b5
--- /dev/null
+++ b/build/android/pylib/gtest/test_package_exe.py
@@ -0,0 +1,163 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines TestPackageExecutable to help run stand-alone executables."""
+
+import logging
+import os
+import posixpath
+import sys
+import tempfile
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import pexpect
+from pylib.device import device_errors
+from pylib.gtest import gtest_test_instance
+from pylib.gtest.test_package import TestPackage
+
+
+class TestPackageExecutable(TestPackage):
+  """A helper class for running stand-alone executables."""
+
+  _TEST_RUNNER_RET_VAL_FILE = 'gtest_retval'
+
+  def __init__(self, suite_name):
+    """
+    Args:
+      suite_name: Name of the test suite (e.g. base_unittests).
+    """
+    TestPackage.__init__(self, suite_name)
+    self.suite_path = os.path.join(constants.GetOutDirectory(), suite_name)
+    self._symbols_dir = os.path.join(constants.GetOutDirectory(),
+                                     'lib.target')
+
+  #override
+  def GetGTestReturnCode(self, device):
+    ret = None
+    ret_code = 1  # Assume failure if we can't find it
+    ret_code_file = tempfile.NamedTemporaryFile()
+    try:
+      if not device.PullFile(
+          constants.TEST_EXECUTABLE_DIR + '/' +
+          TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE,
+          ret_code_file.name):
+        logging.critical('Unable to pull gtest ret val file %s',
+                         ret_code_file.name)
+        raise ValueError
+      ret_code = open(ret_code_file.name).read()
+      ret = int(ret_code)
+    except ValueError:
+      logging.critical('Error reading gtest ret val file %s [%s]',
+                       ret_code_file.name, ret_code)
+      ret = 1
+    return ret
+
+  @staticmethod
+  def _AddNativeCoverageExports(device):
+    # export GCOV_PREFIX sets the path for native coverage results
+    # export GCOV_PREFIX_STRIP indicates how many initial directory
+    #                          names to strip off the hardwired absolute paths.
+    #                          This value is calculated in buildbot.sh and
+    #                          depends on where the tree is built.
+    # Ex: /usr/local/google/code/chrome will become
+    #     /code/chrome if GCOV_PREFIX_STRIP=3
+    try:
+      depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
+      export_string = ('export GCOV_PREFIX="%s/gcov"\n' %
+                       device.GetExternalStoragePath())
+      export_string += 'export GCOV_PREFIX_STRIP=%s\n' % depth
+      return export_string
+    except KeyError:
+      logging.info('NATIVE_COVERAGE_DEPTH_STRIP is not defined: '
+                   'No native coverage.')
+      return ''
+    except device_errors.CommandFailedError:
+      logging.info('No external storage found: No native coverage.')
+      return ''
+
+  #override
+  def ClearApplicationState(self, device):
+    device.KillAll(self.suite_name, blocking=True, timeout=30, quiet=True)
+
+  #override
+  def CreateCommandLineFileOnDevice(self, device, test_filter, test_arguments):
+    tool_wrapper = self.tool.GetTestWrapper()
+    sh_script_file = tempfile.NamedTemporaryFile()
+    # We need to capture the exit status from the script since adb shell
+    # won't propagate it to us.
+    sh_script_file.write(
+        'cd %s\n'
+        '%s'
+        '%s LD_LIBRARY_PATH=%s/%s_deps %s/%s --gtest_filter=%s %s\n'
+        'echo $? > %s' %
+        (constants.TEST_EXECUTABLE_DIR,
+         self._AddNativeCoverageExports(device),
+         tool_wrapper,
+         constants.TEST_EXECUTABLE_DIR,
+         self.suite_name,
+         constants.TEST_EXECUTABLE_DIR,
+         self.suite_name,
+         test_filter, test_arguments,
+         TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE))
+    sh_script_file.flush()
+    cmd_helper.RunCmd(['chmod', '+x', sh_script_file.name])
+    device.PushChangedFiles([(
+        sh_script_file.name,
+        constants.TEST_EXECUTABLE_DIR + '/chrome_test_runner.sh')])
+    logging.info('Contents of the test runner script:')
+    for line in open(sh_script_file.name).readlines():
+      logging.info('  ' + line.rstrip())
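+
+  # Illustrative rendering of the generated script (assuming
+  # constants.TEST_EXECUTABLE_DIR is /data/local/tmp, suite base_unittests,
+  # no tool wrapper, and no native coverage exports):
+  #   cd /data/local/tmp
+  #   LD_LIBRARY_PATH=/data/local/tmp/base_unittests_deps \
+  #       /data/local/tmp/base_unittests --gtest_filter=<filter> <args>
+  #   echo $? > gtest_retval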
+
+  #override
+  def GetAllTests(self, device):
+    lib_path = posixpath.join(
+        constants.TEST_EXECUTABLE_DIR, '%s_deps' % self.suite_name)
+
+    cmd = []
+    if self.tool.GetTestWrapper():
+      cmd.append(self.tool.GetTestWrapper())
+    cmd.extend([
+        posixpath.join(constants.TEST_EXECUTABLE_DIR, self.suite_name),
+        '--gtest_list_tests'])
+
+    output = device.RunShellCommand(
+        cmd, check_return=True, env={'LD_LIBRARY_PATH': lib_path})
+    return gtest_test_instance.ParseGTestListTests(output)
+
+  #override
+  def SpawnTestProcess(self, device):
+    args = ['adb', '-s', str(device), 'shell', 'sh',
+            constants.TEST_EXECUTABLE_DIR + '/chrome_test_runner.sh']
+    logging.info(args)
+    return pexpect.spawn(args[0], args[1:], logfile=sys.stdout)
+
+  #override
+  def Install(self, device):
+    if self.tool.NeedsDebugInfo():
+      target_name = self.suite_path
+    else:
+      target_name = self.suite_path + '_stripped'
+      if not os.path.isfile(target_name):
+        raise Exception('Did not find %s, build target %s' %
+                        (target_name, self.suite_name + '_stripped'))
+
+      target_mtime = os.stat(target_name).st_mtime
+      source_mtime = os.stat(self.suite_path).st_mtime
+      if target_mtime < source_mtime:
+        raise Exception(
+            'stripped binary (%s, timestamp %d) older than '
+            'source binary (%s, timestamp %d), build target %s' %
+            (target_name, target_mtime, self.suite_path, source_mtime,
+             self.suite_name + '_stripped'))
+
+    test_binary_path = constants.TEST_EXECUTABLE_DIR + '/' + self.suite_name
+    device.PushChangedFiles([(target_name, test_binary_path)])
+    deps_path = self.suite_path + '_deps'
+    if os.path.isdir(deps_path):
+      device.PushChangedFiles([(deps_path, test_binary_path + '_deps')])
+
+  #override
+  def PullAppFiles(self, device, files, directory):
+    pass
diff --git a/build/android/pylib/gtest/test_runner.py b/build/android/pylib/gtest/test_runner.py
new file mode 100644
index 0000000..a48f18a
--- /dev/null
+++ b/build/android/pylib/gtest/test_runner.py
@@ -0,0 +1,217 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import tempfile
+
+from pylib import pexpect
+from pylib import ports
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.device import device_errors
+from pylib.gtest import gtest_test_instance
+from pylib.local import local_test_server_spawner
+from pylib.perf import perf_control
+
+# Test case statuses.
+RE_RUN = re.compile('\\[ RUN      \\] ?(.*)\r\n')
+RE_FAIL = re.compile('\\[  FAILED  \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n')
+RE_OK = re.compile('\\[       OK \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n')
+
+# Test run statuses.
+RE_PASSED = re.compile('\\[  PASSED  \\] ?(.*)\r\n')
+RE_RUNNER_FAIL = re.compile('\\[ RUNNER_FAILED \\] ?(.*)\r\n')
+# Signal handlers are installed before starting tests
+# to output the CRASHED marker when a crash happens.
+RE_CRASH = re.compile('\\[ CRASHED      \\](.*)\r\n')
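+# Example of the gtest output these patterns are matched against:
+#   [ RUN      ] FooTest.Bar
+#   [       OK ] FooTest.Bar (9 ms)
+#   [  FAILED  ] FooTest.Baz (2 ms)
+#   [  PASSED  ] 1 test.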
+
+# Bots that don't output anything for 20 minutes get timed out, so that's our
+# hard cap.
+_INFRA_STDOUT_TIMEOUT = 20 * 60
+
+
+def _TestSuiteRequiresMockTestServer(suite_name):
+  """Returns True if the test suite requires mock test server."""
+  tests_require_net_test_server = ['unit_tests', 'net_unittests',
+                                   'components_browsertests',
+                                   'content_unittests',
+                                   'content_browsertests']
+  return suite_name in tests_require_net_test_server
+
+def _TestSuiteRequiresHighPerfMode(suite_name):
+  """Returns True if the test suite requires high performance mode."""
+  return 'perftests' in suite_name
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  def __init__(self, test_options, device, test_package):
+    """Single test suite attached to a single device.
+
+    Args:
+      test_options: A GTestOptions object.
+      device: Device to run the tests.
+      test_package: An instance of TestPackage class.
+    """
+
+    super(TestRunner, self).__init__(device, test_options.tool)
+
+    self.test_package = test_package
+    self.test_package.tool = self.tool
+    self._test_arguments = test_options.test_arguments
+
+    timeout = test_options.timeout
+    if timeout == 0:
+      timeout = 60
+    # On a VM (e.g. chromium buildbots), this timeout is way too small.
+    if os.environ.get('BUILDBOT_SLAVENAME'):
+      timeout = timeout * 2
+
+    self._timeout = min(timeout * self.tool.GetTimeoutScale(),
+                        _INFRA_STDOUT_TIMEOUT)
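+    # E.g. a 60s timeout becomes 120s on a buildbot, is then scaled by the
+    # tool's timeout factor, and is capped at 1200s (20 minutes).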
+    if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name):
+      self._perf_controller = perf_control.PerfControl(self.device)
+
+    if _TestSuiteRequiresMockTestServer(self.test_package.suite_name):
+      self._servers = [
+          local_test_server_spawner.LocalTestServerSpawner(
+              ports.AllocateTestServerPort(), self.device, self.tool)]
+    else:
+      self._servers = []
+
+    if test_options.app_data_files:
+      self._app_data_files = test_options.app_data_files
+      if test_options.app_data_file_dir:
+        self._app_data_file_dir = test_options.app_data_file_dir
+      else:
+        self._app_data_file_dir = tempfile.mkdtemp()
+        logging.critical('Saving app files to %s', self._app_data_file_dir)
+    else:
+      self._app_data_files = None
+      self._app_data_file_dir = None
+
+  #override
+  def InstallTestPackage(self):
+    self.test_package.Install(self.device)
+
+  def _ParseTestOutput(self, p):
+    """Process the test output.
+
+    Args:
+      p: An instance of pexpect spawn class.
+
+    Returns:
+      A TestRunResults object.
+    """
+    results = base_test_result.TestRunResults()
+
+    log = ''
+    try:
+      while True:
+        full_test_name = None
+
+        found = p.expect([RE_RUN, RE_PASSED, RE_RUNNER_FAIL],
+                         timeout=self._timeout)
+        if found == 1:  # RE_PASSED
+          break
+        elif found == 2:  # RE_RUNNER_FAIL
+          break
+        else:  # RE_RUN
+          full_test_name = p.match.group(1).replace('\r', '')
+          found = p.expect([RE_OK, RE_FAIL, RE_CRASH], timeout=self._timeout)
+          log = p.before.replace('\r', '')
+          if found == 0:  # RE_OK
+            if full_test_name == p.match.group(1).replace('\r', ''):
+              duration_ms = int(p.match.group(3)) if p.match.group(3) else 0
+              results.AddResult(base_test_result.BaseTestResult(
+                  full_test_name, base_test_result.ResultType.PASS,
+                  duration=duration_ms, log=log))
+          elif found == 2:  # RE_CRASH
+            results.AddResult(base_test_result.BaseTestResult(
+                full_test_name, base_test_result.ResultType.CRASH,
+                log=log))
+            break
+          else:  # RE_FAIL
+            duration_ms = int(p.match.group(3)) if p.match.group(3) else 0
+            results.AddResult(base_test_result.BaseTestResult(
+                full_test_name, base_test_result.ResultType.FAIL,
+                duration=duration_ms, log=log))
+    except pexpect.EOF:
+      logging.error('Test terminated - EOF')
+      # We're here because either the device went offline, or the test harness
+      # crashed without outputting the CRASHED marker (crbug.com/175538).
+      if not self.device.IsOnline():
+        raise device_errors.DeviceUnreachableError(
+            'Device %s went offline.' % str(self.device))
+      if full_test_name:
+        results.AddResult(base_test_result.BaseTestResult(
+            full_test_name, base_test_result.ResultType.CRASH,
+            log=p.before.replace('\r', '')))
+    except pexpect.TIMEOUT:
+      logging.error('Test terminated after %d second timeout.',
+                    self._timeout)
+      if full_test_name:
+        results.AddResult(base_test_result.BaseTestResult(
+            full_test_name, base_test_result.ResultType.TIMEOUT,
+            log=p.before.replace('\r', '')))
+    finally:
+      p.close()
+
+    ret_code = self.test_package.GetGTestReturnCode(self.device)
+    if ret_code:
+      logging.critical(
+          'gtest exit code: %d\npexpect.before: %s\npexpect.after: %s',
+          ret_code, p.before, p.after)
+
+    return results
+
+  #override
+  def RunTest(self, test):
+    test_results = base_test_result.TestRunResults()
+    if not test:
+      return test_results, None
+
+    try:
+      self.test_package.ClearApplicationState(self.device)
+      self.test_package.CreateCommandLineFileOnDevice(
+          self.device, test, self._test_arguments)
+      test_results = self._ParseTestOutput(
+          self.test_package.SpawnTestProcess(self.device))
+      if self._app_data_files:
+        self.test_package.PullAppFiles(self.device, self._app_data_files,
+                                       self._app_data_file_dir)
+    finally:
+      for s in self._servers:
+        s.Reset()
+    # Calculate unknown test results.
+    all_tests = set(test.split(':'))
+    all_tests_ran = set([t.GetName() for t in test_results.GetAll()])
+    unknown_tests = all_tests - all_tests_ran
+    test_results.AddResults(
+        [base_test_result.BaseTestResult(t, base_test_result.ResultType.UNKNOWN)
+         for t in unknown_tests])
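+    # E.g. for test='A.b:C.d', if only A.b produced a result then C.d is
+    # reported as UNKNOWN above and included in the retry filter below.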
+    retry = ':'.join([t.GetName() for t in test_results.GetNotPass()])
+    return test_results, retry
+
+  #override
+  def SetUp(self):
+    """Sets up necessary test enviroment for the test suite."""
+    super(TestRunner, self).SetUp()
+    for s in self._servers:
+      s.SetUp()
+    if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name):
+      self._perf_controller.SetHighPerfMode()
+    self.tool.SetupEnvironment()
+
+  #override
+  def TearDown(self):
+    """Cleans up the test enviroment for the test suite."""
+    for s in self._servers:
+      s.TearDown()
+    if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name):
+      self._perf_controller.SetDefaultPerfMode()
+    self.test_package.ClearApplicationState(self.device)
+    self.tool.CleanUpEnvironment()
+    super(TestRunner, self).TearDown()
diff --git a/build/android/pylib/host_driven/__init__.py b/build/android/pylib/host_driven/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/host_driven/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/host_driven/setup.py b/build/android/pylib/host_driven/setup.py
new file mode 100644
index 0000000..b2ed348
--- /dev/null
+++ b/build/android/pylib/host_driven/setup.py
@@ -0,0 +1,200 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Setup for instrumentation host-driven tests."""
+
+import logging
+import os
+import sys
+import types
+
+from pylib.host_driven import test_case
+from pylib.host_driven import test_info_collection
+from pylib.host_driven import test_runner
+
+
+def _GetPythonFiles(root, files):
+  """Returns all files from |files| that end in 'Test.py'.
+
+  Args:
+    root: A directory name with python files.
+    files: A list of file names.
+
+  Returns:
+    A list with all python files that match the testing naming scheme.
+  """
+  return [os.path.join(root, f) for f in files if f.endswith('Test.py')]
+
+
+def _InferImportNameFromFile(python_file):
+  """Given a file, infer the import name for that file.
+
+  Example: /usr/foo/bar/baz.py -> baz.
+
+  Args:
+    python_file: Path to the Python file, ostensibly to import later.
+
+  Returns:
+    The module name for the given file.
+  """
+  return os.path.splitext(os.path.basename(python_file))[0]
+
+
+def _GetTestModules(host_driven_test_root, is_official_build):
+  """Retrieve a list of python modules that match the testing naming scheme.
+
+  Walks the location of host-driven tests, imports them, and provides the list
+  of imported modules to the caller.
+
+  Args:
+    host_driven_test_root: The path to walk, looking for the
+        pythonDrivenTests or host_driven_tests directory
+    is_official_build: Whether to run only those tests marked 'official'
+
+  Returns:
+    A list of python modules under |host_driven_test_root| which match the
+    testing naming scheme. Each module should define one or more classes that
+    derive from HostDrivenTestCase.
+  """
+  # By default run all host-driven tests under pythonDrivenTests or
+  # host_driven_tests.
+  host_driven_test_file_list = []
+  for root, _, files in os.walk(host_driven_test_root):
+    if (root.endswith('host_driven_tests') or
+        root.endswith('pythonDrivenTests') or
+        (is_official_build and (root.endswith('pythonDrivenTests/official') or
+                                root.endswith('host_driven_tests/official')))):
+      host_driven_test_file_list += _GetPythonFiles(root, files)
+  host_driven_test_file_list.sort()
+
+  test_module_list = [_GetModuleFromFile(test_file)
+                      for test_file in host_driven_test_file_list]
+  return test_module_list
+
+
+def _GetModuleFromFile(python_file):
+  """Gets the python module associated with a file by importing it.
+
+  Args:
+    python_file: File to import.
+
+  Returns:
+    The module object.
+  """
+  sys.path.append(os.path.dirname(python_file))
+  import_name = _InferImportNameFromFile(python_file)
+  return __import__(import_name)
+
+
+def _GetTestsFromClass(test_case_class, **kwargs):
+  """Returns one test object for each test method in |test_case_class|.
+
+  Test methods are methods on the class which begin with 'test'.
+
+  Args:
+    test_case_class: Class derived from HostDrivenTestCase which contains zero
+        or more test methods.
+    kwargs: Keyword args to pass into the constructor of test cases.
+
+  Returns:
+    A list of test case objects, each initialized for a particular test method.
+  """
+  test_names = [m for m in dir(test_case_class)
+                if _IsTestMethod(m, test_case_class)]
+  return [test_case_class(name, **kwargs) for name in test_names]
+
+
+def _GetTestsFromModule(test_module, **kwargs):
+  """Gets a list of test objects from |test_module|.
+
+  Args:
+    test_module: Module from which to get the set of test methods.
+    kwargs: Keyword args to pass into the constructor of test cases.
+
+  Returns:
+    A list of test case objects each initialized for a particular test method
+    defined in |test_module|.
+  """
+
+  tests = []
+  for name in dir(test_module):
+    attr = getattr(test_module, name)
+    if _IsTestCaseClass(attr):
+      tests.extend(_GetTestsFromClass(attr, **kwargs))
+  return tests
+
+
+def _IsTestCaseClass(test_class):
+  return (type(test_class) is types.TypeType and
+          issubclass(test_class, test_case.HostDrivenTestCase) and
+          test_class is not test_case.HostDrivenTestCase)
+
+
+def _IsTestMethod(attrname, test_case_class):
+  """Checks whether this is a valid test method.
+
+  Args:
+    attrname: The method name.
+    test_case_class: The test case class.
+
+  Returns:
+    True if test_case_class.'attrname' is callable and it starts with 'test';
+    False otherwise.
+  """
+  attr = getattr(test_case_class, attrname)
+  return callable(attr) and attrname.startswith('test')
+
+
+def _GetAllTests(test_root, is_official_build, **kwargs):
+  """Retrieve a list of host-driven tests defined under |test_root|.
+
+  Args:
+    test_root: Path which contains host-driven test files.
+    is_official_build: Whether this is an official build.
+    kwargs: Keyword args to pass into the constructor of test cases.
+
+  Returns:
+    List of test case objects, one for each available test method.
+  """
+  if not test_root:
+    return []
+  all_tests = []
+  test_module_list = _GetTestModules(test_root, is_official_build)
+  for module in test_module_list:
+    all_tests.extend(_GetTestsFromModule(module, **kwargs))
+  return all_tests
+
+
+def InstrumentationSetup(host_driven_test_root, official_build,
+                         instrumentation_options):
+  """Creates a list of host-driven instrumentation tests and a runner factory.
+
+  Args:
+    host_driven_test_root: Directory where the host-driven tests are.
+    official_build: True if this is an official build.
+    instrumentation_options: An InstrumentationOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+
+  test_collection = test_info_collection.TestInfoCollection()
+  all_tests = _GetAllTests(
+      host_driven_test_root, official_build,
+      instrumentation_options=instrumentation_options)
+  test_collection.AddTests(all_tests)
+
+  available_tests = test_collection.GetAvailableTests(
+      instrumentation_options.annotations,
+      instrumentation_options.exclude_annotations,
+      instrumentation_options.test_filter)
+  logging.debug('All available tests: ' + str(
+      [t.tagged_name for t in available_tests]))
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.HostDrivenTestRunner(
+        device, shard_index,
+        instrumentation_options.tool)
+
+  return (TestRunnerFactory, available_tests)
diff --git a/build/android/pylib/host_driven/test_case.py b/build/android/pylib/host_driven/test_case.py
new file mode 100644
index 0000000..6ff4c5f
--- /dev/null
+++ b/build/android/pylib/host_driven/test_case.py
@@ -0,0 +1,189 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for host-driven test cases.
+
+This test case is intended to serve as the base class for any host-driven
+test cases. It is similar to the Python unittest module in that test cases
+inherit from this class and add methods which will be run as tests.
+
+When a HostDrivenTestCase object is instantiated, its purpose is to run only one
+test method in the derived class. The test runner gives it the name of the test
+method the instance will run. The test runner calls SetUp with the device ID
+which the test method will run against. The test runner runs the test method
+itself, collecting the result, and calls TearDown.
+
+Tests can perform arbitrary Python commands and asserts in test methods. Tests
+that run instrumentation tests can make use of the _RunJavaTestFilters helper
+function to trigger Java tests and convert results into a single host-driven
+test result.
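+
+A minimal example (an illustrative sketch; the names are hypothetical):
+
+  class ExampleTest(HostDrivenTestCase):
+    def testFoo(self):
+      return self._RunJavaTestFilters(['foo.bar.TestClass#testMethod'])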
+"""
+
+import logging
+import os
+import time
+
+from pylib import constants
+from pylib import forwarder
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.device import device_utils
+from pylib.instrumentation import test_package
+from pylib.instrumentation import test_result
+from pylib.instrumentation import test_runner
+
+# aka the parent of com.google.android
+BASE_ROOT = 'src' + os.sep
+
+
+class HostDrivenTestCase(object):
+  """Base class for host-driven test cases."""
+
+  _HOST_DRIVEN_TAG = 'HostDriven'
+
+  def __init__(self, test_name, instrumentation_options=None):
+    """Create a test case initialized to run |test_name|.
+
+    Args:
+      test_name: The name of the method to run as the test.
+      instrumentation_options: An InstrumentationOptions object.
+    """
+    class_name = self.__class__.__name__
+    self.device = None
+    self.device_id = ''
+    self.has_forwarded_ports = False
+    self.instrumentation_options = instrumentation_options
+    self.ports_to_forward = []
+    self.shard_index = 0
+
+    # Use tagged_name when creating results, so that we can identify host-driven
+    # tests in the overall results.
+    self.test_name = test_name
+    self.qualified_name = '%s.%s' % (class_name, self.test_name)
+    self.tagged_name = '%s_%s' % (self._HOST_DRIVEN_TAG, self.qualified_name)
+
+  # TODO(bulach): make ports_to_forward not optional and move the Forwarder
+  # mapping here.
+  def SetUp(self, device, shard_index, ports_to_forward=None):
+    if not ports_to_forward:
+      ports_to_forward = []
+    self.device = device
+    self.shard_index = shard_index
+    self.device_id = str(self.device)
+    if ports_to_forward:
+      self.ports_to_forward = ports_to_forward
+
+  def TearDown(self):
+    pass
+
+  # TODO(craigdh): Remove GetOutDir once references have been removed
+  # downstream.
+  @staticmethod
+  def GetOutDir():
+    return constants.GetOutDirectory()
+
+  def Run(self):
+    logging.info('Running host-driven test: %s', self.tagged_name)
+    # Get the test method on the derived class and execute it
+    return getattr(self, self.test_name)()
+
+  @staticmethod
+  def __GetHostForwarderLog():
+    return ('-- Begin Full HostForwarder log\n'
+            '%s\n'
+            '--End Full HostForwarder log\n' % forwarder.Forwarder.GetHostLog())
+
+  def __StartForwarder(self):
+    logging.warning('Forwarding %s %s', self.ports_to_forward,
+                    self.has_forwarded_ports)
+    if self.ports_to_forward and not self.has_forwarded_ports:
+      self.has_forwarded_ports = True
+      tool = valgrind_tools.CreateTool(None, self.device)
+      forwarder.Forwarder.Map([(port, port) for port in self.ports_to_forward],
+                              self.device, tool)
+
+  def __RunJavaTest(self, test, test_pkg, additional_flags=None):
+    """Runs a single Java test in a Java TestRunner.
+
+    Args:
+      test: Fully qualified test name (ex. foo.bar.TestClass#testMethod)
+      test_pkg: TestPackage object.
+      additional_flags: A list of additional flags to add to the command line.
+
+    Returns:
+      TestRunResults object with a single test result.
+    """
+    # TODO(bulach): move this to SetUp() stage.
+    self.__StartForwarder()
+
+    java_test_runner = test_runner.TestRunner(
+        self.instrumentation_options, self.device, self.shard_index,
+        test_pkg, additional_flags=additional_flags)
+    try:
+      java_test_runner.SetUp()
+      return java_test_runner.RunTest(test)[0]
+    finally:
+      java_test_runner.TearDown()
+
+  def _RunJavaTestFilters(self, test_filters, additional_flags=None):
+    """Calls a list of tests and stops at the first test failure.
+
+    This method iterates until either it encounters a non-passing test or it
+    exhausts the list of tests. Then it returns the appropriate overall result.
+
+    Test cases may make use of this method internally to assist in running
+    instrumentation tests. This function relies on instrumentation_options
+    being defined.
+
+    Args:
+      test_filters: A list of Java test filters.
+      additional_flags: A list of additional flags to add to the command line.
+
+    Returns:
+      A TestRunResults object containing an overall result for this set of Java
+      tests. If any Java tests do not pass, this is a fail overall.
+    """
+    test_type = base_test_result.ResultType.PASS
+    log = ''
+
+    test_pkg = test_package.TestPackage(
+        self.instrumentation_options.test_apk_path,
+        self.instrumentation_options.test_apk_jar_path,
+        self.instrumentation_options.test_support_apk_path)
+
+    start_ms = int(time.time()) * 1000
+    done = False
+    for test_filter in test_filters:
+      tests = test_pkg.GetAllMatchingTests(None, None, test_filter)
+      # Filters should always result in >= 1 test.
+      if len(tests) == 0:
+        raise Exception('Java test filter "%s" returned no tests.'
+                        % test_filter)
+      for test in tests:
+        # We're only running one test at a time, so this TestRunResults object
+        # will hold only one result.
+        java_result = self.__RunJavaTest(test, test_pkg, additional_flags)
+        assert len(java_result.GetAll()) == 1
+        if not java_result.DidRunPass():
+          result = java_result.GetNotPass().pop()
+          log = result.GetLog()
+          log += self.__GetHostForwarderLog()
+          test_type = result.GetType()
+          done = True
+          break
+      if done:
+        break
+    duration_ms = int(time.time()) * 1000 - start_ms
+
+    overall_result = base_test_result.TestRunResults()
+    overall_result.AddResult(
+        test_result.InstrumentationTestResult(
+            self.tagged_name, test_type, start_ms, duration_ms, log=log))
+    return overall_result
+
+  def __str__(self):
+    return self.tagged_name
+
+  def __repr__(self):
+    return self.tagged_name
diff --git a/build/android/pylib/host_driven/test_info_collection.py b/build/android/pylib/host_driven/test_info_collection.py
new file mode 100644
index 0000000..c65d417
--- /dev/null
+++ b/build/android/pylib/host_driven/test_info_collection.py
@@ -0,0 +1,144 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing information about the host-driven tests."""
+
+import logging
+import os
+import sys
+
+from pylib.host_driven import tests_annotations
+
+from pylib import constants
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                             'build', 'util', 'lib', 'common'))
+
+import unittest_util # pylint: disable=F0401
+
+class TestInfo(object):
+  """An object containing and representing a test function, plus metadata."""
+
+  def __init__(self, runnable, set_up=None, tear_down=None):
+    # The actual test function/method.
+    self.runnable = runnable
+    # Qualified name of test function/method (e.g. FooModule.testBar).
+    self.qualified_name = self._GetQualifiedName(runnable)
+    # setUp and teardown functions, if any.
+    self.set_up = set_up
+    self.tear_down = tear_down
+
+  @staticmethod
+  def _GetQualifiedName(runnable):
+    """Helper method to infer a runnable's name and module name.
+
+    Many filters and lists presuppose a format of module_name.testMethodName.
+    To make this easy on everyone, we use some reflection magic to infer this
+    name automatically.
+
+    Args:
+      runnable: the test method to get the qualified name for
+
+    Returns:
+      qualified name for this runnable, incl. module name and method name.
+    """
+    runnable_name = runnable.__name__
+    # See also tests_annotations.
+    module_name = os.path.splitext(
+        os.path.basename(runnable.__globals__['__file__']))[0]
+    return '.'.join([module_name, runnable_name])
+
+  def __str__(self):
+    return self.qualified_name
+
+
+class TestInfoCollection(object):
+  """A collection of TestInfo objects which facilitates filtering."""
+
+  def __init__(self):
+    """Initialize a new TestInfoCollection."""
+    # Master list of all valid tests.
+    self.all_tests = []
+
+  def AddTests(self, test_infos):
+    """Adds a set of tests to this collection.
+
+    The user may then retrieve them, optionally according to criteria, via
+    GetAvailableTests().
+
+    Args:
+      test_infos: a list of TestInfos representing test functions/methods.
+    """
+    self.all_tests = test_infos
+
+  def GetAvailableTests(self, annotations, exclude_annotations, name_filter):
+    """Get a collection of TestInfos which match the supplied criteria.
+
+    Args:
+      annotations: List of annotations. Each test in the returned list is
+        annotated with at least one of these annotations.
+      exclude_annotations: List of annotations. The tests in the returned
+        list are not annotated with any of these annotations.
+      name_filter: name filter which tests must match, if any
+
+    Returns:
+      List of available tests.
+    """
+    available_tests = self.all_tests
+
+    # Filter out tests which match neither the requested annotations nor the
+    # requested name filter, if any.
+    available_tests = [t for t in available_tests if
+                       self._AnnotationIncludesTest(t, annotations)]
+    if annotations and len(annotations) == 1 and annotations[0] == 'SmallTest':
+      tests_without_annotation = [
+          t for t in self.all_tests if
+          not tests_annotations.AnnotatedFunctions.GetTestAnnotations(
+              t.qualified_name)]
+      test_names = [t.qualified_name for t in tests_without_annotation]
+      logging.warning('The following tests do not contain any annotation. '
+                      'Assuming "SmallTest":\n%s',
+                      '\n'.join(test_names))
+      available_tests += tests_without_annotation
+    if exclude_annotations:
+      excluded_tests = [t for t in available_tests if
+                        self._AnnotationIncludesTest(t, exclude_annotations)]
+      available_tests = list(set(available_tests) - set(excluded_tests))
+
+    if name_filter:
+      available_test_names = unittest_util.FilterTestNames(
+          [t.qualified_name for t in available_tests], name_filter)
+      available_tests = [
+          t for t in available_tests if
+          t.qualified_name in available_test_names]
+    return available_tests
+
+  @staticmethod
+  def _AnnotationIncludesTest(test_info, annotation_filter_list):
+    """Checks whether a given test represented by test_info matches annotation.
+
+    Args:
+      test_info: TestInfo object representing the test
+      annotation_filter_list: list of annotation filters to match (e.g. Smoke)
+
+    Returns:
+      True if no annotation was supplied or the test matches; False otherwise.
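+
+    Example:
+      annotation_filter_list=['Feature=Sync,Bookmarks'] matches tests
+      annotated with 'Feature:Sync' or 'Feature:Bookmarks'.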
+    """
+    if not annotation_filter_list:
+      return True
+    for annotation_filter in annotation_filter_list:
+      filters = annotation_filter.split('=')
+      if len(filters) == 2:
+        key = filters[0]
+        value_list = filters[1].split(',')
+        for value in value_list:
+          if tests_annotations.AnnotatedFunctions.IsAnnotated(
+              key + ':' + value, test_info.qualified_name):
+            return True
+      elif tests_annotations.AnnotatedFunctions.IsAnnotated(
+          annotation_filter, test_info.qualified_name):
+        return True
+    return False
+
diff --git a/build/android/pylib/host_driven/test_runner.py b/build/android/pylib/host_driven/test_runner.py
new file mode 100644
index 0000000..8620aa1
--- /dev/null
+++ b/build/android/pylib/host_driven/test_runner.py
@@ -0,0 +1,133 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs host-driven tests on a particular device."""
+
+import logging
+import sys
+import time
+import traceback
+
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.host_driven import test_case
+from pylib.instrumentation import test_result
+
+
+class HostDrivenExceptionTestResult(test_result.InstrumentationTestResult):
+  """Test result corresponding to a python exception in a host-driven test."""
+
+  def __init__(self, test_name, start_date_ms, exc_info):
+    """Constructs a HostDrivenExceptionTestResult object.
+
+    Args:
+      test_name: name of the test which raised an exception.
+      start_date_ms: the starting time for the test.
+      exc_info: exception info, ostensibly from sys.exc_info().
+    """
+    exc_type, exc_value, exc_traceback = exc_info
+    trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
+                                                    exc_traceback))
+    log_msg = 'Exception:\n' + trace_info
+    duration_ms = (int(time.time()) * 1000) - start_date_ms
+
+    super(HostDrivenExceptionTestResult, self).__init__(
+        test_name,
+        base_test_result.ResultType.FAIL,
+        start_date_ms,
+        duration_ms,
+        log=str(exc_type) + ' ' + log_msg)
+
+
+class HostDrivenTestRunner(base_test_runner.BaseTestRunner):
+  """Orchestrates running a set of host-driven tests.
+
+  Any Python exceptions in the tests are caught and translated into a failed
+  result, rather than being re-raised on the main thread.
+  """
+
+  # TODO(jbudorick): Remove cleanup_test_files once it's no longer used.
+  # pylint: disable=unused-argument
+  #override
+  def __init__(self, device, shard_index, tool, cleanup_test_files=None):
+    """Creates a new HostDrivenTestRunner.
+
+    Args:
+      device: Attached android device.
+      shard_index: Shard index.
+      tool: Name of the Valgrind tool.
+      cleanup_test_files: Deprecated.
+    """
+
+    super(HostDrivenTestRunner, self).__init__(device, tool)
+
+    # The shard index affords the ability to create unique port numbers (e.g.
+    # DEFAULT_PORT + shard_index) if the test so wishes.
+    self.shard_index = shard_index
+
+  # pylint: enable=unused-argument
+
+  #override
+  def RunTest(self, test):
+    """Sets up and runs a test case.
+
+    Args:
+      test: An object which is ostensibly a subclass of HostDrivenTestCase.
+
+    Returns:
+      A TestRunResults object which contains the result produced by the test
+      and, in the case of a failure, the test that should be retried.
+    """
+
+    assert isinstance(test, test_case.HostDrivenTestCase)
+
+    start_date_ms = int(time.time()) * 1000
+    exception_raised = False
+
+    try:
+      test.SetUp(self.device, self.shard_index)
+    except Exception:
+      logging.exception(
+          'Caught exception while trying to run SetUp() for test: ' +
+          test.tagged_name)
+      # Tests whose SetUp() method has failed are likely to fail, or at least
+      # yield invalid results.
+      exc_info = sys.exc_info()
+      results = base_test_result.TestRunResults()
+      results.AddResult(HostDrivenExceptionTestResult(
+          test.tagged_name, start_date_ms, exc_info))
+      return results, test
+
+    try:
+      results = test.Run()
+    except Exception:
+      # Setting this lets TearDown() avoid stomping on our stack trace from
+      # Run() should TearDown() also raise an exception.
+      exception_raised = True
+      logging.exception('Caught exception while trying to run test: ' +
+                        test.tagged_name)
+      exc_info = sys.exc_info()
+      results = base_test_result.TestRunResults()
+      results.AddResult(HostDrivenExceptionTestResult(
+          test.tagged_name, start_date_ms, exc_info))
+
+    try:
+      test.TearDown()
+    except Exception:
+      logging.exception(
+          'Caught exception while trying to run TearDown() for test: ' +
+          test.tagged_name)
+      if not exception_raised:
+        # Don't stomp the error during the test if TearDown blows up. This is a
+        # trade-off: if the test fails, this will mask any problem with TearDown
+        # until the test is fixed.
+        exc_info = sys.exc_info()
+        results = base_test_result.TestRunResults()
+        results.AddResult(HostDrivenExceptionTestResult(
+            test.tagged_name, start_date_ms, exc_info))
+
+    if not results.DidRunPass():
+      return results, test
+    else:
+      return results, None
diff --git a/build/android/pylib/host_driven/test_server.py b/build/android/pylib/host_driven/test_server.py
new file mode 100644
index 0000000..0783500
--- /dev/null
+++ b/build/android/pylib/host_driven/test_server.py
@@ -0,0 +1,130 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Host driven test server controller.
+
+This class controls the startup and shutdown of a Python-driven test server
+runs in a separate process.
+
+The server starts up automatically when the object is created.
+
+After it starts up, the hostname it started on can be retrieved through the
+member field |host| and the port number through |port|.
+
+For shutting down the server, call TearDown().
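+
+Example (an illustrative sketch; the port and server choice are arbitrary):
+
+  server = TestServer(shard_index, 9000, TEST_NET_SERVER_PATH)
+  url = 'http://%s:%d/' % (server.host, server.port)
+  # ... issue requests against |url| ...
+  server.TearDown()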
+"""
+
+import logging
+import os
+import os.path
+import subprocess
+import time
+import urllib2
+
+from pylib import constants
+
+# NOTE: when adding or modifying these lines, omit any leading slashes!
+# Otherwise os.path.join() will (correctly) treat them as absolute paths
+# instead of relative paths, discarding the path components that precede them.
+_PYTHONPATH_DIRS = [
+    'net/tools/testserver/',
+    'third_party/',
+    'third_party/pyftpdlib/src/',
+    'third_party/pywebsocket/src',
+    'third_party/tlslite/',
+]
+
+# Python files in these directories are generated as part of the build.
+# These dirs are located in out/(Debug|Release) directory.
+# The correct path is determined based on the build type. E.g. out/Debug for
+# debug builds and out/Release for release builds.
+_GENERATED_PYTHONPATH_DIRS = [
+    'pyproto/policy/proto/',
+    'pyproto/sync/protocol/',
+    'pyproto/'
+]
+
+_TEST_SERVER_HOST = '127.0.0.1'
+# Paths for supported test server executables.
+TEST_NET_SERVER_PATH = 'net/tools/testserver/testserver.py'
+TEST_SYNC_SERVER_PATH = 'sync/tools/testserver/sync_testserver.py'
+TEST_POLICY_SERVER_PATH = 'chrome/browser/policy/test/policy_testserver.py'
+# Parameters to check that the server is up and running.
+TEST_SERVER_CHECK_PARAMS = {
+  TEST_NET_SERVER_PATH: {
+      'url_path': '/',
+      'response': 'Default response given for path'
+  },
+  TEST_SYNC_SERVER_PATH: {
+      'url_path': 'chromiumsync/time',
+      'response': '0123456789'
+  },
+  TEST_POLICY_SERVER_PATH: {
+      'url_path': 'test/ping',
+      'response': 'Policy server is up.'
+  },
+}
+
+class TestServer(object):
+  """Sets up a host driven test server on the host machine.
+
+  For shutting down the server, call TearDown().
+  """
+
+  def __init__(self, shard_index, test_server_port, test_server_path,
+               test_server_flags=None):
+    """Sets up a Python driven test server on the host machine.
+
+    Args:
+      shard_index: Index of the current shard.
+      test_server_port: Base port to run the test server on. The actual port
+                        is offset by the shard index; retrieve it through the
+                        member variable |port|.
+      test_server_path: The path (relative to the root src dir) of the server.
+      test_server_flags: Optional list of additional flags to the test server.
+    """
+    self.host = _TEST_SERVER_HOST
+    self.port = test_server_port + shard_index
+
+    src_dir = constants.DIR_SOURCE_ROOT
+    # Make dirs into a list of absolute paths.
+    abs_dirs = [os.path.join(src_dir, d) for d in _PYTHONPATH_DIRS]
+    # Add the generated python files to the path
+    abs_dirs.extend([os.path.join(src_dir, constants.GetOutDirectory(), d)
+                     for d in _GENERATED_PYTHONPATH_DIRS])
+    current_python_path = os.environ.get('PYTHONPATH')
+    extra_python_path = ':'.join(abs_dirs)
+    if current_python_path:
+      python_path = current_python_path + ':' + extra_python_path
+    else:
+      python_path = extra_python_path
+
+    # NOTE: A separate python process is used to simplify getting the right
+    # system path for finding includes.
+    test_server_flags = test_server_flags or []
+    cmd = ['python', os.path.join(src_dir, test_server_path),
+           '--log-to-console',
+           ('--host=%s' % self.host),
+           ('--port=%d' % self.port),
+           '--on-remote-server'] + test_server_flags
+    self._test_server_process = subprocess.Popen(
+        cmd, env={'PYTHONPATH': python_path})
+    test_url = 'http://%s:%d/%s' % (self.host, self.port,
+        TEST_SERVER_CHECK_PARAMS[test_server_path]['url_path'])
+    expected_response = TEST_SERVER_CHECK_PARAMS[test_server_path]['response']
+    retries = 0
+    while retries < 5:
+      try:
+        d = urllib2.urlopen(test_url).read()
+        logging.info('URL %s GOT: %s', test_url, d)
+        if d.startswith(expected_response):
+          break
+      except Exception as e:
+        logging.info('URL %s GOT: %s', test_url, e)
+      time.sleep(retries * 0.1)
+      retries += 1
+
+  def TearDown(self):
+    self._test_server_process.kill()
+    self._test_server_process.wait()
diff --git a/build/android/pylib/host_driven/tests_annotations.py b/build/android/pylib/host_driven/tests_annotations.py
new file mode 100644
index 0000000..5331140
--- /dev/null
+++ b/build/android/pylib/host_driven/tests_annotations.py
@@ -0,0 +1,94 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Annotations for host-driven tests."""
+# pylint: disable=W0212
+
+import os
+
+
+class AnnotatedFunctions(object):
+  """A container for annotated methods."""
+  _ANNOTATED = {}
+
+  @staticmethod
+  def _AddFunction(annotation, function):
+    """Adds an annotated function to our container.
+
+    Args:
+      annotation: the annotation string.
+      function: the function.
+    Returns:
+      The function passed in.
+    """
+    module_name = os.path.splitext(os.path.basename(
+        function.__globals__['__file__']))[0]
+    qualified_function_name = '.'.join([module_name, function.func_name])
+    function_list = AnnotatedFunctions._ANNOTATED.get(annotation, [])
+    function_list.append(qualified_function_name)
+    AnnotatedFunctions._ANNOTATED[annotation] = function_list
+    return function
+
+  @staticmethod
+  def IsAnnotated(annotation, qualified_function_name):
+    """True if function name (module.function) contains the annotation.
+
+    Args:
+      annotation: the annotation string.
+      qualified_function_name: the qualified function name.
+    Returns:
+      True if module.function contains the annotation.
+    """
+    return qualified_function_name in AnnotatedFunctions._ANNOTATED.get(
+        annotation, [])
+
+  @staticmethod
+  def GetTestAnnotations(qualified_function_name):
+    """Returns a list containing all annotations for the given function.
+
+    Args:
+      qualified_function_name: the qualified function name.
+    Returns:
+      List of all annotations for this function.
+    """
+    return [annotation
+            for annotation, tests in AnnotatedFunctions._ANNOTATED.iteritems()
+            if qualified_function_name in tests]
+
+
+# The following functions are annotations used for the host-driven tests.
+def Smoke(function):
+  return AnnotatedFunctions._AddFunction('Smoke', function)
+
+
+def SmallTest(function):
+  return AnnotatedFunctions._AddFunction('SmallTest', function)
+
+
+def MediumTest(function):
+  return AnnotatedFunctions._AddFunction('MediumTest', function)
+
+
+def LargeTest(function):
+  return AnnotatedFunctions._AddFunction('LargeTest', function)
+
+
+def EnormousTest(function):
+  return AnnotatedFunctions._AddFunction('EnormousTest', function)
+
+
+def FlakyTest(function):
+  return AnnotatedFunctions._AddFunction('FlakyTest', function)
+
+
+def DisabledTest(function):
+  return AnnotatedFunctions._AddFunction('DisabledTest', function)
+
+
+def Feature(feature_list):
+  def _AddFeatures(function):
+    for feature in feature_list:
+      AnnotatedFunctions._AddFunction('Feature:%s' % feature, function)
+    return AnnotatedFunctions._AddFunction('Feature', function)
+  return _AddFeatures
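+
+
+# Example usage in a hypothetical host-driven test module:
+#
+#   @Feature(['Sync'])
+#   @SmallTest
+#   def testExample(self):
+#     ...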
diff --git a/build/android/pylib/instrumentation/__init__.py b/build/android/pylib/instrumentation/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/instrumentation/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/instrumentation/instrumentation_parser.py b/build/android/pylib/instrumentation/instrumentation_parser.py
new file mode 100644
index 0000000..1859f14
--- /dev/null
+++ b/build/android/pylib/instrumentation/instrumentation_parser.py
@@ -0,0 +1,96 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import re
+
+# http://developer.android.com/reference/android/test/InstrumentationTestRunner.html
+STATUS_CODE_START = 1
+STATUS_CODE_OK = 0
+STATUS_CODE_ERROR = -1
+STATUS_CODE_FAILURE = -2
+
+# http://developer.android.com/reference/android/app/Activity.html
+RESULT_CODE_OK = -1
+RESULT_CODE_CANCELED = 0
+
+_INSTR_LINE_RE = re.compile(r'^\s*INSTRUMENTATION_([A-Z_]+): (.*)$')
+
+
+class InstrumentationParser(object):
+
+  def __init__(self, stream):
+    """An incremental parser for the output of Android instrumentation tests.
+
+    Example:
+
+      stream = adb.IterShell('am instrument -r ...')
+      parser = InstrumentationParser(stream)
+
+      for code, bundle in parser.IterStatus():
+        # do something with each instrumentation status
+        print 'status:', code, bundle
+
+      # do something with the final instrumentation result
+      code, bundle = parser.GetResult()
+      print 'result:', code, bundle
+
+    Args:
+      stream: a sequence of lines as produced by the raw output of an
+        instrumentation test (e.g. by |am instrument -r| or |uiautomator|).
+    """
+    self._stream = stream
+    self._code = None
+    self._bundle = None
+
+  def IterStatus(self):
+    """Iterate over statuses as they are produced by the instrumentation test.
+
+    Yields:
+      A tuple (code, bundle) for each instrumentation status found in the
+      output.
+    """
+    def join_bundle_values(bundle):
+      for key in bundle:
+        bundle[key] = '\n'.join(bundle[key])
+      return bundle
+
+    bundle = {'STATUS': {}, 'RESULT': {}}
+    header = None
+    key = None
+    for line in self._stream:
+      m = _INSTR_LINE_RE.match(line)
+      if m:
+        header, value = m.groups()
+        key = None
+        if header in ['STATUS', 'RESULT'] and '=' in value:
+          key, value = value.split('=', 1)
+          bundle[header][key] = [value]
+        elif header == 'STATUS_CODE':
+          yield int(value), join_bundle_values(bundle['STATUS'])
+          bundle['STATUS'] = {}
+        elif header == 'CODE':
+          self._code = int(value)
+        else:
+          logging.warning('Unknown INSTRUMENTATION_%s line: %s', header, value)
+      elif key is not None:
+        bundle[header][key].append(line)
+
+    self._bundle = join_bundle_values(bundle['RESULT'])
+
+  def GetResult(self):
+    """Return the final instrumentation result.
+
+    Returns:
+      A pair (code, bundle) with the final instrumentation result. The |code|
+      may be None if no instrumentation result was found in the output.
+
+    Raises:
+      AssertionError if attempting to get the instrumentation result before
+      the |IterStatus| generator has been exhausted.
+    """
+    assert self._bundle is not None, (
+        'The IterStatus generator must be exhausted before reading the final'
+        ' instrumentation result.')
+    return self._code, self._bundle
diff --git a/build/android/pylib/instrumentation/instrumentation_parser_test.py b/build/android/pylib/instrumentation/instrumentation_parser_test.py
new file mode 100755
index 0000000..092d10f
--- /dev/null
+++ b/build/android/pylib/instrumentation/instrumentation_parser_test.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Unit tests for instrumentation.InstrumentationParser."""
+
+import unittest
+
+from pylib.instrumentation import instrumentation_parser
+
+
+class InstrumentationParserTest(unittest.TestCase):
+
+  def testInstrumentationParser_nothing(self):
+    parser = instrumentation_parser.InstrumentationParser([''])
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(None, code)
+    self.assertEqual({}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_noMatchingStarts(self):
+    raw_output = [
+      '',
+      'this.is.a.test.package.TestClass:.',
+      'Test result for =.',
+      'Time: 1.234',
+      '',
+      'OK (1 test)',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(None, code)
+    self.assertEqual({}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_resultAndCode(self):
+    raw_output = [
+      'INSTRUMENTATION_RESULT: shortMsg=foo bar',
+      'INSTRUMENTATION_RESULT: longMsg=a foo',
+      'walked into',
+      'a bar',
+      'INSTRUMENTATION_CODE: -1',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(-1, code)
+    self.assertEqual(
+        {'shortMsg': 'foo bar', 'longMsg': 'a foo\nwalked into\na bar'}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_oneStatus(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: foo=1',
+      'INSTRUMENTATION_STATUS: bar=hello',
+      'INSTRUMENTATION_STATUS: world=false',
+      'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass',
+      'INSTRUMENTATION_STATUS: test=testMethod',
+      'INSTRUMENTATION_STATUS_CODE: 0',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+
+    expected = [
+      (0, {
+        'foo': '1',
+        'bar': 'hello',
+        'world': 'false',
+        'class': 'this.is.a.test.package.TestClass',
+        'test': 'testMethod',
+      })
+    ]
+    self.assertEqual(expected, statuses)
+
+  def testInstrumentationParser_multiStatus(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: class=foo',
+      'INSTRUMENTATION_STATUS: test=bar',
+      'INSTRUMENTATION_STATUS_CODE: 1',
+      'INSTRUMENTATION_STATUS: test_skipped=true',
+      'INSTRUMENTATION_STATUS_CODE: 0',
+      'INSTRUMENTATION_STATUS: class=hello',
+      'INSTRUMENTATION_STATUS: test=world',
+      'INSTRUMENTATION_STATUS: stack=',
+      'foo/bar.py (27)',
+      'hello/world.py (42)',
+      'test/file.py (1)',
+      'INSTRUMENTATION_STATUS_CODE: -1',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+
+    expected = [
+      (1, {'class': 'foo', 'test': 'bar',}),
+      (0, {'test_skipped': 'true'}),
+      (-1, {
+        'class': 'hello',
+        'test': 'world',
+        'stack': '\nfoo/bar.py (27)\nhello/world.py (42)\ntest/file.py (1)',
+      }),
+    ]
+    self.assertEqual(expected, statuses)
+
+  def testInstrumentationParser_statusResultAndCode(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: class=foo',
+      'INSTRUMENTATION_STATUS: test=bar',
+      'INSTRUMENTATION_STATUS_CODE: 1',
+      'INSTRUMENTATION_RESULT: result=hello',
+      'world',
+      '',
+      '',
+      'INSTRUMENTATION_CODE: 0',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+
+    self.assertEqual(0, code)
+    self.assertEqual({'result': 'hello\nworld\n\n'}, bundle)
+    self.assertEqual([(1, {'class': 'foo', 'test': 'bar'})], statuses)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/build/android/pylib/instrumentation/instrumentation_test_instance.py b/build/android/pylib/instrumentation/instrumentation_test_instance.py
new file mode 100644
index 0000000..f9957b0
--- /dev/null
+++ b/build/android/pylib/instrumentation/instrumentation_test_instance.py
@@ -0,0 +1,525 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import pickle
+import re
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import flag_changer
+from pylib.base import base_test_result
+from pylib.base import test_instance
+from pylib.instrumentation import test_result
+from pylib.instrumentation import instrumentation_parser
+from pylib.utils import apk_helper
+from pylib.utils import md5sum
+from pylib.utils import proguard
+
+sys.path.append(
+    os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common'))
+import unittest_util
+
+# Ref: http://developer.android.com/reference/android/app/Activity.html
+_ACTIVITY_RESULT_CANCELED = 0
+_ACTIVITY_RESULT_OK = -1
+
+_DEFAULT_ANNOTATIONS = [
+    'Smoke', 'SmallTest', 'MediumTest', 'LargeTest',
+    'EnormousTest', 'IntegrationTest']
+_EXTRA_ENABLE_HTTP_SERVER = (
+    'org.chromium.chrome.test.ChromeInstrumentationTestRunner.'
+    'EnableTestHttpServer')
+_EXTRA_DRIVER_TEST_LIST = (
+    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TestList')
+_EXTRA_DRIVER_TEST_LIST_FILE = (
+    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TestListFile')
+_EXTRA_DRIVER_TARGET_PACKAGE = (
+    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TargetPackage')
+_EXTRA_DRIVER_TARGET_CLASS = (
+    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TargetClass')
+_NATIVE_CRASH_RE = re.compile('native crash', re.IGNORECASE)
+_PICKLE_FORMAT_VERSION = 10
+
+
+# TODO(jbudorick): Make these private class methods of
+# InstrumentationTestInstance once the instrumentation test_runner is
+# deprecated.
+def ParseAmInstrumentRawOutput(raw_output):
+  """Parses the output of an |am instrument -r| call.
+
+  Args:
+    raw_output: the output of an |am instrument -r| call as a list of lines
+  Returns:
+    A 3-tuple containing:
+      - the instrumentation code as an integer
+      - the instrumentation result as a list of lines
+      - the instrumentation statuses received as a list of 2-tuples
+        containing:
+        - the status code as an integer
+        - the bundle dump as a dict mapping string keys to a list of
+          strings, one for each line.
+  """
+  parser = instrumentation_parser.InstrumentationParser(raw_output)
+  statuses = list(parser.IterStatus())
+  code, bundle = parser.GetResult()
+  return (code, bundle, statuses)
+
+
+def GenerateTestResults(
+    result_code, result_bundle, statuses, start_ms, duration_ms):
+  """Generate test results from |statuses|.
+
+  Args:
+    result_code: The overall status code as an integer.
+    result_bundle: The summary bundle dump as a dict.
+    statuses: A list of 2-tuples containing:
+      - the status code as an integer
+      - the bundle dump as a dict mapping string keys to string values
+      Note that this is the same as the third item in the 3-tuple returned by
+      |ParseAmInstrumentRawOutput|.
+    start_ms: The start time of the test in milliseconds.
+    duration_ms: The duration of the test in milliseconds.
+
+  Returns:
+    A list containing an instance of InstrumentationTestResult for each test
+    parsed.
+  """
+
+  results = []
+
+  current_result = None
+
+  for status_code, bundle in statuses:
+    test_class = bundle.get('class', '')
+    test_method = bundle.get('test', '')
+    if test_class and test_method:
+      test_name = '%s#%s' % (test_class, test_method)
+    else:
+      continue
+
+    if status_code == instrumentation_parser.STATUS_CODE_START:
+      if current_result:
+        results.append(current_result)
+      current_result = test_result.InstrumentationTestResult(
+          test_name, base_test_result.ResultType.UNKNOWN, start_ms, duration_ms)
+    else:
+      if status_code == instrumentation_parser.STATUS_CODE_OK:
+        if bundle.get('test_skipped', '').lower() in ('true', '1', 'yes'):
+          current_result.SetType(base_test_result.ResultType.SKIP)
+        elif current_result.GetType() == base_test_result.ResultType.UNKNOWN:
+          current_result.SetType(base_test_result.ResultType.PASS)
+      else:
+        if status_code not in (instrumentation_parser.STATUS_CODE_ERROR,
+                               instrumentation_parser.STATUS_CODE_FAILURE):
+          logging.error('Unrecognized status code %d. Handling as an error.',
+                        status_code)
+        current_result.SetType(base_test_result.ResultType.FAIL)
+        if 'stack' in bundle:
+          current_result.SetLog(bundle['stack'])
+
+  if current_result:
+    if current_result.GetType() == base_test_result.ResultType.UNKNOWN:
+      crashed = (result_code == _ACTIVITY_RESULT_CANCELED
+                 and any(_NATIVE_CRASH_RE.search(l)
+                         for l in result_bundle.itervalues()))
+      if crashed:
+        current_result.SetType(base_test_result.ResultType.CRASH)
+
+    results.append(current_result)
+
+  return results
+
+
+class InstrumentationTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, isolate_delegate, error_func):
+    super(InstrumentationTestInstance, self).__init__()
+
+    self._apk_under_test = None
+    self._package_info = None
+    self._suite = None
+    self._test_apk = None
+    self._test_jar = None
+    self._test_package = None
+    self._test_runner = None
+    self._test_support_apk = None
+    self._initializeApkAttributes(args, error_func)
+
+    self._data_deps = None
+    self._isolate_abs_path = None
+    self._isolate_delegate = None
+    self._isolated_abs_path = None
+    self._test_data = None
+    self._initializeDataDependencyAttributes(args, isolate_delegate)
+
+    self._annotations = None
+    self._excluded_annotations = None
+    self._test_filter = None
+    self._initializeTestFilterAttributes(args)
+
+    self._flags = None
+    self._initializeFlagAttributes(args)
+
+    self._driver_apk = None
+    self._driver_package = None
+    self._driver_name = None
+    self._initializeDriverAttributes()
+
+  def _initializeApkAttributes(self, args, error_func):
+    if args.apk_under_test.endswith('.apk'):
+      self._apk_under_test = args.apk_under_test
+    else:
+      self._apk_under_test = os.path.join(
+          constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+          '%s.apk' % args.apk_under_test)
+
+    if not os.path.exists(self._apk_under_test):
+      error_func('Unable to find APK under test: %s' % self._apk_under_test)
+
+    if args.test_apk.endswith('.apk'):
+      self._suite = os.path.splitext(os.path.basename(args.test_apk))[0]
+      self._test_apk = args.test_apk
+    else:
+      self._suite = args.test_apk
+      self._test_apk = os.path.join(
+          constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+          '%s.apk' % args.test_apk)
+
+    self._test_jar = os.path.join(
+        constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR,
+        '%s.jar' % self._suite)
+    self._test_support_apk = os.path.join(
+        constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR,
+        '%sSupport.apk' % self._suite)
+
+    if not os.path.exists(self._test_apk):
+      error_func('Unable to find test APK: %s' % self._test_apk)
+    if not os.path.exists(self._test_jar):
+      error_func('Unable to find test JAR: %s' % self._test_jar)
+
+    apk = apk_helper.ApkHelper(self.test_apk)
+    self._test_package = apk.GetPackageName()
+    self._test_runner = apk.GetInstrumentationName()
+
+    self._package_info = None
+    for package_info in constants.PACKAGE_INFO.itervalues():
+      if self._test_package == package_info.test_package:
+        self._package_info = package_info
+    if not self._package_info:
+      logging.warning('Unable to find package info for %s', self._test_package)
+
+  def _initializeDataDependencyAttributes(self, args, isolate_delegate):
+    self._data_deps = []
+    if args.isolate_file_path:
+      self._isolate_abs_path = os.path.abspath(args.isolate_file_path)
+      self._isolate_delegate = isolate_delegate
+      self._isolated_abs_path = os.path.join(
+          constants.GetOutDirectory(), '%s.isolated' % self._test_package)
+    else:
+      self._isolate_delegate = None
+
+    # TODO(jbudorick): Deprecate and remove --test-data once data dependencies
+    # are fully converted to isolate.
+    if args.test_data:
+      logging.info('Data dependencies specified via --test-data')
+      self._test_data = args.test_data
+    else:
+      self._test_data = None
+
+    if not self._isolate_delegate and not self._test_data:
+      logging.warning('No data dependencies will be pushed.')
+
+  def _initializeTestFilterAttributes(self, args):
+    self._test_filter = args.test_filter
+
+    def annotation_dict_element(a):
+      a = a.split('=')
+      return (a[0], a[1] if len(a) == 2 else None)
+
+    if args.annotation_str:
+      self._annotations = dict(
+          annotation_dict_element(a)
+          for a in args.annotation_str.split(','))
+    elif not self._test_filter:
+      self._annotations = dict(
+          annotation_dict_element(a)
+          for a in _DEFAULT_ANNOTATIONS)
+    else:
+      self._annotations = {}
+
+    if args.exclude_annotation_str:
+      self._excluded_annotations = dict(
+          annotation_dict_element(a)
+          for a in args.exclude_annotation_str.split(','))
+    else:
+      self._excluded_annotations = {}
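+    # Illustration (hypothetical values): an annotation string such as
+    # 'Smoke,MediumTest,Restriction=Tablet' parses to
+    #   {'Smoke': None, 'MediumTest': None, 'Restriction': 'Tablet'}
+    # i.e. bare names map to None while 'key=value' pairs keep their value.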
+
+  def _initializeFlagAttributes(self, args):
+    self._flags = ['--disable-fre', '--enable-test-intents']
+    # TODO(jbudorick): Transition "--device-flags" to "--device-flags-file"
+    if hasattr(args, 'device_flags') and args.device_flags:
+      with open(args.device_flags) as device_flags_file:
+        stripped_lines = (l.strip() for l in device_flags_file)
+        self._flags.extend([flag for flag in stripped_lines if flag])
+    if hasattr(args, 'device_flags_file') and args.device_flags_file:
+      with open(args.device_flags_file) as device_flags_file:
+        stripped_lines = (l.strip() for l in device_flags_file)
+        self._flags.extend([flag for flag in stripped_lines if flag])
+
+  def _initializeDriverAttributes(self):
+    self._driver_apk = os.path.join(
+        constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+        'OnDeviceInstrumentationDriver.apk')
+    if os.path.exists(self._driver_apk):
+      driver_apk = apk_helper.ApkHelper(self._driver_apk)
+      self._driver_package = driver_apk.GetPackageName()
+      self._driver_name = driver_apk.GetInstrumentationName()
+    else:
+      self._driver_apk = None
+
+  @property
+  def apk_under_test(self):
+    return self._apk_under_test
+
+  @property
+  def flags(self):
+    return self._flags
+
+  @property
+  def driver_apk(self):
+    return self._driver_apk
+
+  @property
+  def driver_package(self):
+    return self._driver_package
+
+  @property
+  def driver_name(self):
+    return self._driver_name
+
+  @property
+  def package_info(self):
+    return self._package_info
+
+  @property
+  def suite(self):
+    return self._suite
+
+  @property
+  def test_apk(self):
+    return self._test_apk
+
+  @property
+  def test_jar(self):
+    return self._test_jar
+
+  @property
+  def test_support_apk(self):
+    return self._test_support_apk
+
+  @property
+  def test_package(self):
+    return self._test_package
+
+  @property
+  def test_runner(self):
+    return self._test_runner
+
+  #override
+  def TestType(self):
+    return 'instrumentation'
+
+  #override
+  def SetUp(self):
+    if self._isolate_delegate:
+      self._isolate_delegate.Remap(
+          self._isolate_abs_path, self._isolated_abs_path)
+      self._isolate_delegate.MoveOutputDeps()
+      self._data_deps.extend([(constants.ISOLATE_DEPS_DIR, None)])
+
+    # TODO(jbudorick): Convert existing tests that depend on the --test-data
+    # mechanism to isolate, then remove this.
+    if self._test_data:
+      for t in self._test_data:
+        device_rel_path, host_rel_path = t.split(':')
+        host_abs_path = os.path.join(constants.DIR_SOURCE_ROOT, host_rel_path)
+        self._data_deps.extend(
+            [(host_abs_path,
+              [None, 'chrome', 'test', 'data', device_rel_path])])
+
+  def GetDataDependencies(self):
+    return self._data_deps
+
+  def GetTests(self):
+    pickle_path = '%s-proguard.pickle' % self.test_jar
+    try:
+      tests = self._GetTestsFromPickle(pickle_path, self.test_jar)
+    except self.ProguardPickleException as e:
+      logging.info('Getting tests from JAR via proguard. (%s)', str(e))
+      tests = self._GetTestsFromProguard(self.test_jar)
+      self._SaveTestsToPickle(pickle_path, self.test_jar, tests)
+    return self._InflateTests(self._FilterTests(tests))
+
+  class ProguardPickleException(Exception):
+    pass
+
+  def _GetTestsFromPickle(self, pickle_path, jar_path):
+    if not os.path.exists(pickle_path):
+      raise self.ProguardPickleException('%s does not exist.' % pickle_path)
+    if os.path.getmtime(pickle_path) <= os.path.getmtime(jar_path):
+      raise self.ProguardPickleException(
+          '%s newer than %s.' % (jar_path, pickle_path))
+
+    with open(pickle_path, 'r') as pickle_file:
+      pickle_data = pickle.loads(pickle_file.read())
+    jar_md5 = md5sum.CalculateHostMd5Sums(jar_path)[jar_path]
+
+    try:
+      if pickle_data['VERSION'] != _PICKLE_FORMAT_VERSION:
+        raise self.ProguardPickleException('PICKLE_FORMAT_VERSION has changed.')
+      if pickle_data['JAR_MD5SUM'] != jar_md5:
+        raise self.ProguardPickleException('JAR file MD5 sum differs.')
+      return pickle_data['TEST_METHODS']
+    except TypeError as e:
+      logging.error(pickle_data)
+      raise self.ProguardPickleException(str(e))
+
+  def _GetTestsFromProguard(self, jar_path):
+    p = proguard.Dump(jar_path)
+
+    def is_test_class(c):
+      return c['class'].endswith('Test')
+
+    def is_test_method(m):
+      return m['method'].startswith('test')
+
+    class_lookup = dict((c['class'], c) for c in p['classes'])
+    def recursive_get_class_annotations(c):
+      s = c['superclass']
+      if s in class_lookup:
+        a = recursive_get_class_annotations(class_lookup[s])
+      else:
+        a = {}
+      a.update(c['annotations'])
+      return a
+
+    def stripped_test_class(c):
+      return {
+        'class': c['class'],
+        'annotations': recursive_get_class_annotations(c),
+        'methods': [m for m in c['methods'] if is_test_method(m)],
+      }
+
+    return [stripped_test_class(c) for c in p['classes']
+            if is_test_class(c)]
+
+  def _SaveTestsToPickle(self, pickle_path, jar_path, tests):
+    jar_md5 = md5sum.CalculateHostMd5Sums(jar_path)[jar_path]
+    pickle_data = {
+      'VERSION': _PICKLE_FORMAT_VERSION,
+      'JAR_MD5SUM': jar_md5,
+      'TEST_METHODS': tests,
+    }
+    with open(pickle_path, 'w') as pickle_file:
+      pickle.dump(pickle_data, pickle_file)
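+  # The pickle written above is a plain dict; sketched (md5 value is
+  # illustrative):
+  #   {'VERSION': _PICKLE_FORMAT_VERSION,
+  #    'JAR_MD5SUM': 'd41d8cd98f00b204e9800998ecf8427e',
+  #    'TEST_METHODS': [...]}
+  # _GetTestsFromPickle raises ProguardPickleException whenever the version
+  # or jar checksum no longer matches, which forces a fresh proguard dump.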
+
+  def _FilterTests(self, tests):
+
+    def gtest_filter(c, m):
+      t = ['%s.%s' % (c['class'].split('.')[-1], m['method'])]
+      return (not self._test_filter
+              or unittest_util.FilterTestNames(t, self._test_filter))
+
+    def annotation_filter(all_annotations):
+      if not self._annotations:
+        return True
+      return any_annotation_matches(self._annotations, all_annotations)
+
+    def excluded_annotation_filter(all_annotations):
+      if not self._excluded_annotations:
+        return True
+      return not any_annotation_matches(self._excluded_annotations,
+                                        all_annotations)
+
+    def any_annotation_matches(annotations, all_annotations):
+      return any(
+          ak in all_annotations and (av is None or av == all_annotations[ak])
+          for ak, av in annotations.iteritems())
+
+    filtered_classes = []
+    for c in tests:
+      filtered_methods = []
+      for m in c['methods']:
+        # Gtest filtering
+        if not gtest_filter(c, m):
+          continue
+
+        all_annotations = dict(c['annotations'])
+        all_annotations.update(m['annotations'])
+        if (not annotation_filter(all_annotations)
+            or not excluded_annotation_filter(all_annotations)):
+          continue
+
+        filtered_methods.append(m)
+
+      if filtered_methods:
+        filtered_class = dict(c)
+        filtered_class['methods'] = filtered_methods
+        filtered_classes.append(filtered_class)
+
+    return filtered_classes
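+    # Example (hypothetical annotations): with self._annotations set to
+    # {'MediumTest': None} and self._excluded_annotations set to
+    # {'FlakyTest': None}, a method annotated with both MediumTest and
+    # FlakyTest is dropped: it passes the inclusion filter but fails the
+    # exclusion filter, and a method must pass both to be kept.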
+
+  def _InflateTests(self, tests):
+    inflated_tests = []
+    for c in tests:
+      for m in c['methods']:
+        a = dict(c['annotations'])
+        a.update(m['annotations'])
+        inflated_tests.append({
+            'class': c['class'],
+            'method': m['method'],
+            'annotations': a,
+        })
+    return inflated_tests
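+    # For instance, a single class entry (names are hypothetical)
+    #   {'class': 'org.chromium.FooTest',
+    #    'annotations': {'SmallTest': None},
+    #    'methods': [{'method': 'testA', 'annotations': {}},
+    #                {'method': 'testB', 'annotations': {}}]}
+    # inflates to two per-method dicts (one for testA, one for testB), each
+    # carrying the merged class and method annotations.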
+
+  @staticmethod
+  def GetHttpServerEnvironmentVars():
+    return {
+      _EXTRA_ENABLE_HTTP_SERVER: None,
+    }
+
+  def GetDriverEnvironmentVars(
+      self, test_list=None, test_list_file_path=None):
+    env = {
+      _EXTRA_DRIVER_TARGET_PACKAGE: self.test_package,
+      _EXTRA_DRIVER_TARGET_CLASS: self.test_runner,
+    }
+
+    if test_list:
+      env[_EXTRA_DRIVER_TEST_LIST] = ','.join(test_list)
+
+    if test_list_file_path:
+      env[_EXTRA_DRIVER_TEST_LIST_FILE] = (
+          os.path.basename(test_list_file_path))
+
+    return env
+
+  @staticmethod
+  def ParseAmInstrumentRawOutput(raw_output):
+    return ParseAmInstrumentRawOutput(raw_output)
+
+  @staticmethod
+  def GenerateTestResults(
+      result_code, result_bundle, statuses, start_ms, duration_ms):
+    return GenerateTestResults(result_code, result_bundle, statuses,
+                               start_ms, duration_ms)
+
+  #override
+  def TearDown(self):
+    if self._isolate_delegate:
+      self._isolate_delegate.Clear()
+
diff --git a/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
new file mode 100755
index 0000000..752e4d3
--- /dev/null
+++ b/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Unit tests for instrumentation.TestRunner."""
+
+# pylint: disable=W0212
+
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.instrumentation import instrumentation_test_instance
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock  # pylint: disable=F0401
+
+
+class InstrumentationTestInstanceTest(unittest.TestCase):
+
+  def setUp(self):
+    options = mock.Mock()
+    options.tool = ''
+
+  def testGenerateTestResults_noStatus(self):
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, [], 0, 1000)
+    self.assertEqual([], results)
+
+  def testGenerateTestResults_testPassed(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 0, 1000)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+  def testGenerateTestResults_testSkipped_true(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'test_skipped': 'true',
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 0, 1000)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
+
+  def testGenerateTestResults_testSkipped_false(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'test_skipped': 'false',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 0, 1000)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+  def testGenerateTestResults_testFailed(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (-2, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 0, 1000)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/build/android/pylib/instrumentation/json_perf_parser.py b/build/android/pylib/instrumentation/json_perf_parser.py
new file mode 100644
index 0000000..ffdfbe7
--- /dev/null
+++ b/build/android/pylib/instrumentation/json_perf_parser.py
@@ -0,0 +1,161 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""A helper module for parsing JSON objects from perf tests results."""
+
+import json
+
+
+def GetAverageRunInfo(json_data, name):
+  """Summarizes TraceEvent JSON data for performance metrics.
+
+  Example JSON Inputs (More tags can be added but these are required):
+  Measuring Duration:
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "S",
+      "name": "TestTrace"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "F",
+      "name": "TestTrace"
+    },
+    ...
+  ]
+
+  Measuring Call Frequency (FPS):
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    ...
+  ]
+
+  Args:
+    json_data: A list of dictionaries, each representing a JSON object.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    A dictionary of result data with the following tags:
+      min: The minimum value tracked.
+      max: The maximum value tracked.
+      average: The average of all the values tracked.
+      count: The number of times the category/name pair was tracked.
+      type: The type of tracking ('Instant' for instant tags and 'Span' for
+            begin/end tags).
+      category: The passed in category filter.
+      name: The passed in name filter.
+      data_points: A list of all of the times used to generate this data.
+      units: The units for the values being reported.
+
+  Raises:
+    Exception: if entry contains invalid data.
+  """
+
+  def EntryFilter(entry):
+    return entry['cat'] == 'Java' and entry['name'] == name
+  filtered_entries = filter(EntryFilter, json_data)
+
+  result = {}
+
+  result['min'] = -1
+  result['max'] = -1
+  result['average'] = 0
+  result['count'] = 0
+  result['type'] = 'Unknown'
+  result['category'] = 'Java'
+  result['name'] = name
+  result['data_points'] = []
+  result['units'] = ''
+
+  total_sum = 0
+
+  last_val = 0
+  val_type = None
+  for entry in filtered_entries:
+    if not val_type:
+      if 'mem' in entry:
+        val_type = 'mem'
+
+        def GetVal(entry):
+          return entry['mem']
+
+        result['units'] = 'kb'
+      elif 'ts' in entry:
+        val_type = 'ts'
+
+        def GetVal(entry):
+          return float(entry['ts']) / 1000.0
+
+        result['units'] = 'ms'
+      else:
+        raise Exception('Entry did not contain valid value info: %s' % entry)
+
+    if val_type not in entry:
+      raise Exception('Entry did not contain expected value type "%s" '
+                      'information: %s' % (val_type, entry))
+    val = GetVal(entry)
+    if (entry['ph'] == 'S' and
+        (result['type'] == 'Unknown' or result['type'] == 'Span')):
+      result['type'] = 'Span'
+      last_val = val
+    elif ((entry['ph'] == 'F' and result['type'] == 'Span') or
+          (entry['ph'] == 'I' and (result['type'] == 'Unknown' or
+                                   result['type'] == 'Instant'))):
+      if last_val > 0:
+        delta = val - last_val
+        if result['min'] == -1 or result['min'] > delta:
+          result['min'] = delta
+        if result['max'] == -1 or result['max'] < delta:
+          result['max'] = delta
+        total_sum += delta
+        result['count'] += 1
+        result['data_points'].append(delta)
+      if entry['ph'] == 'I':
+        result['type'] = 'Instant'
+        last_val = val
+  if result['count'] > 0:
+    result['average'] = total_sum / result['count']
+
+  return result
+
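+# A worked example with synthetic data: two 'S'/'F' span events named
+# 'TestTrace' at ts=10000000000 and ts=10000004000 yield a single delta of
+# (10000004000 - 10000000000) / 1000.0 == 4.0 ms, so the summary reports
+# min == max == average == 4.0, count == 1, and units == 'ms':
+#
+#   events = [
+#       {'cat': 'Java', 'ts': 10000000000, 'ph': 'S', 'name': 'TestTrace'},
+#       {'cat': 'Java', 'ts': 10000004000, 'ph': 'F', 'name': 'TestTrace'},
+#   ]
+#   info = GetAverageRunInfo(events, 'TestTrace')
+#   assert info['average'] == 4.0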
+
+def GetAverageRunInfoFromJSONString(json_string, name):
+  """Returns the results from GetAverageRunInfo using a JSON string.
+
+  Args:
+    json_string: The string containing JSON.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  return GetAverageRunInfo(json.loads(json_string), name)
+
+
+def GetAverageRunInfoFromFile(json_file, name):
+  """Returns the results from GetAverageRunInfo using a JSON file.
+
+  Args:
+    json_file: The path to a JSON file.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  with open(json_file, 'r') as f:
+    data = f.read()
+    perf = json.loads(data)
+
+  return GetAverageRunInfo(perf, name)
diff --git a/build/android/pylib/instrumentation/setup.py b/build/android/pylib/instrumentation/setup.py
new file mode 100644
index 0000000..7a0501e
--- /dev/null
+++ b/build/android/pylib/instrumentation/setup.py
@@ -0,0 +1,113 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for instrumentation tests."""
+
+import logging
+import os
+
+from pylib import constants
+from pylib import valgrind_tools
+
+from pylib.base import base_setup
+from pylib.device import device_utils
+from pylib.instrumentation import test_package
+from pylib.instrumentation import test_runner
+
+DEVICE_DATA_DIR = 'chrome/test/data'
+
+ISOLATE_FILE_PATHS = {
+    'AndroidWebViewTest': 'android_webview/android_webview_test_apk.isolate',
+    'ChromeShellTest': 'chrome/chrome_shell_test_apk.isolate',
+    'ContentShellTest': 'content/content_shell_test_apk.isolate',
+}
+
+DEPS_EXCLUSION_LIST = []
+
+# TODO(mikecase): Remove this function and the constant DEVICE_DATA_DIR
+# once all data deps are pushed to the same location on the device.
+def _PushExtraSuiteDataDeps(device, test_apk):
+  """Pushes some extra data files/dirs needed by some test suite.
+
+  Args:
+    test_apk: The test suite basename for which to return file paths.
+  """
+  if test_apk in ['ChromeTest', 'ContentShellTest']:
+    test_files = 'net/data/ssl/certificates'
+    host_device_file_tuple = [
+        (os.path.join(constants.DIR_SOURCE_ROOT, test_files),
+         os.path.join(device.GetExternalStoragePath(), test_files))]
+    device.PushChangedFiles(host_device_file_tuple)
+
+
+# TODO(mikecase): Remove this function once everything uses
+# base_setup.PushDataDeps to push data deps to the device.
+def _PushDataDeps(device, test_options):
+  valgrind_tools.PushFilesForTool(test_options.tool, device)
+
+  host_device_file_tuples = []
+  for dest_host_pair in test_options.test_data:
+    dst_src = dest_host_pair.split(':', 1)
+    dst_layer = dst_src[0]
+    host_src = dst_src[1]
+    host_test_files_path = os.path.join(constants.DIR_SOURCE_ROOT, host_src)
+    if os.path.exists(host_test_files_path):
+      host_device_file_tuples += [(
+          host_test_files_path,
+          '%s/%s/%s' % (
+              device.GetExternalStoragePath(),
+              DEVICE_DATA_DIR,
+              dst_layer))]
+  if host_device_file_tuples:
+    device.PushChangedFiles(host_device_file_tuples)
+
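+# Illustration (paths are hypothetical): a --test-data entry of the form
+# 'base:chrome/test/data/base' pushes the host directory
+# <DIR_SOURCE_ROOT>/chrome/test/data/base to
+# <external storage>/chrome/test/data/base on the device; only the part
+# before the first ':' names the on-device destination layer.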
+
+def Setup(test_options, devices):
+  """Create and return the test runner factory and tests.
+
+  Args:
+    test_options: An InstrumentationOptions object.
+    devices: A list of attached devices.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  if (test_options.coverage_dir and not
+      os.path.exists(test_options.coverage_dir)):
+    os.makedirs(test_options.coverage_dir)
+
+  test_pkg = test_package.TestPackage(test_options.test_apk_path,
+                                      test_options.test_apk_jar_path,
+                                      test_options.test_support_apk_path)
+  tests = test_pkg.GetAllMatchingTests(
+      test_options.annotations,
+      test_options.exclude_annotations,
+      test_options.test_filter)
+  if not tests:
+    logging.error('No instrumentation tests to run with current args.')
+
+  if test_options.test_data:
+    device_utils.DeviceUtils.parallel(devices).pMap(
+        _PushDataDeps, test_options)
+
+  if test_options.isolate_file_path:
+    i = base_setup.GenerateDepsDirUsingIsolate(test_options.test_apk,
+                                               test_options.isolate_file_path,
+                                               ISOLATE_FILE_PATHS,
+                                               DEPS_EXCLUSION_LIST)
+    def push_data_deps_to_device_dir(device):
+      base_setup.PushDataDeps(device, device.GetExternalStoragePath(),
+                              test_options)
+    device_utils.DeviceUtils.parallel(devices).pMap(
+        push_data_deps_to_device_dir)
+    if i:
+      i.Clear()
+
+  device_utils.DeviceUtils.parallel(devices).pMap(
+      _PushExtraSuiteDataDeps, test_options.test_apk)
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(test_options, device, shard_index,
+                                  test_pkg)
+
+  return (TestRunnerFactory, tests)
diff --git a/build/android/pylib/instrumentation/test_jar.py b/build/android/pylib/instrumentation/test_jar.py
new file mode 100644
index 0000000..7ad8997
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_jar.py
@@ -0,0 +1,230 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper class for instrumenation test jar."""
+# pylint: disable=W0702
+
+import logging
+import os
+import pickle
+import re
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.device import device_utils
+from pylib.utils import md5sum
+from pylib.utils import proguard
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                             'build', 'util', 'lib', 'common'))
+
+import unittest_util # pylint: disable=F0401
+
+# If you change the cached output of proguard, increment this number.
+PICKLE_FORMAT_VERSION = 4
+
+
+class TestJar(object):
+  _ANNOTATIONS = frozenset(
+      ['Smoke', 'SmallTest', 'MediumTest', 'LargeTest', 'EnormousTest',
+       'FlakyTest', 'DisabledTest', 'Manual', 'PerfTest', 'HostDrivenTest',
+       'IntegrationTest'])
+  _DEFAULT_ANNOTATION = 'SmallTest'
+  _PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
+  _PROGUARD_SUPERCLASS_RE = re.compile(r'\s*?  Superclass:\s*([\S]+)$')
+  _PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
+  _PROGUARD_ANNOTATION_RE = re.compile(r'\s*?- Annotation \[L(\S*);\]:$')
+  _PROGUARD_ANNOTATION_CONST_RE = (
+      re.compile(r'\s*?- Constant element value.*$'))
+  _PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'\s*?- \S+? \[(.*)\]$')
+
+  def __init__(self, jar_path):
+    if not os.path.exists(jar_path):
+      raise Exception('%s not found, please build it' % jar_path)
+
+    self._PROGUARD_PATH = os.path.join(constants.ANDROID_SDK_ROOT,
+                                       'tools/proguard/lib/proguard.jar')
+    if not os.path.exists(self._PROGUARD_PATH):
+      self._PROGUARD_PATH = os.path.join(os.environ['ANDROID_BUILD_TOP'],
+                                         'external/proguard/lib/proguard.jar')
+    self._jar_path = jar_path
+    self._pickled_proguard_name = self._jar_path + '-proguard.pickle'
+    self._test_methods = {}
+    if not self._GetCachedProguardData():
+      self._GetProguardData()
+
+  def _GetCachedProguardData(self):
+    if (os.path.exists(self._pickled_proguard_name) and
+        (os.path.getmtime(self._pickled_proguard_name) >
+         os.path.getmtime(self._jar_path))):
+      logging.info('Loading cached proguard output from %s',
+                   self._pickled_proguard_name)
+      try:
+        with open(self._pickled_proguard_name, 'r') as r:
+          d = pickle.loads(r.read())
+        jar_md5 = md5sum.CalculateHostMd5Sums(
+          self._jar_path)[os.path.realpath(self._jar_path)]
+        if (d['JAR_MD5SUM'] == jar_md5 and
+            d['VERSION'] == PICKLE_FORMAT_VERSION):
+          self._test_methods = d['TEST_METHODS']
+          return True
+      except:
+        logging.warning('Unable to load cached proguard data, ignoring cache.')
+    return False
+
+  def _GetProguardData(self):
+    logging.info('Retrieving test methods via proguard.')
+
+    p = proguard.Dump(self._jar_path)
+
+    class_lookup = dict((c['class'], c) for c in p['classes'])
+    def recursive_get_annotations(c):
+      s = c['superclass']
+      if s in class_lookup:
+        a = recursive_get_annotations(class_lookup[s])
+      else:
+        a = {}
+      a.update(c['annotations'])
+      return a
+
+    test_classes = (c for c in p['classes']
+                    if c['class'].endswith('Test'))
+    for c in test_classes:
+      class_annotations = recursive_get_annotations(c)
+      test_methods = (m for m in c['methods']
+                      if m['method'].startswith('test'))
+      for m in test_methods:
+        qualified_method = '%s#%s' % (c['class'], m['method'])
+        annotations = dict(class_annotations)
+        annotations.update(m['annotations'])
+        self._test_methods[qualified_method] = m
+        self._test_methods[qualified_method]['annotations'] = annotations
+
+    logging.info('Storing proguard output to %s', self._pickled_proguard_name)
+    d = {'VERSION': PICKLE_FORMAT_VERSION,
+         'TEST_METHODS': self._test_methods,
+         'JAR_MD5SUM':
+              md5sum.CalculateHostMd5Sums(
+                self._jar_path)[os.path.realpath(self._jar_path)]}
+    with open(self._pickled_proguard_name, 'w') as f:
+      f.write(pickle.dumps(d))
+
+  @staticmethod
+  def _IsTestMethod(test):
+    class_name, method = test.split('#')
+    return class_name.endswith('Test') and method.startswith('test')
+
+  def GetTestAnnotations(self, test):
+    """Returns a list of all annotations for the given |test|. May be empty."""
+    if not self._IsTestMethod(test) or not test in self._test_methods:
+      return []
+    return self._test_methods[test]['annotations']
+
+  @staticmethod
+  def _AnnotationsMatchFilters(annotation_filter_list, annotations):
+    """Checks if annotations match any of the filters."""
+    if not annotation_filter_list:
+      return True
+    for annotation_filter in annotation_filter_list:
+      filters = annotation_filter.split('=')
+      if len(filters) == 2:
+        key = filters[0]
+        value_list = filters[1].split(',')
+        for value in value_list:
+          if key in annotations and value == annotations[key]:
+            return True
+      elif annotation_filter in annotations:
+        return True
+    return False
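+  # Example (hypothetical filter list): a list such as
+  # ['MediumTest', 'Restriction=Tablet,Phone'] matches any test annotated
+  # with the key 'MediumTest' (bare filters match on key presence alone) and
+  # any test whose 'Restriction' annotation equals 'Tablet' or 'Phone'
+  # ('key=v1,v2' filters compare values).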
+
+  def GetAnnotatedTests(self, annotation_filter_list):
+    """Returns a list of all tests that match the given annotation filters."""
+    return [test for test in self.GetTestMethods()
+            if self._IsTestMethod(test) and self._AnnotationsMatchFilters(
+                annotation_filter_list, self.GetTestAnnotations(test))]
+
+  def GetTestMethods(self):
+    """Returns a dict of all test methods and relevant attributes.
+
+    Test methods are retrieved as Class#testMethod.
+    """
+    return self._test_methods
+
+  def _GetTestsMissingAnnotation(self):
+    """Get a list of test methods with no known annotations."""
+    tests_missing_annotations = []
+    for test_method in self.GetTestMethods().iterkeys():
+      annotations_ = frozenset(self.GetTestAnnotations(test_method).iterkeys())
+      if (annotations_.isdisjoint(self._ANNOTATIONS) and
+          not self.IsHostDrivenTest(test_method)):
+        tests_missing_annotations.append(test_method)
+    return sorted(tests_missing_annotations)
+
+  def _IsTestValidForSdkRange(self, test_name, attached_min_sdk_level):
+    required_min_sdk_level = int(
+        self.GetTestAnnotations(test_name).get('MinAndroidSdkLevel', 0))
+    return attached_min_sdk_level >= required_min_sdk_level
+
+  def GetAllMatchingTests(self, annotation_filter_list,
+                          exclude_annotation_list, test_filter):
+    """Get a list of tests matching any of the annotations and the filter.
+
+    Args:
+      annotation_filter_list: List of test annotations. A test must have at
+        least one of these annotations. A test without any annotations is
+        considered to be SmallTest.
+      exclude_annotation_list: List of test annotations. A test must not have
+        any of these annotations.
+      test_filter: Filter used for partial matching on the test method names.
+
+    Returns:
+      List of all matching tests.
+    """
+    if annotation_filter_list:
+      available_tests = self.GetAnnotatedTests(annotation_filter_list)
+      # Include un-annotated tests in SmallTest.
+      if annotation_filter_list.count(self._DEFAULT_ANNOTATION) > 0:
+        for test in self._GetTestsMissingAnnotation():
+          logging.warning(
+              '%s has no annotations. Assuming "%s".', test,
+              self._DEFAULT_ANNOTATION)
+          available_tests.append(test)
+    else:
+      available_tests = [m for m in self.GetTestMethods()
+                         if not self.IsHostDrivenTest(m)]
+
+    if exclude_annotation_list:
+      excluded_tests = self.GetAnnotatedTests(exclude_annotation_list)
+      available_tests = list(set(available_tests) - set(excluded_tests))
+
+    tests = []
+    if test_filter:
+      # |available_tests| are in adb instrument format: package.path.class#test.
+
+      # Maps a 'class.test' name to each 'package.path.class#test' name.
+      sanitized_test_names = dict([
+          (t.split('.')[-1].replace('#', '.'), t) for t in available_tests])
+      # Filters 'class.test' names and populates |tests| with the corresponding
+      # 'package.path.class#test' names.
+      tests = [
+          sanitized_test_names[t] for t in unittest_util.FilterTestNames(
+              sanitized_test_names.keys(), test_filter.replace('#', '.'))]
+    else:
+      tests = available_tests
+
+    # Filter out any tests with SDK level requirements that don't match the set
+    # of attached devices.
+    devices = device_utils.DeviceUtils.parallel()
+    min_sdk_version = min(devices.build_version_sdk.pGet(None))
+    tests = [t for t in tests
+             if self._IsTestValidForSdkRange(t, min_sdk_version)]
+
+    return tests
+
+  @staticmethod
+  def IsHostDrivenTest(test):
+    return 'pythonDrivenTests' in test
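+# A usage sketch (the jar path and annotations are illustrative):
+#
+#   jar = TestJar('out/Debug/lib.java/ChromeShellTest.jar')
+#   tests = jar.GetAllMatchingTests(['SmallTest'], ['FlakyTest'], None)
+#
+# This returns every Class#test method annotated SmallTest (plus
+# un-annotated tests, which are assumed to be SmallTest), minus anything
+# annotated FlakyTest, further pruned by the attached devices' SDK levels.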
diff --git a/build/android/pylib/instrumentation/test_options.py b/build/android/pylib/instrumentation/test_options.py
new file mode 100644
index 0000000..e7b7a9f
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_options.py
@@ -0,0 +1,27 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the InstrumentationOptions named tuple."""
+
+import collections
+
+InstrumentationOptions = collections.namedtuple('InstrumentationOptions', [
+    'tool',
+    'annotations',
+    'exclude_annotations',
+    'test_filter',
+    'test_data',
+    'save_perf_json',
+    'screenshot_failures',
+    'wait_for_debugger',
+    'coverage_dir',
+    'test_apk',
+    'test_apk_path',
+    'test_apk_jar_path',
+    'test_runner',
+    'test_support_apk_path',
+    'device_flags',
+    'isolate_file_path',
+    'set_asserts',
+    'delete_stale_data'])
diff --git a/build/android/pylib/instrumentation/test_package.py b/build/android/pylib/instrumentation/test_package.py
new file mode 100644
index 0000000..5be061d
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_package.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class representing instrumentation test apk and jar."""
+
+import os
+
+from pylib.instrumentation import test_jar
+from pylib.utils import apk_helper
+
+
+class TestPackage(test_jar.TestJar):
+  def __init__(self, apk_path, jar_path, test_support_apk_path):
+    test_jar.TestJar.__init__(self, jar_path)
+
+    if not os.path.exists(apk_path):
+      raise Exception('%s not found, please build it' % apk_path)
+    if test_support_apk_path and not os.path.exists(test_support_apk_path):
+      raise Exception('%s not found, please build it' % test_support_apk_path)
+    self._apk_path = apk_path
+    self._apk_name = os.path.splitext(os.path.basename(apk_path))[0]
+    self._package_name = apk_helper.GetPackageName(self._apk_path)
+    self._test_support_apk_path = test_support_apk_path
+
+  def GetApkPath(self):
+    """Returns the absolute path to the APK."""
+    return self._apk_path
+
+  def GetApkName(self):
+    """Returns the name of the apk without the suffix."""
+    return self._apk_name
+
+  def GetPackageName(self):
+    """Returns the package name of this APK."""
+    return self._package_name
+
+  # Override.
+  def Install(self, device):
+    device.Install(self.GetApkPath())
+    if (self._test_support_apk_path and
+        os.path.exists(self._test_support_apk_path)):
+      device.Install(self._test_support_apk_path)
+
diff --git a/build/android/pylib/instrumentation/test_result.py b/build/android/pylib/instrumentation/test_result.py
new file mode 100644
index 0000000..24e80a8
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_result.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import base_test_result
+
+
+class InstrumentationTestResult(base_test_result.BaseTestResult):
+  """Result information for a single instrumentation test."""
+
+  def __init__(self, full_name, test_type, start_date, dur, log=''):
+    """Construct an InstrumentationTestResult object.
+
+    Args:
+      full_name: Full name of the test.
+      test_type: Type of the test result as defined in ResultType.
+      start_date: Date in milliseconds when the test began running.
+      dur: Duration of the test run in milliseconds.
+      log: A string listing any errors.
+    """
+    super(InstrumentationTestResult, self).__init__(
+        full_name, test_type, dur, log)
+    name_pieces = full_name.rsplit('#', 1)
+    if len(name_pieces) > 1:
+      self._test_name = name_pieces[1]
+      self._class_name = name_pieces[0]
+    else:
+      self._class_name = full_name
+      self._test_name = full_name
+    self._start_date = start_date
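+  # For example (hypothetical name), 'org.chromium.FooTest#testBar' splits
+  # into class name 'org.chromium.FooTest' and test name 'testBar'; a full
+  # name without '#' is used as both the class and the test name.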
diff --git a/build/android/pylib/instrumentation/test_runner.py b/build/android/pylib/instrumentation/test_runner.py
new file mode 100644
index 0000000..0f2e53f
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_runner.py
@@ -0,0 +1,374 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class for running instrumentation tests on a single device."""
+
+import logging
+import os
+import re
+import sys
+import time
+
+from pylib import constants
+from pylib import flag_changer
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.device import device_errors
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.instrumentation import json_perf_parser
+from pylib.instrumentation import test_result
+from pylib.local.device import local_device_instrumentation_test_run
+
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                             'common'))
+import perf_tests_results_helper # pylint: disable=F0401
+
+
+_PERF_TEST_ANNOTATION = 'PerfTest'
+
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  """Responsible for running a series of tests connected to a single device."""
+
+  _DEVICE_COVERAGE_DIR = 'chrome/test/coverage'
+  _HOSTMACHINE_PERF_OUTPUT_FILE = '/tmp/chrome-profile'
+  _DEVICE_PERF_OUTPUT_SEARCH_PREFIX = (constants.DEVICE_PERF_OUTPUT_DIR +
+                                       '/chrome-profile*')
+
+  def __init__(self, test_options, device, shard_index, test_pkg,
+               additional_flags=None):
+    """Create a new TestRunner.
+
+    Args:
+      test_options: An InstrumentationOptions object.
+      device: Attached android device.
+      shard_index: Shard index.
+      test_pkg: A TestPackage object.
+      additional_flags: A list of additional flags to add to the command line.
+    """
+    super(TestRunner, self).__init__(device, test_options.tool)
+    self._lighttp_port = constants.LIGHTTPD_RANDOM_PORT_FIRST + shard_index
+    self._logcat_monitor = None
+
+    self.coverage_device_file = None
+    self.coverage_dir = test_options.coverage_dir
+    self.coverage_host_file = None
+    self.options = test_options
+    self.test_pkg = test_pkg
+    # Use the correct command line file for the package under test.
+    cmdline_file = [a.cmdline_file for a in constants.PACKAGE_INFO.itervalues()
+                    if a.test_package == self.test_pkg.GetPackageName()]
+    assert len(cmdline_file) < 2, 'Multiple packages have the same test package'
+    if len(cmdline_file) and cmdline_file[0]:
+      self.flags = flag_changer.FlagChanger(self.device, cmdline_file[0])
+      if additional_flags:
+        self.flags.AddFlags(additional_flags)
+    else:
+      self.flags = None
+
+  #override
+  def InstallTestPackage(self):
+    self.test_pkg.Install(self.device)
+
+  def _GetInstrumentationArgs(self):
+    ret = {}
+    if self.options.wait_for_debugger:
+      ret['debug'] = 'true'
+    if self.coverage_dir:
+      ret['coverage'] = 'true'
+      ret['coverageFile'] = self.coverage_device_file
+
+    return ret
+
+  def _TakeScreenshot(self, test):
+    """Takes a screenshot from the device."""
+    screenshot_name = os.path.join(constants.SCREENSHOTS_DIR, '%s.png' % test)
+    logging.info('Taking screenshot named %s', screenshot_name)
+    self.device.TakeScreenshot(screenshot_name)
+
+  def SetUp(self):
+    """Sets up the test harness and device before all tests are run."""
+    super(TestRunner, self).SetUp()
+    if not self.device.HasRoot():
+      logging.warning('Unable to enable java asserts for %s, non-rooted device',
+                      str(self.device))
+    else:
+      if self.device.SetJavaAsserts(self.options.set_asserts):
+        # TODO(jbudorick) How to best do shell restart after the
+        #                 android_commands refactor?
+        self.device.RunShellCommand('stop')
+        self.device.RunShellCommand('start')
+        self.device.WaitUntilFullyBooted()
+
+    # Give each shard a different default port for the HTTP server; otherwise
+    # multiple processes may race to launch lighttpd on the same port at the
+    # same time.
+    self.LaunchTestHttpServer(
+        os.path.join(constants.DIR_SOURCE_ROOT), self._lighttp_port)
+    if self.flags:
+      self.flags.AddFlags(['--disable-fre', '--enable-test-intents'])
+      if self.options.device_flags:
+        with open(self.options.device_flags) as device_flags_file:
+          stripped_flags = (l.strip() for l in device_flags_file)
+          self.flags.AddFlags([flag for flag in stripped_flags if flag])
+
+  def TearDown(self):
+    """Cleans up the test harness and saves outstanding data from test run."""
+    if self.flags:
+      self.flags.Restore()
+    super(TestRunner, self).TearDown()
+
+  def TestSetup(self, test):
+    """Sets up the test harness for running a particular test.
+
+    Args:
+      test: The name of the test that will be run.
+    """
+    self.SetupPerfMonitoringIfNeeded(test)
+    self._SetupIndividualTestTimeoutScale(test)
+    self.tool.SetupEnvironment()
+
+    if self.flags and self._IsFreTest(test):
+      self.flags.RemoveFlags(['--disable-fre'])
+
+    # Make sure the forwarder is still running.
+    self._RestartHttpServerForwarderIfNecessary()
+
+    if self.coverage_dir:
+      coverage_basename = '%s.ec' % test
+      self.coverage_device_file = '%s/%s/%s' % (
+          self.device.GetExternalStoragePath(),
+          TestRunner._DEVICE_COVERAGE_DIR, coverage_basename)
+      self.coverage_host_file = os.path.join(
+          self.coverage_dir, coverage_basename)
+
+  def _IsFreTest(self, test):
+    """Determines whether a test is a first run experience test.
+
+    Args:
+      test: The name of the test to be checked.
+
+    Returns:
+      Whether the feature being tested is FirstRunExperience.
+    """
+    annotations = self.test_pkg.GetTestAnnotations(test)
+    return 'FirstRunExperience' == annotations.get('Feature', None)
+
+  def _IsPerfTest(self, test):
+    """Determines whether a test is a performance test.
+
+    Args:
+      test: The name of the test to be checked.
+
+    Returns:
+      Whether the test is annotated as a performance test.
+    """
+    return _PERF_TEST_ANNOTATION in self.test_pkg.GetTestAnnotations(test)
+
+  def SetupPerfMonitoringIfNeeded(self, test):
+    """Sets up performance monitoring if the specified test requires it.
+
+    Args:
+      test: The name of the test to be run.
+    """
+    if not self._IsPerfTest(test):
+      return
+    self.device.RunShellCommand(
+        ['rm', TestRunner._DEVICE_PERF_OUTPUT_SEARCH_PREFIX])
+    self._logcat_monitor = self.device.GetLogcatMonitor()
+    self._logcat_monitor.Start()
+
+  def TestTeardown(self, test, result):
+    """Cleans up the test harness after running a particular test.
+
+    Depending on the options of this TestRunner this might handle performance
+    tracking.  This method will only be called if the test passed.
+
+    Args:
+      test: The name of the test that was just run.
+      result: result for this test.
+    """
+
+    self.tool.CleanUpEnvironment()
+
+    # The logic below relies on the test passing.
+    if not result or not result.DidRunPass():
+      return
+
+    self.TearDownPerfMonitoring(test)
+
+    if self.flags and self._IsFreTest(test):
+      self.flags.AddFlags(['--disable-fre'])
+
+    if self.coverage_dir:
+      self.device.PullFile(
+          self.coverage_device_file, self.coverage_host_file)
+      self.device.RunShellCommand(
+          'rm -f %s' % self.coverage_device_file)
+
+  def TearDownPerfMonitoring(self, test):
+    """Cleans up performance monitoring if the specified test required it.
+
+    Args:
+      test: The name of the test that was just run.
+    Raises:
+      Exception: if there's anything wrong with the perf data.
+    """
+    if not self._IsPerfTest(test):
+      return
+    raw_test_name = test.split('#')[1]
+
+    # Wait and grab annotation data so we can figure out which traces to parse
+    regex = self._logcat_monitor.WaitFor(
+        re.compile(r'\*\*PERFANNOTATION\(' + raw_test_name + r'\)\:(.*)'))
+
+    # If the test is set to run on a specific device type only (i.e. only
+    # tablet or phone) and it is being run on the wrong device, the test
+    # just quits and does not do anything.  The java test harness will still
+    # print the appropriate annotation for us, but will add --NORUN-- so we
+    # know to ignore the results.
+    # The --NORUN-- tag is managed by ChromeTabbedActivityTestBase.java.
+    if regex.group(1) != '--NORUN--':
+
+      # Obtain the relevant perf data.  The data is dumped to a
+      # JSON formatted file.
+      json_string = self.device.ReadFile(
+          '/data/data/com.google.android.apps.chrome/files/PerfTestData.txt',
+          as_root=True)
+
+      if not json_string:
+        raise Exception('Perf file is empty')
+
+      if self.options.save_perf_json:
+        json_local_file = '/tmp/chromium-android-perf-json-' + raw_test_name
+        with open(json_local_file, 'w') as f:
+          f.write(json_string)
+        logging.info('Saving Perf UI JSON from test %s to %s',
+                     test, json_local_file)
+
+      raw_perf_data = regex.group(1).split(';')
+
+      for raw_perf_set in raw_perf_data:
+        if raw_perf_set:
+          perf_set = raw_perf_set.split(',')
+          if len(perf_set) != 3:
+            raise Exception('Unexpected number of tokens in perf annotation '
+                            'string: ' + raw_perf_set)
+
+          # Process the performance data
+          result = json_perf_parser.GetAverageRunInfoFromJSONString(json_string,
+                                                                    perf_set[0])
+          perf_tests_results_helper.PrintPerfResult(perf_set[1], perf_set[2],
+                                                    [result['average']],
+                                                    result['units'])
+
+  def _SetupIndividualTestTimeoutScale(self, test):
+    timeout_scale = self._GetIndividualTestTimeoutScale(test)
+    valgrind_tools.SetChromeTimeoutScale(self.device, timeout_scale)
+
+  def _GetIndividualTestTimeoutScale(self, test):
+    """Returns the timeout scale for the given |test|."""
+    annotations = self.test_pkg.GetTestAnnotations(test)
+    timeout_scale = 1
+    if 'TimeoutScale' in annotations:
+      try:
+        timeout_scale = int(annotations['TimeoutScale'])
+      except ValueError:
+        logging.warning('Non-integer value of TimeoutScale ignored. (%s)',
+                        annotations['TimeoutScale'])
+    if self.options.wait_for_debugger:
+      timeout_scale *= 100
+    return timeout_scale
+
+  def _GetIndividualTestTimeoutSecs(self, test):
+    """Returns the timeout in seconds for the given |test|."""
+    annotations = self.test_pkg.GetTestAnnotations(test)
+    if 'Manual' in annotations:
+      return 10 * 60 * 60
+    if 'IntegrationTest' in annotations:
+      return 30 * 60
+    if 'External' in annotations:
+      return 10 * 60
+    if 'EnormousTest' in annotations:
+      return 10 * 60
+    if 'LargeTest' in annotations or _PERF_TEST_ANNOTATION in annotations:
+      return 5 * 60
+    if 'MediumTest' in annotations:
+      return 3 * 60
+    if 'SmallTest' in annotations:
+      return 1 * 60
+
+    logging.warning("Test size not found in annotations for test '%s', "
+                    "using 1 minute for timeout.", test)
+    return 1 * 60
+
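+  # Putting the two timeout helpers together (numbers are illustrative): a
+  # MediumTest with a TimeoutScale annotation of 2, run under the default
+  # tool, gets 3 * 60 * 2 * 1 == 360 seconds in RunTest below;
+  # --wait-for-debugger multiplies the scale by a further 100.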
+  def _RunTest(self, test, timeout):
+    """Runs a single instrumentation test.
+
+    Args:
+      test: Test class/method.
+      timeout: Timeout time in seconds.
+
+    Returns:
+      The raw output of am instrument as a list of lines.
+    """
+    extras = self._GetInstrumentationArgs()
+    extras['class'] = test
+    return self.device.StartInstrumentation(
+        '%s/%s' % (self.test_pkg.GetPackageName(), self.options.test_runner),
+        raw=True, extras=extras, timeout=timeout, retries=3)
+
+  def _GenerateTestResult(self, test, instr_result_code, instr_result_bundle,
+                          statuses, start_ms, duration_ms):
+    results = instrumentation_test_instance.GenerateTestResults(
+        instr_result_code, instr_result_bundle, statuses, start_ms, duration_ms)
+    for r in results:
+      if r.GetName() == test:
+        return r
+    logging.error('Could not find result for test: %s', test)
+    return test_result.InstrumentationTestResult(
+        test, base_test_result.ResultType.UNKNOWN, start_ms, duration_ms)
+
+  #override
+  def RunTest(self, test):
+    results = base_test_result.TestRunResults()
+    timeout = (self._GetIndividualTestTimeoutSecs(test) *
+               self._GetIndividualTestTimeoutScale(test) *
+               self.tool.GetTimeoutScale())
+
+    start_ms = 0
+    duration_ms = 0
+    try:
+      self.TestSetup(test)
+
+      try:
+        self.device.GoHome()
+      except device_errors.CommandTimeoutError:
+        logging.exception('Failed to focus the launcher.')
+
+      time_ms = lambda: int(time.time() * 1000)
+      start_ms = time_ms()
+      raw_output = self._RunTest(test, timeout)
+      duration_ms = time_ms() - start_ms
+
+      # Parse the test output
+      result_code, result_bundle, statuses = (
+          instrumentation_test_instance.ParseAmInstrumentRawOutput(raw_output))
+      result = self._GenerateTestResult(
+          test, result_code, result_bundle, statuses, start_ms, duration_ms)
+      if local_device_instrumentation_test_run.DidPackageCrashOnDevice(
+          self.test_pkg.GetPackageName(), self.device):
+        result.SetType(base_test_result.ResultType.CRASH)
+      results.AddResult(result)
+    except device_errors.CommandTimeoutError as e:
+      results.AddResult(test_result.InstrumentationTestResult(
+          test, base_test_result.ResultType.TIMEOUT, start_ms, duration_ms,
+          log=str(e) or 'No information'))
+    except device_errors.DeviceUnreachableError as e:
+      results.AddResult(test_result.InstrumentationTestResult(
+          test, base_test_result.ResultType.CRASH, start_ms, duration_ms,
+          log=str(e) or 'No information'))
+    self.TestTeardown(test, results)
+    return (results, None if results.DidRunPass() else test)
diff --git a/build/android/pylib/junit/__init__.py b/build/android/pylib/junit/__init__.py
new file mode 100644
index 0000000..5cac026
--- /dev/null
+++ b/build/android/pylib/junit/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/junit/setup.py b/build/android/pylib/junit/setup.py
new file mode 100644
index 0000000..94d4277
--- /dev/null
+++ b/build/android/pylib/junit/setup.py
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.junit import test_runner
+
+def Setup(args):
+  """Creates a test runner factory for junit tests.
+
+  Args:
+    args: an argparse.Namespace object.
+  Returns:
+    A (runner_factory, tests) tuple.
+  """
+
+  def TestRunnerFactory(_unused_device, _unused_shard_index):
+    return test_runner.JavaTestRunner(args)
+
+  return (TestRunnerFactory, ['JUnit tests'])
+
diff --git a/build/android/pylib/junit/test_dispatcher.py b/build/android/pylib/junit/test_dispatcher.py
new file mode 100644
index 0000000..6e0d865
--- /dev/null
+++ b/build/android/pylib/junit/test_dispatcher.py
@@ -0,0 +1,28 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+from pylib.base import base_test_result
+
+def RunTests(tests, runner_factory):
+  """Runs a set of java tests on the host.
+
+  Returns:
+    A tuple containing the results & the exit code.
+  """
+  def run(t):
+    runner = runner_factory(None, None)
+    runner.SetUp()
+    results_list, return_code = runner.RunTest(t)
+    runner.TearDown()
+    return (results_list, return_code == 0)
+
+  test_run_results = base_test_result.TestRunResults()
+  exit_code = 0
+  for t in tests:
+    results_list, passed = run(t)
+    test_run_results.AddResults(results_list)
+    if not passed:
+      exit_code = constants.ERROR_EXIT_CODE
+  return (test_run_results, exit_code)
\ No newline at end of file
diff --git a/build/android/pylib/junit/test_runner.py b/build/android/pylib/junit/test_runner.py
new file mode 100644
index 0000000..a6d3bf9
--- /dev/null
+++ b/build/android/pylib/junit/test_runner.py
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import tempfile
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.results import json_results
+
+class JavaTestRunner(object):
+  """Runs java tests on the host."""
+
+  def __init__(self, args):
+    self._package_filter = args.package_filter
+    self._runner_filter = args.runner_filter
+    self._sdk_version = args.sdk_version
+    self._test_filter = args.test_filter
+    self._test_suite = args.test_suite
+
+  def SetUp(self):
+    pass
+
+  def RunTest(self, _test):
+    """Runs junit tests from |self._test_suite|."""
+    with tempfile.NamedTemporaryFile() as json_file:
+      java_script = os.path.join(
+          constants.GetOutDirectory(), 'bin', self._test_suite)
+      command = [java_script,
+                 '-test-jars', self._test_suite + '.jar',
+                 '-json-results-file', json_file.name]
+      if self._test_filter:
+        command.extend(['-gtest-filter', self._test_filter])
+      if self._package_filter:
+        command.extend(['-package-filter', self._package_filter])
+      if self._runner_filter:
+        command.extend(['-runner-filter', self._runner_filter])
+      if self._sdk_version:
+        command.extend(['-sdk-version', self._sdk_version])
+      return_code = cmd_helper.RunCmd(command)
+      results_list = json_results.ParseResultsFromJson(
+          json.loads(json_file.read()))
+      return (results_list, return_code)
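+
+  # For reference, a command assembled above looks roughly like this
+  # (paths and filter values are illustrative):
+  #
+  #   out/Debug/bin/junit_unit_tests \
+  #       -test-jars junit_unit_tests.jar \
+  #       -json-results-file /tmp/tmpXXXXXX \
+  #       -gtest-filter org.chromium.example.FooTest.*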
+
+  def TearDown(self):
+    pass
+
diff --git a/build/android/pylib/linker/__init__.py b/build/android/pylib/linker/__init__.py
new file mode 100644
index 0000000..af99437
--- /dev/null
+++ b/build/android/pylib/linker/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/linker/setup.py b/build/android/pylib/linker/setup.py
new file mode 100644
index 0000000..5776f5a
--- /dev/null
+++ b/build/android/pylib/linker/setup.py
@@ -0,0 +1,45 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Setup for linker tests."""
+
+import os
+import sys
+
+from pylib import constants
+from pylib.linker import test_case
+from pylib.linker import test_runner
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                             'common'))
+import unittest_util # pylint: disable=F0401
+
+def Setup(args, _devices):
+  """Creates a list of test cases and a runner factory.
+
+  Args:
+    args: an argparse.Namespace object.
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  test_cases = [
+      test_case.LinkerLibraryAddressTest,
+      test_case.LinkerSharedRelroTest,
+      test_case.LinkerRandomizationTest]
+
+  low_memory_modes = [False, True]
+  all_tests = [t(is_low_memory=m) for t in test_cases for m in low_memory_modes]
+
+  if args.test_filter:
+    all_test_names = [test.qualified_name for test in all_tests]
+    filtered_test_names = unittest_util.FilterTestNames(all_test_names,
+                                                        args.test_filter)
+    all_tests = [t for t in all_tests
+                 if t.qualified_name in filtered_test_names]
+
+  def TestRunnerFactory(device, _shard_index):
+    return test_runner.LinkerTestRunner(device, args.tool)
+
+  return (TestRunnerFactory, all_tests)
diff --git a/build/android/pylib/linker/test_case.py b/build/android/pylib/linker/test_case.py
new file mode 100644
index 0000000..c7b0f50
--- /dev/null
+++ b/build/android/pylib/linker/test_case.py
@@ -0,0 +1,496 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for linker-specific test cases.
+
+   The custom dynamic linker can only be tested through a custom test case
+   for various technical reasons:
+
+     - It's an 'invisible feature', i.e. it doesn't expose a new API or
+       behaviour; all it does is save RAM when loading native libraries.
+
+     - Checking that it works correctly requires several things that do not
+       fit the existing GTest-based and instrumentation-based tests:
+
+         - Native test code needs to be run in both the browser and renderer
+           process at the same time just after loading native libraries, in
+           a completely asynchronous way.
+
+         - Each test case requires restarting a whole new application process
+           with a different command-line.
+
+         - Enabling test support in the Linker code requires building a special
+           APK with a flag to activate special test-only support code in the
+           Linker code itself.
+
+       Host-driven tests have also been tried, but since they're really
+       sub-classes of instrumentation tests, they didn't work well either.
+
+   To build and run the linker tests, do the following:
+
+     ninja -C out/Debug chromium_linker_test_apk
+     build/android/test_runner.py linker
+
+"""
+# pylint: disable=R0201
+
+import logging
+import os
+import re
+import time
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.device import device_errors
+from pylib.device import intent
+
+
+ResultType = base_test_result.ResultType
+
+_PACKAGE_NAME = 'org.chromium.chromium_linker_test_apk'
+_ACTIVITY_NAME = '.ChromiumLinkerTestActivity'
+_COMMAND_LINE_FILE = '/data/local/tmp/chromium-linker-test-command-line'
+
+# Path to the Linker.java source file.
+_LINKER_JAVA_SOURCE_PATH = (
+    'base/android/java/src/org/chromium/base/library_loader/Linker.java')
+
+# A regular expression used to extract the browser shared RELRO configuration
+# from the Java source file above.
+_RE_LINKER_BROWSER_CONFIG = re.compile(
+    r'.*BROWSER_SHARED_RELRO_CONFIG\s+=\s+' +
+        r'BROWSER_SHARED_RELRO_CONFIG_(\S+)\s*;.*',
+    re.MULTILINE | re.DOTALL)
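+
+# For reference, the expression above is intended to match a Java declaration
+# of roughly the following form (modifiers and value are illustrative):
+#
+#   public static final int BROWSER_SHARED_RELRO_CONFIG =
+#       BROWSER_SHARED_RELRO_CONFIG_LOW_RAM_ONLY;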
+
+# Logcat filters used during each test. Only the 'chromium' one is really
+# needed, but the logs are added to the TestResult in case of error, and
+# it is handy to have the 'chromium_android_linker' ones as well when
+# troubleshooting.
+_LOGCAT_FILTERS = ['*:s', 'chromium:v', 'chromium_android_linker:v']
+#_LOGCAT_FILTERS = ['*:v']  ## DEBUG
+
+# Regular expression used to match status lines in logcat.
+_RE_BROWSER_STATUS_LINE = re.compile(r' BROWSER_LINKER_TEST: (FAIL|SUCCESS)$')
+_RE_RENDERER_STATUS_LINE = re.compile(r' RENDERER_LINKER_TEST: (FAIL|SUCCESS)$')
+
+# Regular expression used to match library load addresses in logcat.
+_RE_LIBRARY_ADDRESS = re.compile(
+    r'(BROWSER|RENDERER)_LIBRARY_ADDRESS: (\S+) ([0-9A-Fa-f]+)')
+
+
+def _GetBrowserSharedRelroConfig():
+  """Returns a string corresponding to the Linker's configuration of shared
+     RELRO sections in the browser process. This parses the Java linker source
+     file to get the appropriate information.
+  Returns:
+     None in case of error (e.g. could not locate the source file).
+     'NEVER' if the browser process shall never use shared RELROs.
+     'LOW_RAM_ONLY' if it uses them only on low-end devices.
+     'ALWAYS' if it always uses a shared RELRO.
+  """
+  source_path = \
+      os.path.join(constants.DIR_SOURCE_ROOT, _LINKER_JAVA_SOURCE_PATH)
+  if not os.path.exists(source_path):
+    logging.error('Could not find linker source file: ' + source_path)
+    return None
+
+  with open(source_path) as f:
+    configs = _RE_LINKER_BROWSER_CONFIG.findall(f.read())
+    if not configs:
+      logging.error(
+          'Can\'t find browser shared RELRO configuration value in ' + \
+          source_path)
+      return None
+
+    if configs[0] not in ['NEVER', 'LOW_RAM_ONLY', 'ALWAYS']:
+      logging.error('Unexpected browser config value: ' + configs[0])
+      return None
+
+    logging.info('Found linker browser shared RELRO config: ' + configs[0])
+    return configs[0]
+
+
+def _StartActivityAndWaitForLinkerTestStatus(device, timeout):
+  """Force-start an activity and wait up to |timeout| seconds until the full
+     linker test status lines appear in the logcat, recorded through |device|.
+  Args:
+    device: A DeviceUtils instance.
+    timeout: Timeout in seconds
+  Returns:
+    A (status, logs) tuple, where status is a ResultType constant, and logs
+    is the final logcat output as a string.
+  """
+
+  # 1. Start recording logcat with appropriate filters.
+  with device.GetLogcatMonitor(filter_specs=_LOGCAT_FILTERS) as logmon:
+
+    # 2. Force-start activity.
+    device.StartActivity(
+        intent.Intent(package=_PACKAGE_NAME, activity=_ACTIVITY_NAME),
+        force_stop=True)
+
+    # 3. Wait up to |timeout| seconds until the test status is in the logcat.
+    result = ResultType.PASS
+    try:
+      browser_match = logmon.WaitFor(_RE_BROWSER_STATUS_LINE, timeout=timeout)
+      logging.debug('Found browser match: %s', browser_match.group(0))
+      renderer_match = logmon.WaitFor(_RE_RENDERER_STATUS_LINE,
+                                      timeout=timeout)
+      logging.debug('Found renderer match: %s', renderer_match.group(0))
+      if (browser_match.group(1) != 'SUCCESS'
+          or renderer_match.group(1) != 'SUCCESS'):
+        result = ResultType.FAIL
+    except device_errors.CommandTimeoutError:
+      result = ResultType.TIMEOUT
+
+    return result, '\n'.join(device.adb.Logcat(dump=True))
+
+
+class LibraryLoadMap(dict):
+  """A helper class to pretty-print a map of library names to load addresses."""
+  def __str__(self):
+    items = ['\'%s\': 0x%x' % (name, address) for \
+        (name, address) in self.iteritems()]
+    return '{%s}' % (', '.join(items))
+
+  def __repr__(self):
+    return 'LibraryLoadMap(%s)' % self.__str__()
+
+
+class AddressList(list):
+  """A helper class to pretty-print a list of load addresses."""
+  def __str__(self):
+    items = ['0x%x' % address for address in self]
+    return '[%s]' % (', '.join(items))
+
+  def __repr__(self):
+    return 'AddressList(%s)' % self.__str__()
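+
+# Illustrative behaviour of the two helpers above:
+#
+#   >>> str(LibraryLoadMap({'libfoo.so': 0x60200000}))
+#   "{'libfoo.so': 0x60200000}"
+#   >>> str(AddressList([0x60200000, 0x60300000]))
+#   '[0x60200000, 0x60300000]'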
+
+
+def _ExtractLibraryLoadAddressesFromLogcat(logs):
+  """Extract the names and addresses of shared libraries loaded in the
+     browser and renderer processes.
+  Args:
+    logs: A string containing logcat output.
+  Returns:
+    A tuple (browser_libs, renderer_libs), where each item is a map of
+    library names (strings) to library load addresses (ints), for the
+    browser and renderer processes, respectively.
+  """
+  browser_libs = LibraryLoadMap()
+  renderer_libs = LibraryLoadMap()
+  for m in _RE_LIBRARY_ADDRESS.finditer(logs):
+    process_type, lib_name, lib_address = m.groups()
+    lib_address = int(lib_address, 16)
+    if process_type == 'BROWSER':
+      browser_libs[lib_name] = lib_address
+    elif process_type == 'RENDERER':
+      renderer_libs[lib_name] = lib_address
+    else:
+      assert False, 'Invalid process type'
+
+  return browser_libs, renderer_libs
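+
+# For example (library name and address are illustrative), a logcat line
+# such as:
+#
+#   BROWSER_LIBRARY_ADDRESS: libfoo.so 60200000
+#
+# yields browser_libs == {'libfoo.so': 0x60200000}.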
+
+
+def _CheckLoadAddressRandomization(lib_map_list, process_type):
+  """Check that a map of library load addresses is random enough.
+  Args:
+    lib_map_list: a list of dictionaries that map library names (string)
+      to load addresses (int). Each item in the list corresponds to a
+      different run / process start.
+    process_type: a string describing the process type.
+  Returns:
+    (status, logs) tuple, where <status> is True iff the load addresses are
+    randomized, False otherwise, and <logs> is a string containing an error
+    message detailing the libraries that are not randomized properly.
+  """
+  # Collect, for each library, its list of load addresses.
+  lib_addr_map = {}
+  for lib_map in lib_map_list:
+    for lib_name, lib_address in lib_map.iteritems():
+      if lib_name not in lib_addr_map:
+        lib_addr_map[lib_name] = AddressList()
+      lib_addr_map[lib_name].append(lib_address)
+
+  logging.info('%s library load map: %s', process_type, lib_addr_map)
+
+  # For each library, check the randomness of its load addresses.
+  bad_libs = {}
+  for lib_name, lib_address_list in lib_addr_map.iteritems():
+    # Consider randomization broken if there is more than one pair of
+    # identical addresses in the list (a single collision is tolerated).
+    lib_address_set = set(lib_address_list)
+    if len(lib_address_set) < len(lib_address_list) - 1:
+      bad_libs[lib_name] = lib_address_list
+
+  if bad_libs:
+    return False, '%s libraries failed randomization: %s' % \
+        (process_type, bad_libs)
+
+  return True, '%s libraries properly randomized: %s' % \
+      (process_type, lib_addr_map)
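+
+# Worked example of the rule above (addresses illustrative): for five runs
+# with load addresses [0x100, 0x200, 0x200, 0x300, 0x400] the set has four
+# distinct entries, len(set) >= len(list) - 1 holds, and the single collision
+# is tolerated. For [0x100, 0x100, 0x200, 0x200, 0x300] the set has only
+# three entries, so the library is reported as not properly randomized.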
+
+
+class LinkerTestCaseBase(object):
+  """Base class for linker test cases."""
+
+  def __init__(self, is_low_memory=False):
+    """Create a test case.
+    Args:
+      is_low_memory: True to simulate a low-memory device, False otherwise.
+    """
+    self.is_low_memory = is_low_memory
+    if is_low_memory:
+      test_suffix = 'ForLowMemoryDevice'
+    else:
+      test_suffix = 'ForRegularDevice'
+    class_name = self.__class__.__name__
+    self.qualified_name = '%s.%s' % (class_name, test_suffix)
+    self.tagged_name = self.qualified_name
+
+  def _RunTest(self, _device):
+    """Run the test, must be overriden.
+    Args:
+      _device: A DeviceUtils interface.
+    Returns:
+      A (status, log) tuple, where <status> is a ResultType constant, and <log>
+      is the logcat output captured during the test in case of error, or None
+      in case of success.
+    """
+    return ResultType.FAIL, 'Unimplemented _RunTest() method!'
+
+  def Run(self, device):
+    """Run the test on a given device.
+    Args:
+      device: Name of target device where to run the test.
+    Returns:
+      A base_test_result.TestRunResult() instance.
+    """
+    margin = 8
+    print '[ %-*s ] %s' % (margin, 'RUN', self.tagged_name)
+    logging.info('Running linker test: %s', self.tagged_name)
+
+    # Create command-line file on device.
+    command_line_flags = ''
+    if self.is_low_memory:
+      command_line_flags = '--low-memory-device'
+    device.WriteFile(_COMMAND_LINE_FILE, command_line_flags)
+
+    # Run the test.
+    status, logs = self._RunTest(device)
+
+    result_text = 'OK'
+    if status == ResultType.FAIL:
+      result_text = 'FAILED'
+    elif status == ResultType.TIMEOUT:
+      result_text = 'TIMEOUT'
+    print '[ %*s ] %s' % (margin, result_text, self.tagged_name)
+
+    results = base_test_result.TestRunResults()
+    results.AddResult(
+        base_test_result.BaseTestResult(
+            self.tagged_name,
+            status,
+            log=logs))
+
+    return results
+
+  def __str__(self):
+    return self.tagged_name
+
+  def __repr__(self):
+    return self.tagged_name
+
+
+class LinkerSharedRelroTest(LinkerTestCaseBase):
+  """A linker test case to check the status of shared RELRO sections.
+
+    The core of the checks performed here are pretty simple:
+
+      - Clear the logcat and start recording with an appropriate set of filters.
+      - Create the command-line appropriate for the test-case.
+      - Start the activity (always forcing a cold start).
+      - Every second, look at the current content of the filtered logcat lines
+        and look for instances of the following:
+
+            BROWSER_LINKER_TEST: <status>
+            RENDERER_LINKER_TEST: <status>
+
+        where <status> can be either FAIL or SUCCESS. These lines can appear
+        in any order in the logcat. Once both browser and renderer status are
+        found, stop the loop. Otherwise timeout after 30 seconds.
+
+        Note that there can be other lines beginning with BROWSER_LINKER_TEST:
+        and RENDERER_LINKER_TEST:, but they are not followed by a <status>
+        code.
+
+      - The test case passes if the <status> for both the browser and renderer
+        processes is SUCCESS. Otherwise it is a failure.
+  """
+  def _RunTest(self, device):
+    # Wait up to 30 seconds until the linker test status is in the logcat.
+    return _StartActivityAndWaitForLinkerTestStatus(device, timeout=30)
+
+
+class LinkerLibraryAddressTest(LinkerTestCaseBase):
+  """A test case that verifies library load addresses.
+
+     The point of this check is to ensure that the libraries are loaded
+     according to the following rules:
+
+     - For low-memory devices, they should always be loaded at the same address
+       in both browser and renderer processes, both below 0x4000_0000.
+
+     - For regular devices, the browser process should load libraries above
+       0x4000_0000, and renderer ones below it.
+  """
+  def _RunTest(self, device):
+    result, logs = _StartActivityAndWaitForLinkerTestStatus(device, timeout=30)
+
+    # Return immediately in case of timeout.
+    if result == ResultType.TIMEOUT:
+      return result, logs
+
+    # Collect the library load addresses in the browser and renderer processes.
+    browser_libs, renderer_libs = _ExtractLibraryLoadAddressesFromLogcat(logs)
+
+    logging.info('Browser libraries: %s', browser_libs)
+    logging.info('Renderer libraries: %s', renderer_libs)
+
+    # Check that the same libraries are loaded into both processes:
+    browser_set = set(browser_libs.keys())
+    renderer_set = set(renderer_libs.keys())
+    if browser_set != renderer_set:
+      logging.error('Library set mismatch: browser=%s renderer=%s',
+          browser_libs.keys(), renderer_libs.keys())
+      return ResultType.FAIL, logs
+
+    # And that they are not empty.
+    if not browser_set:
+      logging.error('No libraries loaded in any process!')
+      return ResultType.FAIL, logs
+
+    # Check that the renderer libraries are loaded at 'low addresses', i.e.
+    # below 0x4000_0000, for every kind of device.
+    memory_boundary = 0x40000000
+    bad_libs = []
+    for lib_name, lib_address in renderer_libs.iteritems():
+      if lib_address >= memory_boundary:
+        bad_libs.append((lib_name, lib_address))
+
+    if bad_libs:
+      logging.error('Renderer libraries loaded at high addresses: %s', bad_libs)
+      return ResultType.FAIL, logs
+
+    browser_config = _GetBrowserSharedRelroConfig()
+    if not browser_config:
+      return ResultType.FAIL, 'Bad linker source configuration'
+
+    if browser_config == 'ALWAYS' or \
+        (browser_config == 'LOW_RAM_ONLY' and self.is_low_memory):
+      # The libraries must all be loaded at the same addresses. This also
+      # implicitly checks that the browser libraries are at low addresses.
+      addr_mismatches = []
+      for lib_name, lib_address in browser_libs.iteritems():
+        lib_address2 = renderer_libs[lib_name]
+        if lib_address != lib_address2:
+          addr_mismatches.append((lib_name, lib_address, lib_address2))
+
+      if addr_mismatches:
+        logging.error('Library load address mismatches: %s',
+            addr_mismatches)
+        return ResultType.FAIL, logs
+
+    # Otherwise, check that libraries are loaded at 'high-addresses'.
+    # Note that for low-memory devices, the previous checks ensure that they
+    # were loaded at low-addresses.
+    else:
+      bad_libs = []
+      for lib_name, lib_address in browser_libs.iteritems():
+        if lib_address < memory_boundary:
+          bad_libs.append((lib_name, lib_address))
+
+      if bad_libs:
+        logging.error('Browser libraries loaded at low addresses: %s', bad_libs)
+        return ResultType.FAIL, logs
+
+    # Everything's ok.
+    return ResultType.PASS, logs
+
+
+class LinkerRandomizationTest(LinkerTestCaseBase):
+  """A linker test case to check that library load address randomization works
+     properly between successive starts of the test program/activity.
+
+     This starts the activity several times (each time forcing a new process
+     creation) and compares the load addresses of the libraries in them to
+     detect that they have changed.
+
+     In theory, two successive runs could (very rarely) use the same load
+     address, so loop 5 times and compare the values there. It is assumed
+     that if there is more than one pair of identical addresses, then the
+     load addresses are not random enough for this test.
+  """
+  def _RunTest(self, device):
+    max_loops = 5
+    browser_lib_map_list = []
+    renderer_lib_map_list = []
+    logs_list = []
+    for _ in range(max_loops):
+      # Start the activity.
+      result, logs = _StartActivityAndWaitForLinkerTestStatus(
+          device, timeout=30)
+      if result == ResultType.TIMEOUT:
+        # Something bad happened. Return immediately.
+        return result, logs
+
+      # Collect library addresses.
+      browser_libs, renderer_libs = _ExtractLibraryLoadAddressesFromLogcat(logs)
+      browser_lib_map_list.append(browser_libs)
+      renderer_lib_map_list.append(renderer_libs)
+      logs_list.append(logs)
+
+    # Check randomization in the browser libraries.
+    logs = '\n'.join(logs_list)
+
+    browser_status, browser_logs = _CheckLoadAddressRandomization(
+        browser_lib_map_list, 'Browser')
+
+    renderer_status, renderer_logs = _CheckLoadAddressRandomization(
+        renderer_lib_map_list, 'Renderer')
+
+    browser_config = _GetBrowserSharedRelroConfig()
+    if not browser_config:
+      return ResultType.FAIL, 'Bad linker source configuration'
+
+    if not browser_status:
+      if browser_config == 'ALWAYS' or \
+          (browser_config == 'LOW_RAM_ONLY' and self.is_low_memory):
+        return ResultType.FAIL, browser_logs
+
+      # IMPORTANT NOTE: The system's ASLR implementation seems to be very poor
+      # when starting an activity process in a loop with "adb shell am start".
+      #
+      # When simulating a regular device, loading libraries in the browser
+      # process uses a simple mmap(NULL, ...) to let the kernel decide where to
+      # load the file (this is similar to what System.loadLibrary() does).
+      #
+      # Unfortunately, at least in the context of this test, doing so while
+      # restarting the activity with the activity manager very, very often
+      # results in the system using the same load address for all 5 runs, or
+      # sometimes only 4 out of 5.
+      #
+      # This has been tested experimentally on both Android 4.1.2 and 4.3.
+      #
+      # Note that this behaviour doesn't seem to happen when starting an
+      # application 'normally', i.e. when using the application launcher to
+      # start the activity.
+      logging.info('Ignoring system\'s low randomization of browser libraries' +
+                   ' for regular devices')
+
+    if not renderer_status:
+      return ResultType.FAIL, renderer_logs
+
+    return ResultType.PASS, logs
diff --git a/build/android/pylib/linker/test_runner.py b/build/android/pylib/linker/test_runner.py
new file mode 100644
index 0000000..b6803e4
--- /dev/null
+++ b/build/android/pylib/linker/test_runner.py
@@ -0,0 +1,98 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs linker tests on a particular device."""
+
+import logging
+import os.path
+import sys
+import traceback
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.linker import test_case
+from pylib.utils import apk_helper
+
+
+# Name of the Android package to install for this to work.
+_PACKAGE_NAME = 'ChromiumLinkerTest'
+
+
+class LinkerExceptionTestResult(base_test_result.BaseTestResult):
+  """Test result corresponding to a python exception in a host-custom test."""
+
+  def __init__(self, test_name, exc_info):
+    """Constructs a LinkerExceptionTestResult object.
+
+    Args:
+      test_name: name of the test which raised an exception.
+      exc_info: exception info, ostensibly from sys.exc_info().
+    """
+    exc_type, exc_value, exc_traceback = exc_info
+    trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
+                                                    exc_traceback))
+    log_msg = 'Exception:\n' + trace_info
+
+    super(LinkerExceptionTestResult, self).__init__(
+        test_name,
+        base_test_result.ResultType.FAIL,
+        log="%s %s" % (exc_type, log_msg))
+
+
+class LinkerTestRunner(base_test_runner.BaseTestRunner):
+  """Orchestrates running a set of linker tests.
+
+  Any Python exceptions in the tests are caught and translated into a failed
+  result, rather than being re-raised on the main thread.
+  """
+
+  #override
+  def __init__(self, device, tool):
+    """Creates a new LinkerTestRunner.
+
+    Args:
+      device: Attached android device.
+      tool: Name of the Valgrind tool.
+    """
+    super(LinkerTestRunner, self).__init__(device, tool)
+
+  #override
+  def InstallTestPackage(self):
+    apk_path = os.path.join(
+        constants.GetOutDirectory(), 'apks', '%s.apk' % _PACKAGE_NAME)
+
+    if not os.path.exists(apk_path):
+      raise Exception('%s not found, please build it' % apk_path)
+
+    self.device.Install(apk_path)
+
+  #override
+  def RunTest(self, test):
+    """Sets up and runs a test case.
+
+    Args:
+      test: An object which is ostensibly a subclass of LinkerTestCaseBase.
+
+    Returns:
+      A TestRunResults object which contains the result produced by the test
+      and, in the case of a failure, the test that should be retried.
+    """
+
+    assert isinstance(test, test_case.LinkerTestCaseBase)
+
+    try:
+      results = test.Run(self.device)
+    except Exception:
+      logging.exception('Caught exception while trying to run test: ' +
+                        test.tagged_name)
+      exc_info = sys.exc_info()
+      results = base_test_result.TestRunResults()
+      results.AddResult(LinkerExceptionTestResult(
+          test.tagged_name, exc_info))
+
+    if not results.DidRunPass():
+      return results, test
+    else:
+      return results, None
diff --git a/build/android/pylib/local/__init__.py b/build/android/pylib/local/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/local/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/local/device/__init__.py b/build/android/pylib/local/device/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/local/device/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/local/device/local_device_environment.py b/build/android/pylib/local/device/local_device_environment.py
new file mode 100644
index 0000000..04f9ab7
--- /dev/null
+++ b/build/android/pylib/local/device/local_device_environment.py
@@ -0,0 +1,54 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import environment
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import parallelizer
+
+
+class LocalDeviceEnvironment(environment.Environment):
+
+  def __init__(self, args, _error_func):
+    super(LocalDeviceEnvironment, self).__init__()
+    self._device_serial = args.test_device
+    self._devices = []
+    self._max_tries = 1 + args.num_retries
+    self._tool_name = args.tool
+
+  #override
+  def SetUp(self):
+    available_devices = device_utils.DeviceUtils.HealthyDevices()
+    if not available_devices:
+      raise device_errors.NoDevicesError
+    if self._device_serial:
+      self._devices = [d for d in available_devices
+                       if d.adb.GetDeviceSerial() == self._device_serial]
+      if not self._devices:
+        raise device_errors.DeviceUnreachableError(
+            'Could not find device %r' % self._device_serial)
+    else:
+      self._devices = available_devices
+
+  @property
+  def devices(self):
+    return self._devices
+
+  @property
+  def parallel_devices(self):
+    return parallelizer.SyncParallelizer(self._devices)
+
+  @property
+  def max_tries(self):
+    return self._max_tries
+
+  @property
+  def tool(self):
+    return self._tool_name
+
+  #override
+  def TearDown(self):
+    pass
+
diff --git a/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/build/android/pylib/local/device/local_device_instrumentation_test_run.py
new file mode 100644
index 0000000..e388fce
--- /dev/null
+++ b/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -0,0 +1,207 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import re
+import time
+
+from pylib import flag_changer
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.constants import keyevent
+from pylib.device import device_errors
+from pylib.local.device import local_device_test_run
+
+
+TIMEOUT_ANNOTATIONS = [
+  ('Manual', 10 * 60 * 60),
+  ('IntegrationTest', 30 * 60),
+  ('External', 10 * 60),
+  ('EnormousTest', 10 * 60),
+  ('LargeTest', 5 * 60),
+  ('MediumTest', 3 * 60),
+  ('SmallTest', 1 * 60),
+]
+
+
+# TODO(jbudorick): Make this private once the instrumentation test_runner is
+# deprecated.
+def DidPackageCrashOnDevice(package_name, device):
+  # Dismiss any error dialogs. Limit the number in case we have an error
+  # loop or we are failing to dismiss.
+  try:
+    for _ in xrange(10):
+      package = _DismissCrashDialog(device)
+      if not package:
+        return False
+      # Assume test package convention of ".test" suffix
+      if package in package_name:
+        return True
+  except device_errors.CommandFailedError:
+    logging.exception('Error while attempting to dismiss crash dialog.')
+  return False
+
+
+_CURRENT_FOCUS_CRASH_RE = re.compile(
+    r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
+
+
+def _DismissCrashDialog(device):
+  # TODO(jbudorick): Try to grep the output on the device instead of using
+  # large_output if/when DeviceUtils exposes a public interface for piped
+  # shell command handling.
+  for l in device.RunShellCommand(
+      ['dumpsys', 'window', 'windows'], check_return=True, large_output=True):
+    m = re.match(_CURRENT_FOCUS_CRASH_RE, l)
+    if m:
+      device.SendKeyEvent(keyevent.KEYCODE_DPAD_RIGHT)
+      device.SendKeyEvent(keyevent.KEYCODE_DPAD_RIGHT)
+      device.SendKeyEvent(keyevent.KEYCODE_ENTER)
+      return m.group(2)
+
+  return None
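+
+# For reference, the regular expression above matches 'dumpsys window windows'
+# output lines of roughly this form (token and package are illustrative):
+#
+#   mCurrentFocus=Window{41815b18 Application Error: com.example.app.test}
+#
+# in which case _DismissCrashDialog returns 'com.example.app.test'.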
+
+
+class LocalDeviceInstrumentationTestRun(
+    local_device_test_run.LocalDeviceTestRun):
+  def __init__(self, env, test_instance):
+    super(LocalDeviceInstrumentationTestRun, self).__init__(env, test_instance)
+    self._flag_changers = {}
+
+  def TestPackage(self):
+    return None
+
+  def SetUp(self):
+    def substitute_external_storage(d, external_storage):
+      if not d:
+        return external_storage
+      elif isinstance(d, list):
+        return '/'.join(p if p else external_storage for p in d)
+      else:
+        return d
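+
+    # Example (values illustrative): with external_storage == '/sdcard' and
+    # d == ['', 'chrome', 'test', 'data'], the helper above returns
+    # '/sdcard/chrome/test/data'; a falsy d returns '/sdcard' itself.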
+
+    def individual_device_set_up(dev, host_device_tuples):
+      dev.Install(self._test_instance.apk_under_test)
+      dev.Install(self._test_instance.test_apk)
+
+      external_storage = dev.GetExternalStoragePath()
+      host_device_tuples = [
+          (h, substitute_external_storage(d, external_storage))
+          for h, d in host_device_tuples]
+      logging.info('instrumentation data deps:')
+      for h, d in host_device_tuples:
+        logging.info('%r -> %r', h, d)
+      dev.PushChangedFiles(host_device_tuples)
+      if self._test_instance.flags:
+        if not self._test_instance.package_info:
+          logging.error("Couldn't set flags: no package info")
+        elif not self._test_instance.package_info.cmdline_file:
+          logging.error("Couldn't set flags: no cmdline_file")
+        else:
+          self._flag_changers[str(dev)] = flag_changer.FlagChanger(
+              dev, self._test_instance.package_info.cmdline_file)
+          logging.debug('Attempting to set flags: %r',
+                        self._test_instance.flags)
+          self._flag_changers[str(dev)].AddFlags(self._test_instance.flags)
+
+    self._env.parallel_devices.pMap(
+        individual_device_set_up,
+        self._test_instance.GetDataDependencies())
+
+  def TearDown(self):
+    def individual_device_tear_down(dev):
+      if str(dev) in self._flag_changers:
+        self._flag_changers[str(dev)].Restore()
+
+    self._env.parallel_devices.pMap(individual_device_tear_down)
+
+  #override
+  def _CreateShards(self, tests):
+    return tests
+
+  #override
+  def _GetTests(self):
+    return self._test_instance.GetTests()
+
+  #override
+  def _GetTestName(self, test):
+    return '%s#%s' % (test['class'], test['method'])
+
+  #override
+  def _RunTest(self, device, test):
+    extras = self._test_instance.GetHttpServerEnvironmentVars()
+
+    if isinstance(test, list):
+      if not self._test_instance.driver_apk:
+        raise Exception('driver_apk does not exist. '
+                        'Please build it and try again.')
+
+      def name_and_timeout(t):
+        n = self._GetTestName(t)
+        i = self._GetTimeoutFromAnnotations(t['annotations'], n)
+        return (n, i)
+
+      test_names, timeouts = zip(*(name_and_timeout(t) for t in test))
+
+      test_name = ','.join(test_names)
+      target = '%s/%s' % (
+          self._test_instance.driver_package,
+          self._test_instance.driver_name)
+      extras.update(
+          self._test_instance.GetDriverEnvironmentVars(
+              test_list=test_names))
+      timeout = sum(timeouts)
+    else:
+      test_name = self._GetTestName(test)
+      target = '%s/%s' % (
+          self._test_instance.test_package, self._test_instance.test_runner)
+      extras['class'] = test_name
+      timeout = self._GetTimeoutFromAnnotations(test['annotations'], test_name)
+
+    logging.info('preparing to run %s: %s', test_name, test)
+
+    time_ms = lambda: int(time.time() * 1e3)
+    start_ms = time_ms()
+    output = device.StartInstrumentation(
+        target, raw=True, extras=extras, timeout=timeout, retries=0)
+    duration_ms = time_ms() - start_ms
+
+    # TODO(jbudorick): Make instrumentation tests output a JSON so this
+    # doesn't have to parse the output.
+    logging.debug('output from %s:', test_name)
+    for l in output:
+      logging.debug('  %s', l)
+
+    result_code, result_bundle, statuses = (
+        self._test_instance.ParseAmInstrumentRawOutput(output))
+    results = self._test_instance.GenerateTestResults(
+        result_code, result_bundle, statuses, start_ms, duration_ms)
+    if DidPackageCrashOnDevice(self._test_instance.test_package, device):
+      for r in results:
+        if r.GetType() == base_test_result.ResultType.UNKNOWN:
+          r.SetType(base_test_result.ResultType.CRASH)
+    return results
+
+  #override
+  def _ShouldShard(self):
+    return True
+
+  @staticmethod
+  def _GetTimeoutFromAnnotations(annotations, test_name):
+    # Note: the else clause of this for loop only runs when no annotation
+    # matched (i.e. the loop did not break).
+    for k, v in TIMEOUT_ANNOTATIONS:
+      if k in annotations:
+        timeout = v
+        break
+    else:
+      logging.warning('Using default 1 minute timeout for %s', test_name)
+      timeout = 60
+
+    try:
+      scale = int(annotations.get('TimeoutScale', 1))
+    except ValueError as e:
+      logging.warning("Non-integer value of TimeoutScale ignored. (%s)", str(e))
+      scale = 1
+    timeout *= scale
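+    # Worked example (illustrative): annotations containing 'LargeTest' and
+    # 'TimeoutScale': '2' select the 5 * 60 entry above and double it,
+    # yielding a 600 second timeout.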
+
+    return timeout
+
diff --git a/build/android/pylib/local/device/local_device_test_run.py b/build/android/pylib/local/device/local_device_test_run.py
new file mode 100644
index 0000000..fa24eb1
--- /dev/null
+++ b/build/android/pylib/local/device/local_device_test_run.py
@@ -0,0 +1,99 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.base import test_collection
+
+
+class LocalDeviceTestRun(test_run.TestRun):
+
+  def __init__(self, env, test_instance):
+    super(LocalDeviceTestRun, self).__init__(env, test_instance)
+    self._tools = {}
+
+  #override
+  def RunTests(self):
+    tests = self._GetTests()
+
+    def run_tests_on_device(dev, tests):
+      r = base_test_result.TestRunResults()
+      for test in tests:
+        result = self._RunTest(dev, test)
+        if isinstance(result, base_test_result.BaseTestResult):
+          r.AddResult(result)
+        elif isinstance(result, list):
+          r.AddResults(result)
+        else:
+          raise Exception('Unexpected result type: %s' % type(result).__name__)
+        if isinstance(tests, test_collection.TestCollection):
+          tests.test_completed()
+      return r
+
+    tries = 0
+    results = base_test_result.TestRunResults()
+    all_fail_results = {}
+    while tries < self._env.max_tries and tests:
+      logging.debug('try %d, will run %d tests:', tries, len(tests))
+      for t in tests:
+        logging.debug('  %s', t)
+
+      if self._ShouldShard():
+        tc = test_collection.TestCollection(self._CreateShards(tests))
+        try_results = self._env.parallel_devices.pMap(
+            run_tests_on_device, tc).pGet(None)
+      else:
+        try_results = self._env.parallel_devices.pMap(
+            run_tests_on_device, tests).pGet(None)
+      for try_result in try_results:
+        for result in try_result.GetAll():
+          if result.GetType() in (base_test_result.ResultType.PASS,
+                                  base_test_result.ResultType.SKIP):
+            results.AddResult(result)
+          else:
+            all_fail_results[result.GetName()] = result
+
+      results_names = set(r.GetName() for r in results.GetAll())
+      tests = [t for t in tests if self._GetTestName(t) not in results_names]
+      tries += 1
+
+    all_unknown_test_names = set(self._GetTestName(t) for t in tests)
+    all_failed_test_names = set(all_fail_results.iterkeys())
+
+    unknown_tests = all_unknown_test_names.difference(all_failed_test_names)
+    failed_tests = all_failed_test_names.intersection(all_unknown_test_names)
+
+    if unknown_tests:
+      results.AddResults(
+          base_test_result.BaseTestResult(
+              u, base_test_result.ResultType.UNKNOWN)
+          for u in unknown_tests)
+    if failed_tests:
+      results.AddResults(all_fail_results[f] for f in failed_tests)
+
+    return results
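+
+  # Retry semantics (illustrative): with max_tries == 3 and tests [A, B],
+  # if A passes and B fails on the first try, only B is re-run on later
+  # tries. A test that never passes keeps its most recent failure result;
+  # a test that never produced a result at all is reported as UNKNOWN.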
+
+  def GetTool(self, device):
+    if not str(device) in self._tools:
+      self._tools[str(device)] = valgrind_tools.CreateTool(
+          self._env.tool, device)
+    return self._tools[str(device)]
+
+  def _CreateShards(self, tests):
+    raise NotImplementedError
+
+  def _GetTestName(self, test):
+    return test
+
+  def _GetTests(self):
+    raise NotImplementedError
+
+  def _RunTest(self, device, test):
+    raise NotImplementedError
+
+  def _ShouldShard(self):
+    raise NotImplementedError
diff --git a/build/android/pylib/local/local_test_server_spawner.py b/build/android/pylib/local/local_test_server_spawner.py
new file mode 100644
index 0000000..77f552e
--- /dev/null
+++ b/build/android/pylib/local/local_test_server_spawner.py
@@ -0,0 +1,45 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import chrome_test_server_spawner
+from pylib import forwarder
+from pylib.base import test_server
+
+
+class LocalTestServerSpawner(test_server.TestServer):
+
+  def __init__(self, port, device, tool):
+    super(LocalTestServerSpawner, self).__init__()
+    self._device = device
+    self._spawning_server = chrome_test_server_spawner.SpawningServer(
+        port, device, tool)
+    self._tool = tool
+
+  @property
+  def server_address(self):
+    return self._spawning_server.server.server_address
+
+  @property
+  def port(self):
+    return self.server_address[1]
+
+  #override
+  def SetUp(self):
+    self._device.WriteFile(
+        '%s/net-test-server-ports' % self._device.GetExternalStoragePath(),
+        '%s:0' % str(self.port))
+    forwarder.Forwarder.Map(
+        [(self.port, self.port)], self._device, self._tool)
+    self._spawning_server.Start()
+
+  #override
+  def Reset(self):
+    self._spawning_server.CleanupState()
+
+  #override
+  def TearDown(self):
+    self.Reset()
+    self._spawning_server.Stop()
+    forwarder.Forwarder.UnmapDevicePort(self.port, self._device)
+
diff --git a/build/android/pylib/monkey/__init__.py b/build/android/pylib/monkey/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/monkey/__init__.py
diff --git a/build/android/pylib/monkey/setup.py b/build/android/pylib/monkey/setup.py
new file mode 100644
index 0000000..fe690a5
--- /dev/null
+++ b/build/android/pylib/monkey/setup.py
@@ -0,0 +1,27 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for monkey tests."""
+
+from pylib.monkey import test_runner
+
+
+def Setup(test_options):
+  """Create and return the test runner factory and tests.
+
+  Args:
+    test_options: A MonkeyOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  # Token to replicate across devices as the "test". The TestRunner does all of
+  # the work to run the test.
+  tests = ['MonkeyTest']
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(
+        test_options, device, shard_index)
+
+  return (TestRunnerFactory, tests)
diff --git a/build/android/pylib/monkey/test_options.py b/build/android/pylib/monkey/test_options.py
new file mode 100644
index 0000000..54d3d08
--- /dev/null
+++ b/build/android/pylib/monkey/test_options.py
@@ -0,0 +1,16 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the MonkeyOptions named tuple."""
+
+import collections
+
+MonkeyOptions = collections.namedtuple('MonkeyOptions', [
+    'verbose_count',
+    'package',
+    'event_count',
+    'category',
+    'throttle',
+    'seed',
+    'extra_args'])
diff --git a/build/android/pylib/monkey/test_runner.py b/build/android/pylib/monkey/test_runner.py
new file mode 100644
index 0000000..3fd1797
--- /dev/null
+++ b/build/android/pylib/monkey/test_runner.py
@@ -0,0 +1,106 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs a monkey test on a single device."""
+
+import logging
+import random
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.device import device_errors
+from pylib.device import intent
+
+_CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  """A TestRunner instance runs a monkey test on a single device."""
+
+  def __init__(self, test_options, device, _):
+    super(TestRunner, self).__init__(device, None)
+    self._options = test_options
+    self._package = constants.PACKAGE_INFO[self._options.package].package
+    self._activity = constants.PACKAGE_INFO[self._options.package].activity
+
+  def _LaunchMonkeyTest(self):
+    """Runs monkey test for a given package.
+
+    Returns:
+      Output from the monkey command on the device.
+    """
+
+    timeout_ms = self._options.event_count * self._options.throttle * 1.5
+
+    cmd = ['monkey',
+           '-p %s' % self._package,
+           ' '.join(['-c %s' % c for c in self._options.category]),
+           '--throttle %d' % self._options.throttle,
+           '-s %d' % (self._options.seed or random.randint(1, 100)),
+           '-v ' * self._options.verbose_count,
+           '--monitor-native-crashes',
+           '--kill-process-after-error',
+           self._options.extra_args,
+           '%d' % self._options.event_count]
+    return self.device.RunShellCommand(' '.join(cmd), timeout=timeout_ms)
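+
+  # An assembled command looks roughly like this (package, seed and counts
+  # are illustrative):
+  #
+  #   monkey -p com.google.android.apps.chrome \
+  #       -c android.intent.category.LAUNCHER --throttle 100 -s 42 -v -v \
+  #       --monitor-native-crashes --kill-process-after-error 10000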
+
+  def RunTest(self, test_name):
+    """Run a Monkey test on the device.
+
+    Args:
+      test_name: String to use for logging the test result.
+
+    Returns:
+      A tuple of (TestRunResults, retry).
+    """
+    self.device.StartActivity(
+        intent.Intent(package=self._package, activity=self._activity,
+                      action='android.intent.action.MAIN'),
+        blocking=True, force_stop=True)
+
+    # Chrome crashes are not always caught by Monkey test runner.
+    # Verify Chrome has the same PID before and after the test.
+    before_pids = self.device.GetPids(self._package)
+
+    # Run the test.
+    output = ''
+    if before_pids:
+      output = '\n'.join(self._LaunchMonkeyTest())
+      after_pids = self.device.GetPids(self._package)
+
+    crashed = True
+    if self._package not in before_pids:
+      logging.error('Failed to start the process.')
+    elif self._package not in after_pids:
+      logging.error('Process %s has died.', before_pids[self._package])
+    elif before_pids[self._package] != after_pids[self._package]:
+      logging.error('Detected process restart %s -> %s',
+                    before_pids[self._package], after_pids[self._package])
+    else:
+      crashed = False
+
+    results = base_test_result.TestRunResults()
+    success_pattern = 'Events injected: %d' % self._options.event_count
+    if success_pattern in output and not crashed:
+      result = base_test_result.BaseTestResult(
+          test_name, base_test_result.ResultType.PASS, log=output)
+    else:
+      result = base_test_result.BaseTestResult(
+          test_name, base_test_result.ResultType.FAIL, log=output)
+      if 'chrome' in self._options.package:
+        logging.warning('Starting MinidumpUploadService...')
+        # TODO(jbudorick): Update this after upstreaming.
+        minidump_intent = intent.Intent(
+            action='%s.crash.ACTION_FIND_ALL' % _CHROME_PACKAGE,
+            package=self._package,
+            activity='%s.crash.MinidumpUploadService' % _CHROME_PACKAGE)
+        try:
+          self.device.RunShellCommand(
+              ['am', 'startservice'] + minidump_intent.am_args,
+              as_root=True, check_return=True)
+        except device_errors.CommandFailedError:
+          logging.exception('Failed to start MinidumpUploadService')
+
+    results.AddResult(result)
+    return results, False
diff --git a/build/android/pylib/perf/__init__.py b/build/android/pylib/perf/__init__.py
new file mode 100644
index 0000000..9228df8
--- /dev/null
+++ b/build/android/pylib/perf/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/perf/cache_control.py b/build/android/pylib/perf/cache_control.py
new file mode 100644
index 0000000..8065cf9
--- /dev/null
+++ b/build/android/pylib/perf/cache_control.py
@@ -0,0 +1,21 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import android_commands
+from pylib.device import device_utils
+
+class CacheControl(object):
+  _DROP_CACHES = '/proc/sys/vm/drop_caches'
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+
+  def DropRamCaches(self):
+    """Drops the filesystem ram caches for performance testing."""
+    self._device.RunShellCommand('sync', as_root=True)
+    self._device.WriteFile(CacheControl._DROP_CACHES, '3', as_root=True)
+
diff --git a/build/android/pylib/perf/perf_control.py b/build/android/pylib/perf/perf_control.py
new file mode 100644
index 0000000..f89f397
--- /dev/null
+++ b/build/android/pylib/perf/perf_control.py
@@ -0,0 +1,161 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import atexit
+import logging
+
+from pylib import android_commands
+from pylib.device import device_errors
+from pylib.device import device_utils
+
+
+class PerfControl(object):
+  """Provides methods for setting the performance mode of a device."""
+  _CPU_PATH = '/sys/devices/system/cpu'
+  _KERNEL_MAX = '/sys/devices/system/cpu/kernel_max'
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+    # this will raise an AdbCommandFailedError if no CPU files are found
+    self._cpu_files = self._device.RunShellCommand(
+        'ls -d cpu[0-9]*', cwd=self._CPU_PATH, check_return=True, as_root=True)
+    assert self._cpu_files, 'Failed to detect CPUs.'
+    self._cpu_file_list = ' '.join(self._cpu_files)
+    logging.info('CPUs found: %s', self._cpu_file_list)
+    self._have_mpdecision = self._device.FileExists('/system/bin/mpdecision')
+
+  def SetHighPerfMode(self):
+    """Sets the highest stable performance mode for the device."""
+    try:
+      self._device.EnableRoot()
+    except device_errors.CommandFailedError:
+      message = 'Need root for performance mode. Results may be NOISY!!'
+      logging.warning(message)
+      # Add an additional warning at exit, such that it's clear that any results
+      # may be different/noisy (due to the lack of intended performance mode).
+      atexit.register(logging.warning, message)
+      return
+
+    product_model = self._device.product_model
+    # TODO(epenner): Enable on all devices (http://crbug.com/383566)
+    if 'Nexus 4' == product_model:
+      self._ForceAllCpusOnline(True)
+      if not self._AllCpusAreOnline():
+        logging.warning('Failed to force CPUs online. Results may be NOISY!')
+      self._SetScalingGovernorInternal('performance')
+    elif 'Nexus 5' == product_model:
+      self._ForceAllCpusOnline(True)
+      if not self._AllCpusAreOnline():
+        logging.warning('Failed to force CPUs online. Results may be NOISY!')
+      self._SetScalingGovernorInternal('performance')
+      self._SetScalingMaxFreq(1190400)
+      self._SetMaxGpuClock(200000000)
+    else:
+      self._SetScalingGovernorInternal('performance')
+
+  def SetPerfProfilingMode(self):
+    """Enables all cores for reliable perf profiling."""
+    self._ForceAllCpusOnline(True)
+    self._SetScalingGovernorInternal('performance')
+    if not self._AllCpusAreOnline():
+      if not self._device.HasRoot():
+        raise RuntimeError('Need root to force CPUs online.')
+      raise RuntimeError('Failed to force CPUs online.')
+
+  def SetDefaultPerfMode(self):
+    """Sets the performance mode for the device to its default mode."""
+    if not self._device.HasRoot():
+      return
+    product_model = self._device.product_model
+    if 'Nexus 5' == product_model:
+      if self._AllCpusAreOnline():
+        self._SetScalingMaxFreq(2265600)
+        self._SetMaxGpuClock(450000000)
+
+    governor_mode = {
+        'GT-I9300': 'pegasusq',
+        'Galaxy Nexus': 'interactive',
+        'Nexus 4': 'ondemand',
+        'Nexus 5': 'ondemand',
+        'Nexus 7': 'interactive',
+        'Nexus 10': 'interactive'
+    }.get(product_model, 'ondemand')
+    self._SetScalingGovernorInternal(governor_mode)
+    self._ForceAllCpusOnline(False)
+
+  def GetCpuInfo(self):
+    online = (output.rstrip() == '1' and status == 0
+              for (_, output, status) in self._ForEachCpu('cat "$CPU/online"'))
+    governor = (output.rstrip() if status == 0 else None
+                for (_, output, status)
+                in self._ForEachCpu('cat "$CPU/cpufreq/scaling_governor"'))
+    return zip(self._cpu_files, online, governor)
+
+  def _ForEachCpu(self, cmd):
+    script = '; '.join([
+        'for CPU in %s' % self._cpu_file_list,
+        'do %s' % cmd,
+        'echo -n "%~%$?%~%"',
+        'done'
+    ])
+    output = self._device.RunShellCommand(
+        script, cwd=self._CPU_PATH, check_return=True, as_root=True)
+    output = '\n'.join(output).split('%~%')
+    return zip(self._cpu_files, output[0::2], (int(c) for c in output[1::2]))
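+
+  # For reference, with cpu0 and cpu1 the generated script is:
+  #
+  #   for CPU in cpu0 cpu1; do cat "$CPU/online"; echo -n "%~%$?%~%"; done
+  #
+  # Joining the raw output and splitting on '%~%' yields alternating command
+  # output and exit status, e.g. ['1\n', '0', '1\n', '0', ''], which is
+  # zipped back to [('cpu0', '1\n', 0), ('cpu1', '1\n', 0)].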
+
+  def _WriteEachCpuFile(self, path, value):
+    results = self._ForEachCpu(
+        'test -e "$CPU/{path}" && echo {value} > "$CPU/{path}"'.format(
+            path=path, value=value))
+    cpus = ' '.join(cpu for (cpu, _, status) in results if status == 0)
+    if cpus:
+      logging.info('Successfully set %s to %r on: %s', path, value, cpus)
+    else:
+      logging.warning('Failed to set %s to %r on any cpus.', path, value)
+
+  def _SetScalingGovernorInternal(self, value):
+    self._WriteEachCpuFile('cpufreq/scaling_governor', value)
+
+  def _SetScalingMaxFreq(self, value):
+    self._WriteEachCpuFile('cpufreq/scaling_max_freq', '%d' % value)
+
+  def _SetMaxGpuClock(self, value):
+    self._device.WriteFile('/sys/class/kgsl/kgsl-3d0/max_gpuclk',
+                           str(value),
+                           as_root=True)
+
+  def _AllCpusAreOnline(self):
+    results = self._ForEachCpu('cat "$CPU/online"')
+    # TODO(epenner): Investigate why file may be missing
+    # (http://crbug.com/397118)
+    return all(output.rstrip() == '1' and status == 0
+               for (cpu, output, status) in results
+               if cpu != 'cpu0')
+
+  def _ForceAllCpusOnline(self, force_online):
+    """Enable all CPUs on a device.
+
+    Some vendors (or only Qualcomm?) hot-plug their CPUs, which can add noise
+    to measurements:
+    - In perf, samples are only taken for the CPUs that are online when the
+      measurement is started.
+    - The scaling governor can't be set for an offline CPU and frequency scaling
+      on newly enabled CPUs adds noise to both perf and tracing measurements.
+
+    It appears Qualcomm is the only vendor that hot-plugs CPUs, and on Qualcomm
+    this is done by "mpdecision".
+
+    """
+    if self._have_mpdecision:
+      script = 'stop mpdecision' if force_online else 'start mpdecision'
+      self._device.RunShellCommand(script, check_return=True, as_root=True)
+
+    if not self._have_mpdecision and not self._AllCpusAreOnline():
+      logging.warning('Unexpected cpu hot plugging detected.')
+
+    if force_online:
+      self._ForEachCpu('echo 1 > "$CPU/online"')
diff --git a/build/android/pylib/perf/perf_control_unittest.py b/build/android/pylib/perf/perf_control_unittest.py
new file mode 100644
index 0000000..69b8b46
--- /dev/null
+++ b/build/android/pylib/perf/perf_control_unittest.py
@@ -0,0 +1,37 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# pylint: disable=W0212
+
+import os
+import sys
+import unittest
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+
+from pylib.device import device_utils
+from pylib.perf import perf_control
+
+class TestPerfControl(unittest.TestCase):
+  def setUp(self):
+    if not os.getenv('BUILDTYPE'):
+      os.environ['BUILDTYPE'] = 'Debug'
+
+    devices = device_utils.DeviceUtils.HealthyDevices()
+    self.assertGreater(len(devices), 0, 'No device attached!')
+    self._device = devices[0]
+
+  def testHighPerfMode(self):
+    perf = perf_control.PerfControl(self._device)
+    try:
+      perf.SetPerfProfilingMode()
+      cpu_info = perf.GetCpuInfo()
+      self.assertEquals(len(perf._cpu_files), len(cpu_info))
+      for _, online, governor in cpu_info:
+        self.assertTrue(online)
+        self.assertEquals('performance', governor)
+    finally:
+      perf.SetDefaultPerfMode()
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/perf/setup.py b/build/android/pylib/perf/setup.py
new file mode 100644
index 0000000..8e1fc28
--- /dev/null
+++ b/build/android/pylib/perf/setup.py
@@ -0,0 +1,97 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for performance tests."""
+
+import fnmatch
+import json
+import logging
+import os
+import shutil
+
+from pylib import constants
+from pylib import forwarder
+from pylib.device import device_list
+from pylib.device import device_utils
+from pylib.perf import test_runner
+from pylib.utils import test_environment
+
+
+def _GetAllDevices():
+  devices_path = os.path.join(os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+                              device_list.LAST_DEVICES_FILENAME)
+  try:
+    devices = [device_utils.DeviceUtils(s)
+               for s in device_list.GetPersistentDeviceList(devices_path)]
+  except IOError as e:
+    logging.error('Unable to find %s [%s]', devices_path, e)
+    devices = device_utils.DeviceUtils.HealthyDevices()
+  return sorted(devices)
+
+
+def _GetStepsDictFromSingleStep(test_options):
+  # Running a single command; build the steps structure around it.
+  steps_dict = {
+    'version': 1,
+    'steps': {
+        'single_step': {
+          'device_affinity': 0,
+          'cmd': test_options.single_step
+        },
+    }
+  }
+  return steps_dict
+
+
+def _GetStepsDict(test_options):
+  if test_options.single_step:
+    return _GetStepsDictFromSingleStep(test_options)
+  if test_options.steps:
+    with file(test_options.steps, 'r') as f:
+      steps = json.load(f)
+
+      # Already using the new format.
+      assert steps['version'] == 1
+      return steps
+
+
+def Setup(test_options):
+  """Create and return the test runner factory and tests.
+
+  Args:
+    test_options: A PerformanceOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests, devices).
+  """
+  # TODO(bulach): remove this once the bot side lands. BUG=318369
+  constants.SetBuildType('Release')
+  if os.path.exists(constants.PERF_OUTPUT_DIR):
+    shutil.rmtree(constants.PERF_OUTPUT_DIR)
+  os.makedirs(constants.PERF_OUTPUT_DIR)
+
+  # Before running the tests, kill any leftover server.
+  test_environment.CleanupLeftoverProcesses()
+
+  # We want to keep device affinity, so return all devices ever seen.
+  all_devices = _GetAllDevices()
+
+  steps_dict = _GetStepsDict(test_options)
+  sorted_step_names = sorted(steps_dict['steps'].keys())
+
+  if test_options.test_filter:
+    sorted_step_names = fnmatch.filter(sorted_step_names,
+                                       test_options.test_filter)
+
+  flaky_steps = []
+  if test_options.flaky_steps:
+    with file(test_options.flaky_steps, 'r') as f:
+      flaky_steps = json.load(f)
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(
+        test_options, device, shard_index, len(all_devices),
+        steps_dict, flaky_steps)
+
+  return (TestRunnerFactory, sorted_step_names, all_devices)
diff --git a/build/android/pylib/perf/surface_stats_collector.py b/build/android/pylib/perf/surface_stats_collector.py
new file mode 100644
index 0000000..c7e7527
--- /dev/null
+++ b/build/android/pylib/perf/surface_stats_collector.py
@@ -0,0 +1,191 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import Queue
+import datetime
+import logging
+import re
+import threading
+from pylib import android_commands
+from pylib.device import device_utils
+
+
+# Log marker containing SurfaceTexture timestamps.
+_SURFACE_TEXTURE_TIMESTAMPS_MESSAGE = 'SurfaceTexture update timestamps'
+_SURFACE_TEXTURE_TIMESTAMP_RE = r'\d+'
+
+
+class SurfaceStatsCollector(object):
+  """Collects surface stats for a SurfaceView from the output of SurfaceFlinger.
+
+  Args:
+    device: A DeviceUtils instance.
+  """
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+    self._collector_thread = None
+    self._surface_before = None
+    self._get_data_event = None
+    self._data_queue = None
+    self._stop_event = None
+    self._warn_about_empty_data = True
+
+  def DisableWarningAboutEmptyData(self):
+    self._warn_about_empty_data = False
+
+  def Start(self):
+    assert not self._collector_thread
+
+    if self._ClearSurfaceFlingerLatencyData():
+      self._get_data_event = threading.Event()
+      self._stop_event = threading.Event()
+      self._data_queue = Queue.Queue()
+      self._collector_thread = threading.Thread(target=self._CollectorThread)
+      self._collector_thread.start()
+    else:
+      raise Exception('SurfaceFlinger not supported on this device.')
+
+  def Stop(self):
+    assert self._collector_thread
+    (refresh_period, timestamps) = self._GetDataFromThread()
+    if self._collector_thread:
+      self._stop_event.set()
+      self._collector_thread.join()
+      self._collector_thread = None
+    return (refresh_period, timestamps)
+
+  def _CollectorThread(self):
+    last_timestamp = 0
+    timestamps = []
+    retries = 0
+
+    while not self._stop_event.is_set():
+      self._get_data_event.wait(1)
+      try:
+        refresh_period, new_timestamps = self._GetSurfaceFlingerFrameData()
+        if refresh_period is None or new_timestamps is None:
+          retries += 1
+          if retries < 3:
+            continue
+          if last_timestamp:
+            # Some data has already been collected, but either the app
+            # was closed or there's no new data. Signal the main thread and
+            # wait.
+            self._data_queue.put((None, None))
+            self._stop_event.wait()
+            break
+          raise Exception('Unable to get surface flinger latency data')
+
+        timestamps += [timestamp for timestamp in new_timestamps
+                       if timestamp > last_timestamp]
+        if timestamps:
+          last_timestamp = timestamps[-1]
+
+        if self._get_data_event.is_set():
+          self._get_data_event.clear()
+          self._data_queue.put((refresh_period, timestamps))
+          timestamps = []
+      except Exception as e:
+        # On any error, before aborting, put the exception into _data_queue to
+        # prevent the main thread from waiting at _data_queue.get() infinitely.
+        self._data_queue.put(e)
+        raise
+
+  def _GetDataFromThread(self):
+    self._get_data_event.set()
+    ret = self._data_queue.get()
+    if isinstance(ret, Exception):
+      raise ret
+    return ret
+
+  def _ClearSurfaceFlingerLatencyData(self):
+    """Clears the SurfaceFlinger latency data.
+
+    Returns:
+      True if SurfaceFlinger latency is supported by the device, otherwise
+      False.
+    """
+    # The command returns nothing if it is supported, otherwise returns many
+    # lines of result just like 'dumpsys SurfaceFlinger'.
+    results = self._device.RunShellCommand(
+        'dumpsys SurfaceFlinger --latency-clear SurfaceView')
+    return not results
+
+  def GetSurfaceFlingerPid(self):
+    results = self._device.RunShellCommand('ps | grep surfaceflinger')
+    if not results:
+      raise Exception('Unable to get surface flinger process id')
+    pid = results[0].split()[1]
+    return pid
+
+  def _GetSurfaceFlingerFrameData(self):
+    """Returns collected SurfaceFlinger frame timing data.
+
+    Returns:
+      A tuple containing:
+      - The display's nominal refresh period in milliseconds.
+      - A list of timestamps signifying frame presentation times in
+        milliseconds.
+      The return value may be (None, None) if there was no data collected (for
+      example, if the app was closed before the collector thread has finished).
+    """
+    # adb shell dumpsys SurfaceFlinger --latency <window name>
+    # prints some information about the last 128 frames displayed in
+    # that window.
+    # The data returned looks like this:
+    # 16954612
+    # 7657467895508   7657482691352   7657493499756
+    # 7657484466553   7657499645964   7657511077881
+    # 7657500793457   7657516600576   7657527404785
+    # (...)
+    #
+    # The first line is the refresh period (here 16.95 ms); it is followed
+    # by 128 lines with three timestamps, in nanoseconds, each:
+    # A) when the app started to draw
+    # B) the vsync immediately preceding SF submitting the frame to the h/w
+    # C) timestamp immediately after SF submitted that frame to the h/w
+    #
+    # The difference between the 1st and 3rd timestamp is the frame latency.
+    # What is interesting is when the frame latency crosses a refresh-period
+    # boundary, which can be calculated as:
+    #
+    # ceil((C - A) / refresh-period)
+    #
+    # (each time the number above changes, we have a "jank").
+    # If this happens a lot during an animation, the animation appears
+    # janky, even if it runs at 60 fps on average.
+    #
+    # We use the special "SurfaceView" window name because the statistics for
+    # the activity's main window are not updated when the main web content is
+    # composited into a SurfaceView.
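+    #
+    # Worked example with the sample data above (a sketch, not used by the
+    # parsing below): C - A = 7657493499756 - 7657467895508 = 25604248 ns,
+    # i.e. about 25.6 ms, so ceil(25.6 / 16.95) = 2: that frame spanned two
+    # refresh periods. If the previous frame's value was 1, this transition
+    # counts as one jank.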
+    results = self._device.RunShellCommand(
+        'dumpsys SurfaceFlinger --latency SurfaceView')
+    if not results:
+      return (None, None)
+
+    timestamps = []
+    nanoseconds_per_millisecond = 1e6
+    refresh_period = long(results[0]) / nanoseconds_per_millisecond
+
+    # If a fence associated with a frame is still pending when we query the
+    # latency data, SurfaceFlinger gives the frame a timestamp of INT64_MAX.
+    # Since we only care about completed frames, we will ignore any timestamps
+    # with this value.
+    pending_fence_timestamp = (1 << 63) - 1
+
+    for line in results[1:]:
+      fields = line.split()
+      if len(fields) != 3:
+        continue
+      timestamp = long(fields[1])
+      if timestamp == pending_fence_timestamp:
+        continue
+      timestamp /= nanoseconds_per_millisecond
+      timestamps.append(timestamp)
+
+    return (refresh_period, timestamps)
diff --git a/build/android/pylib/perf/surface_stats_collector_unittest.py b/build/android/pylib/perf/surface_stats_collector_unittest.py
new file mode 100644
index 0000000..e905d73
--- /dev/null
+++ b/build/android/pylib/perf/surface_stats_collector_unittest.py
@@ -0,0 +1,64 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for SurfaceStatsCollector."""
+# pylint: disable=W0212
+
+import unittest
+
+from pylib.perf.surface_stats_collector import SurfaceStatsCollector
+
+
+class TestSurfaceStatsCollector(unittest.TestCase):
+  @staticmethod
+  def _CreateUniformTimestamps(base, num, delta):
+    return [base + i * delta for i in range(1, num + 1)]
+
+  @staticmethod
+  def _CreateDictionaryFromResults(results):
+    dictionary = {}
+    for result in results:
+      dictionary[result.name] = result
+    return dictionary
+
+  def setUp(self):
+    self.refresh_period = 0.1
+
+  def testOneFrameDelta(self):
+    timestamps = self._CreateUniformTimestamps(0, 10, self.refresh_period)
+    results = self._CreateDictionaryFromResults(
+                  SurfaceStatsCollector._CalculateResults(
+                      self.refresh_period, timestamps, ''))
+
+    self.assertEquals(results['avg_surface_fps'].value,
+                      int(round(1 / self.refresh_period)))
+    self.assertEquals(results['jank_count'].value, 0)
+    self.assertEquals(results['max_frame_delay'].value, 1)
+    self.assertEquals(len(results['frame_lengths'].value), len(timestamps) - 1)
+
+  def testAllFramesTooShort(self):
+    timestamps = self._CreateUniformTimestamps(0, 10, self.refresh_period / 100)
+    self.assertRaises(Exception,
+                      SurfaceStatsCollector._CalculateResults,
+                      self.refresh_period, timestamps, '')
+
+  def testSomeFramesTooShort(self):
+    timestamps = self._CreateUniformTimestamps(0, 5, self.refresh_period)
+    # The following timestamps should be skipped.
+    timestamps += self._CreateUniformTimestamps(timestamps[4],
+                                                5,
+                                                self.refresh_period / 100)
+    timestamps += self._CreateUniformTimestamps(timestamps[4],
+                                                5,
+                                                self.refresh_period)
+
+    results = self._CreateDictionaryFromResults(
+                  SurfaceStatsCollector._CalculateResults(
+                      self.refresh_period, timestamps, ''))
+
+    self.assertEquals(len(results['frame_lengths'].value), 9)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/perf/test_options.py b/build/android/pylib/perf/test_options.py
new file mode 100644
index 0000000..eff928e
--- /dev/null
+++ b/build/android/pylib/perf/test_options.py
@@ -0,0 +1,22 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the PerfOptions named tuple."""
+
+import collections
+
+PerfOptions = collections.namedtuple('PerfOptions', [
+    'steps',
+    'flaky_steps',
+    'output_json_list',
+    'print_step',
+    'no_timeout',
+    'test_filter',
+    'dry_run',
+    'single_step',
+    'collect_chartjson_data',
+    'output_chartjson_data',
+    'max_battery_temp',
+    'min_battery_level',
+])
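+
+# The values are parsed from command-line options elsewhere. A hypothetical
+# instance for a single-step run might look like:
+#   PerfOptions(steps=None, flaky_steps=None, output_json_list=None,
+#               print_step=None, no_timeout=False, test_filter=None,
+#               dry_run=False, single_step='tools/perf/run_benchmark foo',
+#               collect_chartjson_data=False, output_chartjson_data=None,
+#               max_battery_temp=None, min_battery_level=None)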
diff --git a/build/android/pylib/perf/test_runner.py b/build/android/pylib/perf/test_runner.py
new file mode 100644
index 0000000..d21a9b7
--- /dev/null
+++ b/build/android/pylib/perf/test_runner.py
@@ -0,0 +1,374 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs perf tests.
+
+Our buildbot infrastructure requires each slave to run steps serially.
+This is sub-optimal for android, where these steps can run independently on
+multiple connected devices.
+
+The buildbots will run this script multiple times per cycle:
+- First: all steps listed in --steps will be executed in parallel using all
+connected devices. Step results will be pickled to disk. Each step has a unique
+name. The result code will be ignored if the step name is listed in
+--flaky-steps.
+The buildbot will treat this step as a regular step, and will not process any
+graph data.
+
+- Then, with --print-step STEP_NAME: at this stage, we'll simply print the file
+with the step results previously saved. The buildbot will then process the graph
+data accordingly.
+
+The JSON steps file contains a dictionary in the format:
+{ "version": int,
+  "steps": {
+    "foo": {
+      "device_affinity": int,
+      "cmd": "script_to_execute foo"
+    },
+    "bar": {
+      "device_affinity": int,
+      "cmd": "script_to_execute bar"
+    }
+  }
+}
+
+The JSON flaky steps file contains a list of step names whose results should
+be ignored:
+[
+  "step_name_foo",
+  "step_name_bar"
+]
+
+Note that script_to_execute must accept at least the following option:
+  --device: the serial number to be passed to all adb commands.
+"""
+
+import collections
+import datetime
+import json
+import logging
+import os
+import pickle
+import shutil
+import sys
+import tempfile
+import threading
+import time
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import forwarder
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.device import battery_utils
+from pylib.device import device_errors
+
+
+def GetPersistedResult(test_name):
+  file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name)
+  if not os.path.exists(file_name):
+    logging.error('File not found %s', file_name)
+    return None
+
+  with file(file_name, 'r') as f:
+    return pickle.loads(f.read())
+
+
+def OutputJsonList(json_input, json_output):
+  with file(json_input, 'r') as i:
+    all_steps = json.load(i)
+
+  step_values = []
+  for k, v in all_steps['steps'].iteritems():
+    data = {'test': k, 'device_affinity': v['device_affinity']}
+
+    persisted_result = GetPersistedResult(k)
+    if persisted_result:
+      data['total_time'] = persisted_result['total_time']
+    step_values.append(data)
+
+  with file(json_output, 'w') as o:
+    o.write(json.dumps(step_values))
+  return 0
+
+
+def PrintTestOutput(test_name, json_file_name=None):
+  """Helper method to print the output of previously executed test_name.
+
+  Args:
+    test_name: name of the test that has been previously executed.
+    json_file_name: name of the file to output chartjson data to.
+
+  Returns:
+    exit code generated by the test step.
+  """
+  persisted_result = GetPersistedResult(test_name)
+  if not persisted_result:
+    return 1
+  logging.info('*' * 80)
+  logging.info('Output from:')
+  logging.info(persisted_result['cmd'])
+  logging.info('*' * 80)
+  print persisted_result['output']
+
+  if json_file_name:
+    with file(json_file_name, 'w') as f:
+      f.write(persisted_result['chartjson'])
+
+  return persisted_result['exit_code']
+
+
+def PrintSummary(test_names):
+  logging.info('*' * 80)
+  logging.info('Sharding summary')
+  device_total_time = collections.defaultdict(int)
+  for test_name in test_names:
+    file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name)
+    if not os.path.exists(file_name):
+      logging.info('%s : No status file found', test_name)
+      continue
+    with file(file_name, 'r') as f:
+      result = pickle.loads(f.read())
+    logging.info('%s : exit_code=%d in %d secs at %s',
+                 result['name'], result['exit_code'], result['total_time'],
+                 result['device'])
+    device_total_time[result['device']] += result['total_time']
+  for device, device_time in device_total_time.iteritems():
+    logging.info('Total for device %s : %d secs', device, device_time)
+  logging.info('Total steps time: %d secs', sum(device_total_time.values()))
+
+
+class _HeartBeatLogger(object):
+  # How often to print the heartbeat on flush().
+  _PRINT_INTERVAL = 30.0
+
+  def __init__(self):
+    """A file-like class for keeping the buildbot alive."""
+    self._len = 0
+    self._tick = time.time()
+    self._stopped = threading.Event()
+    self._timer = threading.Thread(target=self._runner)
+    self._timer.start()
+
+  def _runner(self):
+    while not self._stopped.is_set():
+      self.flush()
+      self._stopped.wait(_HeartBeatLogger._PRINT_INTERVAL)
+
+  def write(self, data):
+    self._len += len(data)
+
+  def flush(self):
+    now = time.time()
+    if now - self._tick >= _HeartBeatLogger._PRINT_INTERVAL:
+      self._tick = now
+      print '--single-step output length %d' % self._len
+      sys.stdout.flush()
+
+  def stop(self):
+    self._stopped.set()
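+
+# _HeartBeatLogger is passed as the `logfile` to cmd_helper in
+# _LaunchPerfTest below: write() only accumulates the output length, and the
+# background thread periodically flush()es a short status line so the outer
+# buildbot sees activity. A sketch of that pattern:
+#   logfile = _HeartBeatLogger()
+#   try:
+#     ... run the long step, writing its output to logfile ...
+#   finally:
+#     logfile.stop()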
+
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  def __init__(self, test_options, device, shard_index, max_shard, tests,
+      flaky_tests):
+    """A TestRunner instance runs a perf test on a single device.
+
+    Args:
+      test_options: A PerfOptions object.
+      device: Device to run the tests on.
+      shard_index: the index of this device.
+      max_shard: the total number of shards.
+      tests: a dict mapping test_name to command.
+      flaky_tests: a list of flaky test_names.
+    """
+    super(TestRunner, self).__init__(device, None)
+    self._options = test_options
+    self._shard_index = shard_index
+    self._max_shard = max_shard
+    self._tests = tests
+    self._flaky_tests = flaky_tests
+    self._output_dir = None
+    self._device_battery = battery_utils.BatteryUtils(self.device)
+
+  @staticmethod
+  def _IsBetter(result):
+    if result['actual_exit_code'] == 0:
+      return True
+    pickled = os.path.join(constants.PERF_OUTPUT_DIR,
+                           result['name'])
+    if not os.path.exists(pickled):
+      return True
+    with file(pickled, 'r') as f:
+      previous = pickle.loads(f.read())
+    return result['actual_exit_code'] < previous['actual_exit_code']
+
+  @staticmethod
+  def _SaveResult(result):
+    if TestRunner._IsBetter(result):
+      with file(os.path.join(constants.PERF_OUTPUT_DIR,
+                             result['name']), 'w') as f:
+        f.write(pickle.dumps(result))
+
+  def _CheckDeviceAffinity(self, test_name):
+    """Returns True if test_name has affinity for this shard."""
+    affinity = (self._tests['steps'][test_name]['device_affinity'] %
+                self._max_shard)
+    if self._shard_index == affinity:
+      return True
+    logging.info('Skipping %s on %s (affinity is %s, shard index is %s)',
+                 test_name, self.device_serial, affinity, self._shard_index)
+    return False
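+
+  # For example (hypothetical numbers): with 4 devices, a step whose
+  # device_affinity is 6 runs only on the shard where 6 % 4 == shard_index,
+  # i.e. shard 2; every other shard skips the step and reports it as passing
+  # (see _LaunchPerfTest below).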
+
+  def _CleanupOutputDirectory(self):
+    if self._output_dir:
+      shutil.rmtree(self._output_dir, ignore_errors=True)
+      self._output_dir = None
+
+  def _ReadChartjsonOutput(self):
+    if not self._output_dir:
+      return ''
+
+    json_output_path = os.path.join(self._output_dir, 'results-chart.json')
+    try:
+      with open(json_output_path) as f:
+        return f.read()
+    except IOError:
+      logging.exception('Exception when reading chartjson.')
+      logging.error('This usually means that telemetry did not run, so it could'
+                    ' not generate the file. Please check the device running'
+                    ' the test.')
+      return ''
+
+  def _LaunchPerfTest(self, test_name):
+    """Runs a perf test.
+
+    Args:
+      test_name: the name of the test to be executed.
+
+    Returns:
+      A tuple containing (Output, base_test_result.ResultType)
+    """
+    if not self._CheckDeviceAffinity(test_name):
+      return '', base_test_result.ResultType.PASS
+
+    try:
+      logging.warning('Unmapping device ports')
+      forwarder.Forwarder.UnmapAllDevicePorts(self.device)
+      self.device.old_interface.RestartAdbdOnDevice()
+    except Exception as e:
+      logging.error('Exception when tearing down device %s', e)
+
+    cmd = ('%s --device %s' %
+           (self._tests['steps'][test_name]['cmd'],
+            self.device_serial))
+
+    if self._options.collect_chartjson_data:
+      self._output_dir = tempfile.mkdtemp()
+      cmd = cmd + ' --output-dir=%s' % self._output_dir
+
+    logging.info(
+        'temperature: %s (0.1 C)',
+        str(self._device_battery.GetBatteryInfo().get('temperature')))
+    if self._options.max_battery_temp:
+      self._device_battery.LetBatteryCoolToTemperature(
+          self._options.max_battery_temp)
+
+    logging.info('Charge level: %s%%',
+        str(self._device_battery.GetBatteryInfo().get('level')))
+    if self._options.min_battery_level:
+      self._device_battery.ChargeDeviceToLevel(
+          self._options.min_battery_level)
+
+    logging.info('%s : %s', test_name, cmd)
+    start_time = datetime.datetime.now()
+
+    timeout = self._tests['steps'][test_name].get('timeout', 5400)
+    if self._options.no_timeout:
+      timeout = None
+    logging.info('Timeout for %s test: %s', test_name, timeout)
+    full_cmd = cmd
+    if self._options.dry_run:
+      full_cmd = 'echo %s' % cmd
+
+    logfile = sys.stdout
+    if self._options.single_step:
+      # Just print a heartbeat so that the outer buildbot scripts won't time
+      # out waiting for output.
+      logfile = _HeartBeatLogger()
+    cwd = os.path.abspath(constants.DIR_SOURCE_ROOT)
+    if full_cmd.startswith('src/'):
+      cwd = os.path.abspath(os.path.join(constants.DIR_SOURCE_ROOT, os.pardir))
+    try:
+      exit_code, output = cmd_helper.GetCmdStatusAndOutputWithTimeout(
+          full_cmd, timeout, cwd=cwd, shell=True, logfile=logfile)
+      json_output = self._ReadChartjsonOutput()
+    except cmd_helper.TimeoutError as e:
+      exit_code = -1
+      output = str(e)
+      json_output = ''
+    finally:
+      self._CleanupOutputDirectory()
+      if self._options.single_step:
+        logfile.stop()
+    end_time = datetime.datetime.now()
+    if exit_code is None:
+      exit_code = -1
+    logging.info('%s : exit_code=%d in %d secs at %s',
+                 test_name, exit_code, (end_time - start_time).seconds,
+                 self.device_serial)
+
+    if exit_code == 0:
+      result_type = base_test_result.ResultType.PASS
+    else:
+      result_type = base_test_result.ResultType.FAIL
+      # Since perf tests use device affinity, give the device a chance to
+      # recover if it is offline after a failure. Otherwise, the master sharder
+      # will remove it from the pool and future tests on this device will fail.
+      try:
+        self.device.WaitUntilFullyBooted(timeout=120)
+      except device_errors.CommandTimeoutError as e:
+        logging.error('Device failed to return after %s: %s', test_name, e)
+
+    actual_exit_code = exit_code
+    if test_name in self._flaky_tests:
+      # The exit_code is used at the second stage when printing the
+      # test output. If the test is flaky, force it to "0" to get that step
+      # green while still gathering data for the perf dashboards.
+      # The result_type is used by the test_dispatcher to retry the test.
+      exit_code = 0
+
+    persisted_result = {
+        'name': test_name,
+        'output': output,
+        'chartjson': json_output,
+        'exit_code': exit_code,
+        'actual_exit_code': actual_exit_code,
+        'result_type': result_type,
+        'total_time': (end_time - start_time).seconds,
+        'device': self.device_serial,
+        'cmd': cmd,
+    }
+    self._SaveResult(persisted_result)
+
+    return (output, result_type)
+
+  def RunTest(self, test_name):
+    """Run a perf test on the device.
+
+    Args:
+      test_name: String to use for logging the test result.
+
+    Returns:
+      A tuple of (TestRunResults, retry).
+    """
+    _, result_type = self._LaunchPerfTest(test_name)
+    results = base_test_result.TestRunResults()
+    results.AddResult(base_test_result.BaseTestResult(test_name, result_type))
+    retry = None
+    if not results.DidRunPass():
+      retry = test_name
+    return results, retry
diff --git a/build/android/pylib/perf/thermal_throttle.py b/build/android/pylib/perf/thermal_throttle.py
new file mode 100644
index 0000000..383b6d5
--- /dev/null
+++ b/build/android/pylib/perf/thermal_throttle.py
@@ -0,0 +1,137 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+from pylib import android_commands
+from pylib.device import device_utils
+
+
+class OmapThrottlingDetector(object):
+  """Class to detect and track thermal throttling on an OMAP 4."""
+  OMAP_TEMP_FILE = ('/sys/devices/platform/omap/omap_temp_sensor.0/'
+                    'temperature')
+
+  @staticmethod
+  def IsSupported(device):
+    return device.FileExists(OmapThrottlingDetector.OMAP_TEMP_FILE)
+
+  def __init__(self, device):
+    self._device = device
+
+  @staticmethod
+  def BecameThrottled(log_line):
+    return 'omap_thermal_throttle' in log_line
+
+  @staticmethod
+  def BecameUnthrottled(log_line):
+    return 'omap_thermal_unthrottle' in log_line
+
+  @staticmethod
+  def GetThrottlingTemperature(log_line):
+    if 'throttle_delayed_work_fn' in log_line:
+      return float([s for s in log_line.split() if s.isdigit()][0]) / 1000.0
+
+  def GetCurrentTemperature(self):
+    tempdata = self._device.ReadFile(OmapThrottlingDetector.OMAP_TEMP_FILE)
+    return float(tempdata) / 1000.0
+
+
+class ExynosThrottlingDetector(object):
+  """Class to detect and track thermal throttling on an Exynos 5."""
+  @staticmethod
+  def IsSupported(device):
+    return device.FileExists('/sys/bus/exynos5-core')
+
+  def __init__(self, device):
+    pass
+
+  @staticmethod
+  def BecameThrottled(log_line):
+    return 'exynos_tmu: Throttling interrupt' in log_line
+
+  @staticmethod
+  def BecameUnthrottled(log_line):
+    return 'exynos_thermal_unthrottle: not throttling' in log_line
+
+  @staticmethod
+  def GetThrottlingTemperature(_log_line):
+    return None
+
+  @staticmethod
+  def GetCurrentTemperature():
+    return None
+
+
+class ThermalThrottle(object):
+  """Class to detect and track thermal throttling.
+
+  Usage:
+    Wait for IsThrottled() to be False before running test
+    After running test call HasBeenThrottled() to find out if the
+    test run was affected by thermal throttling.
+  """
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+    self._throttled = False
+    self._detector = None
+    if OmapThrottlingDetector.IsSupported(device):
+      self._detector = OmapThrottlingDetector(device)
+    elif ExynosThrottlingDetector.IsSupported(device):
+      self._detector = ExynosThrottlingDetector(device)
+
+  def HasBeenThrottled(self):
+    """True if there has been any throttling since the last call to
+       HasBeenThrottled or IsThrottled.
+    """
+    return self._ReadLog()
+
+  def IsThrottled(self):
+    """True if currently throttled."""
+    self._ReadLog()
+    return self._throttled
+
+  def _ReadLog(self):
+    if not self._detector:
+      return False
+    has_been_throttled = False
+    serial_number = str(self._device)
+    log = self._device.RunShellCommand('dmesg -c')
+    degree_symbol = unichr(0x00B0)
+    for line in log:
+      if self._detector.BecameThrottled(line):
+        if not self._throttled:
+          logging.warning('>>> Device %s thermally throttled', serial_number)
+        self._throttled = True
+        has_been_throttled = True
+      elif self._detector.BecameUnthrottled(line):
+        if self._throttled:
+          logging.warning('>>> Device %s thermally unthrottled', serial_number)
+        self._throttled = False
+        has_been_throttled = True
+      temperature = self._detector.GetThrottlingTemperature(line)
+      if temperature is not None:
+        logging.info(u'Device %s thermally throttled at %3.1f%sC',
+                     serial_number, temperature, degree_symbol)
+
+    if logging.getLogger().isEnabledFor(logging.DEBUG):
+      # Print current temperature of CPU SoC.
+      temperature = self._detector.GetCurrentTemperature()
+      if temperature is not None:
+        logging.debug(u'Current SoC temperature of %s = %3.1f%sC',
+                      serial_number, temperature, degree_symbol)
+
+      # Print temperature of battery, to give a system temperature
+      dumpsys_log = self._device.RunShellCommand('dumpsys battery')
+      for line in dumpsys_log:
+        if 'temperature' in line:
+          btemp = float([s for s in line.split() if s.isdigit()][0]) / 10.0
+          logging.debug(u'Current battery temperature of %s = %3.1f%sC',
+                        serial_number, btemp, degree_symbol)
+
+    return has_been_throttled
+
diff --git a/build/android/pylib/pexpect.py b/build/android/pylib/pexpect.py
new file mode 100644
index 0000000..cf59fb0
--- /dev/null
+++ b/build/android/pylib/pexpect.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import absolute_import
+
+import os
+import sys
+
+_CHROME_SRC = os.path.join(
+    os.path.abspath(os.path.dirname(__file__)), '..', '..', '..')
+
+_PEXPECT_PATH = os.path.join(_CHROME_SRC, 'third_party', 'pexpect')
+if _PEXPECT_PATH not in sys.path:
+  sys.path.append(_PEXPECT_PATH)
+
+# pexpect is not available on all platforms. We allow this file to be imported
+# on platforms without pexpect and only fail when pexpect is actually used.
+try:
+  from pexpect import * # pylint: disable=W0401,W0614
+except ImportError:
+  pass
diff --git a/build/android/pylib/ports.py b/build/android/pylib/ports.py
new file mode 100644
index 0000000..578152c
--- /dev/null
+++ b/build/android/pylib/ports.py
@@ -0,0 +1,172 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions that deal with local and device ports."""
+
+import contextlib
+import fcntl
+import httplib
+import logging
+import os
+import socket
+import traceback
+
+from pylib import constants
+
+
+# The following two methods are used to allocate ports for various types of
+# test servers. Because some net-related tests can run on several shards at
+# the same time, it's important to allocate ports in a process-safe way. Here
+# we implement safe port allocation by leveraging flock.
+def ResetTestServerPortAllocation():
+  """Resets the port allocation to start from TEST_SERVER_PORT_FIRST.
+
+  Returns:
+    Returns True if the reset succeeds. Otherwise returns False.
+  """
+  try:
+    with open(constants.TEST_SERVER_PORT_FILE, 'w') as fp:
+      fp.write('%d' % constants.TEST_SERVER_PORT_FIRST)
+    if os.path.exists(constants.TEST_SERVER_PORT_LOCKFILE):
+      os.unlink(constants.TEST_SERVER_PORT_LOCKFILE)
+    return True
+  except Exception as e:
+    logging.error(e)
+  return False
+
+
+def AllocateTestServerPort():
+  """Allocates a port incrementally.
+
+  Returns:
+    Returns a valid port which should be in between TEST_SERVER_PORT_FIRST and
+    TEST_SERVER_PORT_LAST. Returning 0 means no more valid port can be used.
+  """
+  port = 0
+  ports_tried = []
+  # Initialize before the try block so the finally clause below can always
+  # reference fp_lock, even if open() itself raises.
+  fp_lock = None
+  try:
+    fp_lock = open(constants.TEST_SERVER_PORT_LOCKFILE, 'w')
+    fcntl.flock(fp_lock, fcntl.LOCK_EX)
+    # Get current valid port and calculate next valid port.
+    if not os.path.exists(constants.TEST_SERVER_PORT_FILE):
+      ResetTestServerPortAllocation()
+    with open(constants.TEST_SERVER_PORT_FILE, 'r+') as fp:
+      port = int(fp.read())
+      ports_tried.append(port)
+      while not IsHostPortAvailable(port):
+        port += 1
+        ports_tried.append(port)
+      if (port > constants.TEST_SERVER_PORT_LAST or
+          port < constants.TEST_SERVER_PORT_FIRST):
+        port = 0
+      else:
+        fp.seek(0, os.SEEK_SET)
+        fp.write('%d' % (port + 1))
+  except Exception as e:
+    logging.error(e)
+  finally:
+    if fp_lock:
+      fcntl.flock(fp_lock, fcntl.LOCK_UN)
+      fp_lock.close()
+  if port:
+    logging.info('Allocated port %d for test server.', port)
+  else:
+    logging.error('Could not allocate port for test server. '
+                  'List of ports tried: %s', str(ports_tried))
+  return port
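+
+# Typical usage (a sketch): reset the allocation once at the start of a run,
+# then allocate from each process; flock on the lockfile makes the
+# read-probe-increment of the port file atomic across processes.
+#   ResetTestServerPortAllocation()
+#   port = AllocateTestServerPort()
+#   if not port:
+#     ... no free port in [TEST_SERVER_PORT_FIRST, TEST_SERVER_PORT_LAST] ...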
+
+
+def IsHostPortAvailable(host_port):
+  """Checks whether the specified host port is available.
+
+  Args:
+    host_port: Port on host to check.
+
+  Returns:
+    True if the port on host is available, otherwise returns False.
+  """
+  s = socket.socket()
+  try:
+    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+    s.bind(('', host_port))
+    s.close()
+    return True
+  except socket.error:
+    return False
+
+
+def IsDevicePortUsed(device, device_port, state=''):
+  """Checks whether the specified device port is used or not.
+
+  Args:
+    device: A DeviceUtils instance.
+    device_port: Port on device we want to check.
+    state: String of the specified state. Default is empty string, which
+           means any state.
+
+  Returns:
+    True if the port on device is already used, otherwise returns False.
+  """
+  base_url = '127.0.0.1:%d' % device_port
+  netstat_results = device.RunShellCommand('netstat')
+  for single_connect in netstat_results:
+    # Column 3 is the local address we want to check against.
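+    # A typical netstat line looks like (whitespace-separated; the exact
+    # format varies by Android release):
+    #   tcp    0    0 127.0.0.1:8001    0.0.0.0:*    LISTEN
+    # i.e. columns: proto, recv-q, send-q, local address, foreign address,
+    # state.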
+    connect_results = single_connect.split()
+    if connect_results[0] != 'tcp':
+      continue
+    if len(connect_results) < 6:
+      raise Exception('Unexpected format while parsing netstat line: ' +
+                      single_connect)
+    is_state_match = connect_results[5] == state if state else True
+    if connect_results[3] == base_url and is_state_match:
+      return True
+  return False
+
+
+def IsHttpServerConnectable(host, port, tries=3, command='GET', path='/',
+                            expected_read='', timeout=2):
+  """Checks whether the specified http server is ready to serve request or not.
+
+  Args:
+    host: Host name of the HTTP server.
+    port: Port number of the HTTP server.
+    tries: How many times we want to test the connection. The default value is
+           3.
+    command: The http command we use to connect to HTTP server. The default
+             command is 'GET'.
+    path: The path we use when connecting to HTTP server. The default path is
+          '/'.
+    expected_read: The content we expect to read from the response. The default
+                   value is ''.
+    timeout: Timeout (in seconds) for each http connection. The default is 2s.
+
+  Returns:
+    Tuple of (connect status, client error). connect status is a boolean value
+    to indicate whether the server is connectable. client_error is the error
+    message the server returns when connect status is false.
+  """
+  assert tries >= 1
+  for i in xrange(0, tries):
+    client_error = None
+    try:
+      with contextlib.closing(httplib.HTTPConnection(
+          host, port, timeout=timeout)) as http:
+        # Output some debug information when we have tried more than 2 times.
+        http.set_debuglevel(i >= 2)
+        http.request(command, path)
+        r = http.getresponse()
+        content = r.read()
+        if r.status == 200 and r.reason == 'OK' and content == expected_read:
+          return (True, '')
+        client_error = ('Bad response: %s %s version %s\n  ' %
+                        (r.status, r.reason, r.version) +
+                        '\n  '.join([': '.join(h) for h in r.getheaders()]))
+    except (httplib.HTTPException, socket.error) as e:
+      # Probably connected too quickly; try again.
+      exception_error_msgs = traceback.format_exception_only(type(e), e)
+      if exception_error_msgs:
+        client_error = ''.join(exception_error_msgs)
+  # Only returns last client_error.
+  return (False, client_error or 'Timeout')
diff --git a/build/android/pylib/remote/__init__.py b/build/android/pylib/remote/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/remote/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/remote/device/__init__.py b/build/android/pylib/remote/device/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/remote/device/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/remote/device/appurify_constants.py b/build/android/pylib/remote/device/appurify_constants.py
new file mode 100644
index 0000000..9343178
--- /dev/null
+++ b/build/android/pylib/remote/device/appurify_constants.py
@@ -0,0 +1,57 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants specific to appurify."""
+
+# Appurify network config constants.
+class NETWORK(object):
+  WIFI_1_BAR = 1
+  SPRINT_4G_LTE_4_BARS = 2
+  SPRINT_3G_5_BARS = 3
+  SPRINT_3G_4_BARS = 4
+  SPRINT_3G_3_BARS = 5
+  SPRINT_3G_2_BARS = 6
+  SPRINT_3G_1_BAR = 7
+  SPRING_4G_1_BAR = 8
+  VERIZON_3G_5_BARS = 9
+  VERIZON_3G_4_BARS = 10
+  VERIZON_3G_3_BARS = 11
+  VERIZON_3G_2_BARS = 12
+  VERIZON_3G_1_BAR = 13
+  VERIZON_4G_1_BAR = 14
+  ATANDT_3G_5_BARS = 15
+  ATANDT_3G_4_BARS = 16
+  ATANDT_3G_3_BARS = 17
+  ATANDT_3G_2_BARS = 18
+  ATANDT_3G_1_BAR = 19
+  GENERIC_2G_4_BARS = 20
+  GENERIC_2G_3_BARS = 21
+  GENERIC_EVOLVED_EDGE = 22
+  GENERIC_GPRS = 23
+  GENERIC_ENHANCED_GPRS = 24
+  GENERIC_LTE = 25
+  GENERIC_HIGH_LATENCY_DNS = 26
+  GENERIC_100_PERCENT_PACKET_LOSS = 27
+  ATANDT_HSPA_PLUS = 28
+  ATANDT_4G_LTE_4_BARS = 29
+  VERIZON_4G_LTE_4_BARS = 30
+  GENERIC_DIGITAL_SUBSCRIBE_LINE = 31
+  WIFI_STARBUCKS_3_BARS = 32
+  WIFI_STARBUCKS_4_BARS = 33
+  WIFI_STARBUCKS_HIGH_TRAFFIC = 34
+  WIFI_TARGET_1_BAR = 35
+  WIFI_TARGET_3_BARS = 36
+  WIFI_TARGET_4_BARS = 37
+  PUBLIC_WIFI_MCDONALDS_5_BARS = 38
+  PUBLIC_WIFI_MCDONALDS_4_BARS = 39
+  PUBLIC_WIFI_MCDONALDS_2_BARS = 40
+  PUBLIC_WIFI_MCDONALDS_1_BAR = 41
+  PUBLIC_WIFI_KOHLS_5_BARS = 42
+  PUBLIC_WIFI_KOHLS_4_BARS = 43
+  PUBLIC_WIFI_KOHLS_2_BARS = 44
+  PUBLIC_WIFI_ATANDT_5_BARS = 45
+  PUBLIC_WIFI_ATANDT_4_BARS = 46
+  PUBLIC_WIFI_ATANDT_2_BARS = 47
+  PUBLIC_WIFI_ATANDT_1_BAR = 48
+  BOINGO = 49
\ No newline at end of file
diff --git a/build/android/pylib/remote/device/appurify_sanitized.py b/build/android/pylib/remote/device/appurify_sanitized.py
new file mode 100644
index 0000000..9f6ab40
--- /dev/null
+++ b/build/android/pylib/remote/device/appurify_sanitized.py
@@ -0,0 +1,40 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+import os
+import sys
+
+from pylib import constants
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'requests', 'src'))
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'appurify-python', 'src'))
+handlers_before = list(logging.getLogger().handlers)
+
+import appurify.api
+import appurify.utils
+
+handlers_after = list(logging.getLogger().handlers)
+new_handler = list(set(handlers_after) - set(handlers_before))
+while new_handler:
+  logging.info("Removing logging handler.")
+  logging.getLogger().removeHandler(new_handler.pop())
+
+api = appurify.api
+utils = appurify.utils
+
+# This is not thread safe. If multiple threads are ever supported with appurify
+# this may cause logging messages to go missing.
+@contextlib.contextmanager
+def SanitizeLogging(verbose_count, level):
+  if verbose_count < 2:
+    logging.disable(level)
+    try:
+      yield True
+    finally:
+      # Restore logging even if the body raises.
+      logging.disable(logging.NOTSET)
+  else:
+    yield False
+
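+# Usage elsewhere in this package (a sketch):
+#   with appurify_sanitized.SanitizeLogging(verbose_count, logging.WARNING):
+#     results = appurify_sanitized.api.devices_list(access_token)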
diff --git a/build/android/pylib/remote/device/dummy/BUILD.gn b/build/android/pylib/remote/device/dummy/BUILD.gn
new file mode 100644
index 0000000..54ca275
--- /dev/null
+++ b/build/android/pylib/remote/device/dummy/BUILD.gn
@@ -0,0 +1,14 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/android/rules.gni")
+
+# GYP: //build/android/pylib/remote/device/dummy/dummy.gyp:remote_device_dummy_apk
+android_apk("remote_device_dummy_apk") {
+  android_manifest = "//build/android/AndroidManifest.xml"
+  java_files = [ "src/org/chromium/dummy/Dummy.java" ]
+  apk_name = "remote_device_dummy"
+  testonly = true
+}
diff --git a/build/android/pylib/remote/device/dummy/dummy.gyp b/build/android/pylib/remote/device/dummy/dummy.gyp
new file mode 100644
index 0000000..b003edc
--- /dev/null
+++ b/build/android/pylib/remote/device/dummy/dummy.gyp
@@ -0,0 +1,25 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Running gtests on a remote device via am instrument requires both an "app"
+# APK and a "test" APK with different package names. Our gtests only use one
+# APK, so we build a dummy APK to upload as the app.
+
+{
+  'targets': [
+    {
+      # GN: //build/android/pylib/remote/device/dummy:remote_device_dummy_apk
+      'target_name': 'remote_device_dummy_apk',
+      'type': 'none',
+      'variables': {
+        'apk_name': 'remote_device_dummy',
+        'java_in_dir': '.',
+        'android_manifest_path': '../../../../../../build/android/AndroidManifest.xml',
+      },
+      'includes': [
+        '../../../../../../build/java_apk.gypi',
+      ]
+    },
+  ]
+}
diff --git a/build/android/pylib/remote/device/dummy/src/org/chromium/dummy/Dummy.java b/build/android/pylib/remote/device/dummy/src/org/chromium/dummy/Dummy.java
new file mode 100644
index 0000000..1281b39
--- /dev/null
+++ b/build/android/pylib/remote/device/dummy/src/org/chromium/dummy/Dummy.java
@@ -0,0 +1,9 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.dummy;
+
+/** Does nothing. */
+class Dummy {}
+
diff --git a/build/android/pylib/remote/device/remote_device_environment.py b/build/android/pylib/remote/device/remote_device_environment.py
new file mode 100644
index 0000000..dc11845
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_environment.py
@@ -0,0 +1,368 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Environment setup and teardown for remote devices."""
+
+import distutils.version
+import json
+import logging
+import os
+import random
+import sys
+
+from pylib import constants
+from pylib.base import environment
+from pylib.remote.device import appurify_sanitized
+from pylib.remote.device import remote_device_helper
+from pylib.utils import timeout_retry
+from pylib.utils import reraiser_thread
+
+class RemoteDeviceEnvironment(environment.Environment):
+  """An environment for running on remote devices."""
+
+  _ENV_KEY = 'env'
+  _DEVICE_KEY = 'device'
+  _DEFAULT_RETRIES = 0
+
+  def __init__(self, args, error_func):
+    """Constructor.
+
+    Args:
+      args: Command line arguments.
+      error_func: function called to report bad command line arguments.
+    """
+    super(RemoteDeviceEnvironment, self).__init__()
+    self._access_token = None
+    self._device = None
+    self._device_type = args.device_type
+    self._verbose_count = args.verbose_count
+    self._timeouts = {
+        'queueing': 60 * 10,
+        'installing': 60 * 10,
+        'in-progress': 60 * 30,
+        'unknown': 60 * 5
+    }
+    # Example config file:
+    # {
+    #   "remote_device": ["Galaxy S4", "Galaxy S3"],
+    #   "remote_device_os": ["4.4.2", "4.4.4"],
+    #   "remote_device_minimum_os": "4.4.2",
+    #   "api_address": "www.example.com",
+    #   "api_port": "80",
+    #   "api_protocol": "http",
+    #   "api_secret": "apisecret",
+    #   "api_key": "apikey",
+    #   "timeouts": {
+    #     "queueing": 600,
+    #     "installing": 600,
+    #     "in-progress": 1800,
+    #     "unknown": 300
+    #   }
+    # }
+    if args.remote_device_file:
+      with open(args.remote_device_file) as device_file:
+        device_json = json.load(device_file)
+    else:
+      device_json = {}
+
+    self._api_address = device_json.get('api_address', None)
+    self._api_key = device_json.get('api_key', None)
+    self._api_port = device_json.get('api_port', None)
+    self._api_protocol = device_json.get('api_protocol', None)
+    self._api_secret = device_json.get('api_secret', None)
+    self._device_oem = device_json.get('device_oem', None)
+    self._device_type = device_json.get('device_type', 'Android')
+    self._network_config = device_json.get('network_config', None)
+    self._remote_device = device_json.get('remote_device', None)
+    self._remote_device_minimum_os = device_json.get(
+        'remote_device_minimum_os', None)
+    self._remote_device_os = device_json.get('remote_device_os', None)
+    self._remote_device_timeout = device_json.get(
+        'remote_device_timeout', None)
+    self._results_path = device_json.get('results_path', None)
+    self._runner_package = device_json.get('runner_package', None)
+    self._runner_type = device_json.get('runner_type', None)
+    self._timeouts.update(device_json.get('timeouts', {}))
+
+    def command_line_override(
+        file_value, cmd_line_value, desc, print_value=True):
+      if cmd_line_value:
+        if file_value and file_value != cmd_line_value:
+          if print_value:
+            logging.info('Overriding %s from %s to %s',
+                         desc, file_value, cmd_line_value)
+          else:
+            logging.info('overriding %s', desc)
+        return cmd_line_value
+      return file_value
+
+    self._api_address = command_line_override(
+        self._api_address, args.api_address, 'api_address')
+    self._api_port = command_line_override(
+        self._api_port, args.api_port, 'api_port')
+    self._api_protocol = command_line_override(
+        self._api_protocol, args.api_protocol, 'api_protocol')
+    self._device_oem = command_line_override(
+        self._device_oem, args.device_oem, 'device_oem')
+    self._device_type = command_line_override(
+        self._device_type, args.device_type, 'device_type')
+    self._network_config = command_line_override(
+        self._network_config, args.network_config, 'network_config')
+    self._remote_device = command_line_override(
+        self._remote_device, args.remote_device, 'remote_device')
+    self._remote_device_minimum_os = command_line_override(
+        self._remote_device_minimum_os, args.remote_device_minimum_os,
+        'remote_device_minimum_os')
+    self._remote_device_os = command_line_override(
+        self._remote_device_os, args.remote_device_os, 'remote_device_os')
+    self._remote_device_timeout = command_line_override(
+        self._remote_device_timeout, args.remote_device_timeout,
+        'remote_device_timeout')
+    self._results_path = command_line_override(
+        self._results_path, args.results_path, 'results_path')
+    self._runner_package = command_line_override(
+        self._runner_package, args.runner_package, 'runner_package')
+    self._runner_type = command_line_override(
+        self._runner_type, args.runner_type, 'runner_type')
+
+    if args.api_key_file:
+      with open(args.api_key_file) as api_key_file:
+        temp_key = api_key_file.read().strip()
+        self._api_key = command_line_override(
+            self._api_key, temp_key, 'api_key', print_value=False)
+    self._api_key = command_line_override(
+        self._api_key, args.api_key, 'api_key', print_value=False)
+
+    if args.api_secret_file:
+      with open(args.api_secret_file) as api_secret_file:
+        temp_secret = api_secret_file.read().strip()
+        self._api_secret = command_line_override(
+            self._api_secret, temp_secret, 'api_secret', print_value=False)
+    self._api_secret = command_line_override(
+        self._api_secret, args.api_secret, 'api_secret', print_value=False)
+
+    if not self._api_address:
+      error_func('Must set api address with --api-address'
+                 ' or in --remote-device-file.')
+    if not self._api_key:
+      error_func('Must set api key with --api-key, --api-key-file'
+                 ' or in --remote-device-file')
+    if not self._api_port:
+      error_func('Must set api port with --api-port'
+                 ' or in --remote-device-file')
+    if not self._api_protocol:
+      error_func('Must set api protocol with --api-protocol'
+                 ' or in --remote-device-file. Example: http')
+    if not self._api_secret:
+      error_func('Must set api secret with --api-secret, --api-secret-file'
+                 ' or in --remote-device-file')
+
+    logging.info('Api address: %s', self._api_address)
+    logging.info('Api port: %s', self._api_port)
+    logging.info('Api protocol: %s', self._api_protocol)
+    logging.info('Remote device: %s', self._remote_device)
+    logging.info('Remote device minimum OS: %s',
+                 self._remote_device_minimum_os)
+    logging.info('Remote device OS: %s', self._remote_device_os)
+    logging.info('Remote device OEM: %s', self._device_oem)
+    logging.info('Remote device type: %s', self._device_type)
+    logging.info('Remote device timeout: %s', self._remote_device_timeout)
+    logging.info('Results Path: %s', self._results_path)
+    logging.info('Runner package: %s', self._runner_package)
+    logging.info('Runner type: %s', self._runner_type)
+    logging.info('Timeouts: %s', self._timeouts)
+
+    if not args.trigger and not args.collect:
+      self._trigger = True
+      self._collect = True
+    else:
+      self._trigger = args.trigger
+      self._collect = args.collect
+
+  def SetUp(self):
+    """Set up the test environment."""
+    os.environ['APPURIFY_API_PROTO'] = self._api_protocol
+    os.environ['APPURIFY_API_HOST'] = self._api_address
+    os.environ['APPURIFY_API_PORT'] = self._api_port
+    os.environ['APPURIFY_STATUS_BASE_URL'] = 'none'
+    self._GetAccessToken()
+    if self._trigger:
+      self._SelectDevice()
+
+  def TearDown(self):
+    """Teardown the test environment."""
+    self._RevokeAccessToken()
+
+  def __enter__(self):
+    """Set up the test run when used as a context manager."""
+    try:
+      self.SetUp()
+      return self
+    except:
+      self.__exit__(*sys.exc_info())
+      raise
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    """Tears down the test run when used as a context manager."""
+    self.TearDown()
+
+  def DumpTo(self, persisted_data):
+    env_data = {
+      self._DEVICE_KEY: self._device,
+    }
+    persisted_data[self._ENV_KEY] = env_data
+
+  def LoadFrom(self, persisted_data):
+    env_data = persisted_data[self._ENV_KEY]
+    self._device = env_data[self._DEVICE_KEY]
+
+  def _GetAccessToken(self):
+    """Generates access token for remote device service."""
+    logging.info('Generating remote service access token')
+    with appurify_sanitized.SanitizeLogging(self._verbose_count,
+                                            logging.WARNING):
+      access_token_results = appurify_sanitized.api.access_token_generate(
+          self._api_key, self._api_secret)
+    remote_device_helper.TestHttpResponse(access_token_results,
+                                          'Unable to generate access token.')
+    self._access_token = access_token_results.json()['response']['access_token']
+
+  def _RevokeAccessToken(self):
+    """Destroys access token for remote device service."""
+    logging.info('Revoking remote service access token')
+    with appurify_sanitized.SanitizeLogging(self._verbose_count,
+                                            logging.WARNING):
+      revoke_token_results = appurify_sanitized.api.access_token_revoke(
+          self._access_token)
+    remote_device_helper.TestHttpResponse(revoke_token_results,
+                                          'Unable to revoke access token.')
+
+  def _SelectDevice(self):
+    if self._remote_device_timeout:
+      try:
+        timeout_retry.Run(self._FindDeviceWithTimeout,
+                          self._remote_device_timeout, self._DEFAULT_RETRIES)
+      except reraiser_thread.TimeoutError:
+        self._NoDeviceFound()
+    else:
+      if not self._FindDevice():
+        self._NoDeviceFound()
+
+  def _FindDevice(self):
+    """Find which device to use."""
+    logging.info('Finding device to run tests on.')
+    device_list = self._GetDeviceList()
+    random.shuffle(device_list)
+    for device in device_list:
+      if device['os_name'] != self._device_type:
+        continue
+      if self._remote_device and device['name'] not in self._remote_device:
+        continue
+      if (self._remote_device_os
+          and device['os_version'] not in self._remote_device_os):
+        continue
+      if self._device_oem and device['brand'] not in self._device_oem:
+        continue
+      if (self._remote_device_minimum_os
+          and distutils.version.LooseVersion(device['os_version'])
+          < distutils.version.LooseVersion(self._remote_device_minimum_os)):
+        continue
+      if device['has_available_device']:
+        logging.info('Found device: %s %s',
+                     device['name'], device['os_version'])
+        self._device = device
+        return True
+    return False
+
+  def _FindDeviceWithTimeout(self):
+    """Find which device to use with timeout."""
+    timeout_retry.WaitFor(self._FindDevice, wait_period=1)
+
+  def _PrintAvailableDevices(self, device_list):
+    def compare_devices(a, b):
+      for key in ('os_version', 'name'):
+        c = cmp(a[key], b[key])
+        if c:
+          return c
+      return 0
+
+    logging.critical('Available %s Devices:', self._device_type)
+    logging.critical(
+        '  %s %s %s %s %s',
+        'OS'.ljust(10),
+        'Device Name'.ljust(30),
+        'Available'.ljust(10),
+        'Busy'.ljust(10),
+        'All'.ljust(10))
+    devices = (d for d in device_list if d['os_name'] == self._device_type)
+    for d in sorted(devices, compare_devices):
+      logging.critical(
+          '  %s %s %s %s %s',
+          d['os_version'].ljust(10),
+          d['name'].ljust(30),
+          str(d['available_devices_count']).ljust(10),
+          str(d['busy_devices_count']).ljust(10),
+          str(d['all_devices_count']).ljust(10))
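+
+  # Sample output of the table logged above (rows illustrative):
+  #   OS         Device Name                    Available  Busy       All
+  #   4.4        Nexus 5                        2          1          3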
+
+  def _GetDeviceList(self):
+    with appurify_sanitized.SanitizeLogging(self._verbose_count,
+                                            logging.WARNING):
+      dev_list_res = appurify_sanitized.api.devices_list(self._access_token)
+    remote_device_helper.TestHttpResponse(dev_list_res,
+                                          'Unable to get device list.')
+    return dev_list_res.json()['response']
+
+  def _NoDeviceFound(self):
+    self._PrintAvailableDevices(self._GetDeviceList())
+    raise remote_device_helper.RemoteDeviceError(
+        'No device found.', is_infra_error=True)
+
+  @property
+  def collect(self):
+    return self._collect
+
+  @property
+  def device_type_id(self):
+    return self._device['device_type_id']
+
+  @property
+  def network_config(self):
+    return self._network_config
+
+  @property
+  def only_output_failures(self):
+    # TODO(jbudorick): Remove this once b/18981674 is fixed.
+    return True
+
+  @property
+  def results_path(self):
+    return self._results_path
+
+  @property
+  def runner_package(self):
+    return self._runner_package
+
+  @property
+  def runner_type(self):
+    return self._runner_type
+
+  @property
+  def timeouts(self):
+    return self._timeouts
+
+  @property
+  def token(self):
+    return self._access_token
+
+  @property
+  def trigger(self):
+    return self._trigger
+
+  @property
+  def verbose_count(self):
+    return self._verbose_count
+
+  @property
+  def device_type(self):
+    return self._device_type
diff --git a/build/android/pylib/remote/device/remote_device_gtest_run.py b/build/android/pylib/remote/device/remote_device_gtest_run.py
new file mode 100644
index 0000000..98d41e4
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_gtest_run.py
@@ -0,0 +1,81 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run specific test on specific environment."""
+
+import logging
+import os
+import tempfile
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.remote.device import appurify_sanitized
+from pylib.remote.device import remote_device_test_run
+from pylib.remote.device import remote_device_helper
+
+
+_EXTRA_COMMAND_LINE_FILE = (
+    'org.chromium.native_test.NativeTestActivity.CommandLineFile')
+
+
+class RemoteDeviceGtestTestRun(remote_device_test_run.RemoteDeviceTestRun):
+  """Run gtests and uirobot tests on a remote device."""
+
+  DEFAULT_RUNNER_PACKAGE = (
+      'org.chromium.native_test.NativeTestInstrumentationTestRunner')
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.suite
+
+  #override
+  def _TriggerSetUp(self):
+    """Set up the triggering of a test run."""
+    logging.info('Triggering test run.')
+
+    if self._env.runner_type:
+      logging.warning('Ignoring configured runner_type "%s"',
+                      self._env.runner_type)
+
+    if not self._env.runner_package:
+      runner_package = self.DEFAULT_RUNNER_PACKAGE
+      logging.info('Using default runner package: %s',
+                   self.DEFAULT_RUNNER_PACKAGE)
+    else:
+      runner_package = self._env.runner_package
+
+    dummy_app_path = os.path.join(
+        constants.GetOutDirectory(), 'apks', 'remote_device_dummy.apk')
+    with tempfile.NamedTemporaryFile(suffix='.flags.txt') as flag_file:
+      env_vars = {}
+      filter_string = self._test_instance._GenerateDisabledFilterString(None)
+      if filter_string:
+        flag_file.write('_ --gtest_filter=%s' % filter_string)
+        flag_file.flush()
+        env_vars[_EXTRA_COMMAND_LINE_FILE] = os.path.basename(flag_file.name)
+        self._test_instance._data_deps.append(
+            (os.path.abspath(flag_file.name), None))
+      self._AmInstrumentTestSetup(
+          dummy_app_path, self._test_instance.apk, runner_package,
+          environment_variables=env_vars)
+
+  _INSTRUMENTATION_STREAM_LEADER = 'INSTRUMENTATION_STATUS: stream='
+
+  #override
+  def _ParseTestResults(self):
+    logging.info('Parsing results from stdout.')
+    results = base_test_result.TestRunResults()
+    output = self._results['results']['output'].splitlines()
+    output = (l[len(self._INSTRUMENTATION_STREAM_LEADER):] for l in output
+              if l.startswith(self._INSTRUMENTATION_STREAM_LEADER))
+    results_list = self._test_instance.ParseGTestOutput(output)
+    results.AddResults(results_list)
+    if self._env.only_output_failures:
+      logging.info('See logcat for more results information.')
+    if not self._results['results']['pass']:
+      results.AddResult(base_test_result.BaseTestResult(
+          'Remote Service detected error.',
+          base_test_result.ResultType.FAIL))
+    return results
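+
+
+# Parsing sketch for _ParseTestResults above: each gtest output line arrives
+# wrapped in an instrumentation status line, e.g. (sample line illustrative):
+#   INSTRUMENTATION_STATUS: stream=[ RUN      ] FooTest.Bar
+# Stripping _INSTRUMENTATION_STREAM_LEADER leaves '[ RUN      ] FooTest.Bar',
+# which ParseGTestOutput consumes as ordinary gtest output.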
diff --git a/build/android/pylib/remote/device/remote_device_helper.py b/build/android/pylib/remote/device/remote_device_helper.py
new file mode 100644
index 0000000..896ae99
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_helper.py
@@ -0,0 +1,24 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common functions and Exceptions for remote_device_*"""
+
+from pylib.utils import base_error
+
+
+class RemoteDeviceError(base_error.BaseError):
+  """Exception to throw when problems occur with remote device service."""
+  pass
+
+
+def TestHttpResponse(response, error_msg):
+  """Checks the Http response from remote device service.
+
+  Args:
+      response: response dict from the remote device service.
+      error_msg: Error message to display if bad response is seen.
+  """
+  if response.status_code != 200:
+    raise RemoteDeviceError(
+        '%s (%d: %s)' % (error_msg, response.status_code, response.reason))
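+
+
+# Minimal usage sketch (assumes a requests-style response object exposing
+# status_code, reason, and json(), as these modules do):
+#   res = appurify_sanitized.api.devices_list(token)
+#   TestHttpResponse(res, 'Unable to get device list.')
+#   devices = res.json()['response']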
diff --git a/build/android/pylib/remote/device/remote_device_instrumentation_test_run.py b/build/android/pylib/remote/device/remote_device_instrumentation_test_run.py
new file mode 100644
index 0000000..bcdb90c
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_instrumentation_test_run.py
@@ -0,0 +1,74 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run specific test on specific environment."""
+
+import logging
+import os
+import tempfile
+
+from pylib.base import base_test_result
+from pylib.remote.device import remote_device_test_run
+
+
+class RemoteDeviceInstrumentationTestRun(
+    remote_device_test_run.RemoteDeviceTestRun):
+  """Run instrumentation tests on a remote device."""
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.test_package
+
+  #override
+  def _TriggerSetUp(self):
+    """Set up the triggering of a test run."""
+    logging.info('Triggering test run.')
+
+    with tempfile.NamedTemporaryFile(suffix='.txt') as test_list_file:
+      tests = self._test_instance.GetTests()
+      logging.debug('preparing to run %d instrumentation tests remotely:',
+                    len(tests))
+      for t in tests:
+        test_name = '%s#%s' % (t['class'], t['method'])
+        logging.debug('  %s', test_name)
+        test_list_file.write('%s\n' % test_name)
+      test_list_file.flush()
+      self._test_instance._data_deps.append(
+          (os.path.abspath(test_list_file.name), None))
+
+      env_vars = self._test_instance.GetDriverEnvironmentVars(
+          test_list_file_path=test_list_file.name)
+      env_vars.update(self._test_instance.GetHttpServerEnvironmentVars())
+
+      logging.debug('extras:')
+      for k, v in env_vars.iteritems():
+        logging.debug('  %s: %s', k, v)
+
+      self._AmInstrumentTestSetup(
+          self._test_instance.apk_under_test,
+          self._test_instance.driver_apk,
+          self._test_instance.driver_name,
+          environment_variables=env_vars,
+          extra_apks=[self._test_instance.test_apk])
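+
+  # The test list file written above holds one fully-qualified test per line,
+  # e.g. (hypothetical test name): org.chromium.foo.BarTest#testBaz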
+
+  #override
+  def _ParseTestResults(self):
+    logging.info('Parsing results from stdout.')
+    r = base_test_result.TestRunResults()
+    result_code, result_bundle, statuses = (
+        self._test_instance.ParseAmInstrumentRawOutput(
+            self._results['results']['output'].splitlines()))
+    result = self._test_instance.GenerateTestResults(
+        result_code, result_bundle, statuses, 0, 0)
+
+    if isinstance(result, base_test_result.BaseTestResult):
+      r.AddResult(result)
+    elif isinstance(result, list):
+      r.AddResults(result)
+    else:
+      raise Exception('Unexpected result type: %s' % type(result).__name__)
+
+    return r
diff --git a/build/android/pylib/remote/device/remote_device_test_run.py b/build/android/pylib/remote/device/remote_device_test_run.py
new file mode 100644
index 0000000..60cc735
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_test_run.py
@@ -0,0 +1,308 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run specific test on specific environment."""
+
+import json
+import logging
+import os
+import tempfile
+import time
+import zipfile
+
+from pylib import constants
+from pylib.base import test_run
+from pylib.remote.device import appurify_constants
+from pylib.remote.device import appurify_sanitized
+from pylib.remote.device import remote_device_helper
+from pylib.utils import zip_utils
+
+
+class RemoteDeviceTestRun(test_run.TestRun):
+  """Run tests on a remote device."""
+
+  _TEST_RUN_KEY = 'test_run'
+  _TEST_RUN_ID_KEY = 'test_run_id'
+
+  WAIT_TIME = 5
+  COMPLETE = 'complete'
+  HEARTBEAT_INTERVAL = 300
+
+  def __init__(self, env, test_instance):
+    """Constructor.
+
+    Args:
+      env: Environment the tests will run in.
+      test_instance: The test that will be run.
+    """
+    super(RemoteDeviceTestRun, self).__init__(env, test_instance)
+    self._env = env
+    self._test_instance = test_instance
+    self._app_id = ''
+    self._test_id = ''
+    self._results = ''
+    self._test_run_id = ''
+
+  #override
+  def SetUp(self):
+    """Set up a test run."""
+    if self._env.trigger:
+      self._TriggerSetUp()
+    elif self._env.collect:
+      assert isinstance(self._env.collect, basestring), (
+          'File for storing test_run_id must be a string.')
+      with open(self._env.collect, 'r') as persisted_data_file:
+        persisted_data = json.loads(persisted_data_file.read())
+        self._env.LoadFrom(persisted_data)
+        self.LoadFrom(persisted_data)
+
+  def _TriggerSetUp(self):
+    """Set up the triggering of a test run."""
+    raise NotImplementedError
+
+  #override
+  def RunTests(self):
+    """Run the test."""
+    if self._env.trigger:
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        test_start_res = appurify_sanitized.api.tests_run(
+            self._env.token, self._env.device_type_id, self._app_id,
+            self._test_id)
+      remote_device_helper.TestHttpResponse(
+        test_start_res, 'Unable to run test.')
+      self._test_run_id = test_start_res.json()['response']['test_run_id']
+      logging.info('Test run id: %s', self._test_run_id)
+
+    if self._env.collect:
+      current_status = ''
+      timeout_counter = 0
+      heartbeat_counter = 0
+      while self._GetTestStatus(self._test_run_id) != self.COMPLETE:
+        if self._results['detailed_status'] != current_status:
+          logging.info('Test status: %s', self._results['detailed_status'])
+          current_status = self._results['detailed_status']
+          timeout_counter = 0
+          heartbeat_counter = 0
+        if heartbeat_counter > self.HEARTBEAT_INTERVAL:
+          logging.info('Test status: %s', self._results['detailed_status'])
+          heartbeat_counter = 0
+
+        timeout = self._env.timeouts.get(
+            current_status, self._env.timeouts['unknown'])
+        if timeout_counter > timeout:
+          raise remote_device_helper.RemoteDeviceError(
+              'Timeout while in %s state for %s seconds'
+              % (current_status, timeout),
+              is_infra_error=True)
+        time.sleep(self.WAIT_TIME)
+        timeout_counter += self.WAIT_TIME
+        heartbeat_counter += self.WAIT_TIME
+      self._DownloadTestResults(self._env.results_path)
+
+      if self._results['results']['exception']:
+        raise remote_device_helper.RemoteDeviceError(
+            self._results['results']['exception'], is_infra_error=True)
+
+      return self._ParseTestResults()
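+
+  # Polling sketch: the collect loop above wakes every WAIT_TIME (5s) and
+  # re-logs the status at least every HEARTBEAT_INTERVAL (300s).
+  # self._env.timeouts is expected to map a detailed status to a per-state
+  # timeout in seconds, with an 'unknown' fallback, e.g. (status names and
+  # values illustrative):
+  #   {'in-progress': 5400, 'unknown': 1800}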
+
+  #override
+  def TearDown(self):
+    """Tear down the test run."""
+    if self._env.collect:
+      self._CollectTearDown()
+    elif self._env.trigger:
+      assert isinstance(self._env.trigger, basestring), (
+          'File for storing test_run_id must be a string.')
+      with open(self._env.trigger, 'w') as persisted_data_file:
+        persisted_data = {}
+        self.DumpTo(persisted_data)
+        self._env.DumpTo(persisted_data)
+        persisted_data_file.write(json.dumps(persisted_data))
+
+  def _CollectTearDown(self):
+    if self._GetTestStatus(self._test_run_id) != self.COMPLETE:
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        test_abort_res = appurify_sanitized.api.tests_abort(
+            self._env.token, self._test_run_id, reason='Test runner exiting.')
+      remote_device_helper.TestHttpResponse(test_abort_res,
+                                            'Unable to abort test.')
+
+  def __enter__(self):
+    """Set up the test run when used as a context manager."""
+    self.SetUp()
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    """Tear down the test run when used as a context manager."""
+    self.TearDown()
+
+  def DumpTo(self, persisted_data):
+    test_run_data = {
+      self._TEST_RUN_ID_KEY: self._test_run_id,
+    }
+    persisted_data[self._TEST_RUN_KEY] = test_run_data
+
+  def LoadFrom(self, persisted_data):
+    test_run_data = persisted_data[self._TEST_RUN_KEY]
+    self._test_run_id = test_run_data[self._TEST_RUN_ID_KEY]
+
+  def _ParseTestResults(self):
+    raise NotImplementedError
+
+  def _GetTestByName(self, test_name):
+    """Gets test_id for specific test.
+
+    Args:
+      test_name: Test to find the ID of.
+    """
+    with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                            logging.WARNING):
+      test_list_res = appurify_sanitized.api.tests_list(self._env.token)
+    remote_device_helper.TestHttpResponse(test_list_res,
+                                          'Unable to get tests list.')
+    for test in test_list_res.json()['response']:
+      if test['test_type'] == test_name:
+        return test['test_id']
+    raise remote_device_helper.RemoteDeviceError(
+        'No test found with name %s' % (test_name))
+
+  def _DownloadTestResults(self, results_path):
+    """Download the test results from remote device service.
+
+    Args:
+      results_path: Path to download appurify results zipfile.
+    """
+    if results_path:
+      logging.info('Downloading results to %s.', results_path)
+      if not os.path.exists(os.path.dirname(results_path)):
+        os.makedirs(os.path.dirname(results_path))
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        appurify_sanitized.utils.wget(self._results['results']['url'],
+                                      results_path)
+
+  def _GetTestStatus(self, test_run_id):
+    """Checks the state of the test, and sets self._results
+
+    Args:
+      test_run_id: Id of test on on remote service.
+    """
+
+    with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                            logging.WARNING):
+      test_check_res = appurify_sanitized.api.tests_check_result(
+          self._env.token, test_run_id)
+    remote_device_helper.TestHttpResponse(test_check_res,
+                                          'Unable to get test status.')
+    self._results = test_check_res.json()['response']
+    return self._results['status']
+
+  def _AmInstrumentTestSetup(self, app_path, test_path, runner_package,
+                             environment_variables, extra_apks=None):
+    config = {'runner': runner_package}
+    if environment_variables:
+      config['environment_vars'] = ','.join(
+          '%s=%s' % (k, v) for k, v in environment_variables.iteritems())
+
+    self._app_id = self._UploadAppToDevice(app_path)
+
+    data_deps = self._test_instance.GetDataDependencies()
+    if data_deps:
+      with tempfile.NamedTemporaryFile(suffix='.zip') as test_with_deps:
+        sdcard_files = []
+        additional_apks = []
+        host_test = os.path.basename(test_path)
+        with zipfile.ZipFile(test_with_deps.name, 'w') as zip_file:
+          zip_file.write(test_path, host_test, zipfile.ZIP_DEFLATED)
+          for h, _ in data_deps:
+            if os.path.isdir(h):
+              zip_utils.WriteToZipFile(zip_file, h, '.')
+              sdcard_files.extend(os.listdir(h))
+            else:
+              zip_utils.WriteToZipFile(zip_file, h, os.path.basename(h))
+              sdcard_files.append(os.path.basename(h))
+          for a in extra_apks or ():
+            zip_utils.WriteToZipFile(zip_file, a, os.path.basename(a))
+            additional_apks.append(os.path.basename(a))
+
+        config['sdcard_files'] = ','.join(sdcard_files)
+        config['host_test'] = host_test
+        if additional_apks:
+          config['additional_apks'] = ','.join(additional_apks)
+        self._test_id = self._UploadTestToDevice(
+            'robotium', test_with_deps.name, app_id=self._app_id)
+    else:
+      self._test_id = self._UploadTestToDevice('robotium', test_path)
+
+    logging.info('Setting config: %s', config)
+    appurify_configs = {}
+    if self._env.network_config:
+      appurify_configs['network'] = self._env.network_config
+    self._SetTestConfig('robotium', config, **appurify_configs)
+
+  def _UploadAppToDevice(self, app_path):
+    """Upload app to device."""
+    logging.info('Uploading %s to remote service as %s.', app_path,
+                 self._test_instance.suite)
+    with open(app_path, 'rb') as apk_src:
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        upload_results = appurify_sanitized.api.apps_upload(
+            self._env.token, apk_src, 'raw', name=self._test_instance.suite)
+      remote_device_helper.TestHttpResponse(
+          upload_results, 'Unable to upload %s.' % app_path)
+      return upload_results.json()['response']['app_id']
+
+  def _UploadTestToDevice(self, test_type, test_path, app_id=None):
+    """Upload test to device
+    Args:
+      test_type: Type of test that is being uploaded. Ex. uirobot, gtest..
+    """
+    logging.info('Uploading %s to remote service.', test_path)
+    with open(test_path, 'rb') as test_src:
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        upload_results = appurify_sanitized.api.tests_upload(
+            self._env.token, test_src, 'raw', test_type, app_id=app_id)
+      remote_device_helper.TestHttpResponse(
+          upload_results, 'Unable to upload %s.' % test_path)
+      return upload_results.json()['response']['test_id']
+
+  def _SetTestConfig(self, runner_type, runner_configs,
+                     network=appurify_constants.NETWORK.WIFI_1_BAR,
+                     pcap=0, profiler=0, videocapture=0):
+    """Generates and uploads config file for test.
+    Args:
+      runner_configs: Configs specific to the runner you are using.
+      network: Config to specify the network environment the devices running
+          the tests will be in.
+      pcap: Option to set the recording the of network traffic from the device.
+      profiler: Option to set the recording of CPU, memory, and network
+          transfer usage in the tests.
+      videocapture: Option to set video capture during the tests.
+
+    """
+    logging.info('Generating config file for test.')
+    with tempfile.TemporaryFile() as config:
+      config_data = [
+          '[appurify]',
+          'network=%s' % network,
+          'pcap=%s' % pcap,
+          'profiler=%s' % profiler,
+          'videocapture=%s' % videocapture,
+          '[%s]' % runner_type
+      ]
+      config_data.extend(
+          '%s=%s' % (k, v) for k, v in runner_configs.iteritems())
+      config.write(''.join('%s\n' % l for l in config_data))
+      config.flush()
+      config.seek(0)
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        config_response = appurify_sanitized.api.config_upload(
+            self._env.token, config, self._test_id)
+      remote_device_helper.TestHttpResponse(
+          config_response, 'Unable to upload test config.')
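+
+  # For reference, the config uploaded above looks roughly like this
+  # (section name and values illustrative):
+  #   [appurify]
+  #   network=3
+  #   pcap=0
+  #   profiler=0
+  #   videocapture=0
+  #   [robotium]
+  #   runner=org.chromium.native_test.NativeTestInstrumentationTestRunner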
diff --git a/build/android/pylib/remote/device/remote_device_uirobot_test_run.py b/build/android/pylib/remote/device/remote_device_uirobot_test_run.py
new file mode 100644
index 0000000..f818c98
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_uirobot_test_run.py
@@ -0,0 +1,88 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run specific test on specific environment."""
+
+import logging
+
+from pylib.base import base_test_result
+from pylib.remote.device import appurify_sanitized
+from pylib.remote.device import remote_device_test_run
+from pylib.remote.device import remote_device_helper
+
+
+class RemoteDeviceUirobotTestRun(remote_device_test_run.RemoteDeviceTestRun):
+  """Run uirobot tests on a remote device."""
+
+  def __init__(self, env, test_instance):
+    """Constructor.
+
+    Args:
+      env: Environment the tests will run in.
+      test_instance: The test that will be run.
+    """
+    super(RemoteDeviceUirobotTestRun, self).__init__(env, test_instance)
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.package_name
+
+  #override
+  def _TriggerSetUp(self):
+    """Set up the triggering of a test run."""
+    logging.info('Triggering test run.')
+
+    if self._env.device_type == 'Android':
+      default_runner_type = 'android_robot'
+    elif self._env.device_type == 'iOS':
+      default_runner_type = 'ios_robot'
+    else:
+      raise remote_device_helper.RemoteDeviceError(
+          'Unknown device type: %s' % self._env.device_type)
+
+    self._app_id = self._UploadAppToDevice(self._test_instance.app_under_test)
+    if not self._env.runner_type:
+      runner_type = default_runner_type
+      logging.info('Using default runner type: %s', default_runner_type)
+    else:
+      runner_type = self._env.runner_type
+
+    self._test_id = self._UploadTestToDevice(
+        'android_robot', None, app_id=self._app_id)
+    config_body = {'duration': self._test_instance.minutes}
+    self._SetTestConfig(runner_type, config_body)
+
+  # TODO(rnephew): Switch to base class implementation when supported.
+  #override
+  def _UploadTestToDevice(self, test_type, test_path, app_id=None):
+    if test_path:
+      logging.info("Ignoring test path.")
+    data = {
+        'access_token': self._env.token,
+        'test_type': test_type,
+        'app_id': app_id,
+    }
+    with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                            logging.WARNING):
+      test_upload_res = appurify_sanitized.utils.post('tests/upload',
+                                                      data, None)
+    remote_device_helper.TestHttpResponse(
+        test_upload_res, 'Unable to get UiRobot test id.')
+    return test_upload_res.json()['response']['test_id']
+
+  #override
+  def _ParseTestResults(self):
+    logging.info('Parsing results from remote service.')
+    results = base_test_result.TestRunResults()
+    if self._results['results']['pass']:
+      result_type = base_test_result.ResultType.PASS
+    else:
+      result_type = base_test_result.ResultType.FAIL
+    results.AddResult(base_test_result.BaseTestResult('uirobot', result_type))
+    return results
diff --git a/build/android/pylib/restart_adbd.sh b/build/android/pylib/restart_adbd.sh
new file mode 100755
index 0000000..393b2eb
--- /dev/null
+++ b/build/android/pylib/restart_adbd.sh
@@ -0,0 +1,20 @@
+#!/system/bin/sh
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android shell script to restart adbd on the device. This has to be run
+# atomically as a shell script because stopping adbd prevents further commands
+# from running (even if called in the same adb shell).
+
+trap '' HUP
+trap '' TERM
+trap '' PIPE
+
+function restart() {
+  stop adbd
+  start adbd
+}
+
+restart &
diff --git a/build/android/pylib/results/__init__.py b/build/android/pylib/results/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/results/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/results/flakiness_dashboard/__init__.py b/build/android/pylib/results/flakiness_dashboard/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/results/flakiness_dashboard/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/results/flakiness_dashboard/json_results_generator.py b/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
new file mode 100644
index 0000000..e5c433d
--- /dev/null
+++ b/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
@@ -0,0 +1,697 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# Tools/Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py
+# Tools/Scripts/webkitpy/common/net/file_uploader.py
+#
+
+import json
+import logging
+import mimetypes
+import os
+import time
+import urllib2
+
+_log = logging.getLogger(__name__)
+
+_JSON_PREFIX = 'ADD_RESULTS('
+_JSON_SUFFIX = ');'
+
+
+def HasJSONWrapper(string):
+  return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX)
+
+
+def StripJSONWrapper(json_content):
+  # FIXME: Kill this code once the server returns json instead of jsonp.
+  if HasJSONWrapper(json_content):
+    return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)]
+  return json_content
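+
+# For example: StripJSONWrapper('ADD_RESULTS({"version":4});') returns
+# '{"version":4}'; input without the JSONP wrapper passes through unchanged.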
+
+
+def WriteJSON(json_object, file_path, callback=None):
+  # Specify separators in order to get compact encoding.
+  json_string = json.dumps(json_object, separators=(',', ':'))
+  if callback:
+    json_string = callback + '(' + json_string + ');'
+  with open(file_path, 'w') as fp:
+    fp.write(json_string)
+
+
+def ConvertTrieToFlatPaths(trie, prefix=None):
+  """Flattens the trie of paths, prepending a prefix to each."""
+  result = {}
+  for name, data in trie.iteritems():
+    if prefix:
+      name = prefix + '/' + name
+
+    if data and 'results' not in data:
+      result.update(ConvertTrieToFlatPaths(data, name))
+    else:
+      result[name] = data
+
+  return result
+
+
+def AddPathToTrie(path, value, trie):
+  """Inserts a single path and value into a directory trie structure."""
+  if '/' not in path:
+    trie[path] = value
+    return
+
+  directory, _slash, rest = path.partition('/')
+  if directory not in trie:
+    trie[directory] = {}
+  AddPathToTrie(rest, value, trie[directory])
+
+
+def TestTimingsTrie(individual_test_timings):
+  """Breaks a test name into dicts by directory
+
+  foo/bar/baz.html: 1ms
+  foo/bar/baz1.html: 3ms
+
+  becomes
+  foo: {
+      bar: {
+          baz.html: 1,
+          baz1.html: 3
+      }
+  }
+  """
+  trie = {}
+  for test_result in individual_test_timings:
+    test = test_result.test_name
+
+    AddPathToTrie(test, int(1000 * test_result.test_run_time), trie)
+
+  return trie
+
+
+class TestResult(object):
+  """A simple class that represents a single test result."""
+
+  # Test modifier constants.
+  (NONE, FAILS, FLAKY, DISABLED) = range(4)
+
+  def __init__(self, test, failed=False, elapsed_time=0):
+    self.test_name = test
+    self.failed = failed
+    self.test_run_time = elapsed_time
+
+    test_name = test
+    try:
+      test_name = test.split('.')[1]
+    except IndexError:
+      _log.warn('Invalid test name: %s.', test)
+
+    if test_name.startswith('FAILS_'):
+      self.modifier = self.FAILS
+    elif test_name.startswith('FLAKY_'):
+      self.modifier = self.FLAKY
+    elif test_name.startswith('DISABLED_'):
+      self.modifier = self.DISABLED
+    else:
+      self.modifier = self.NONE
+
+  def Fixable(self):
+    return self.failed or self.modifier == self.DISABLED
+
+
+class JSONResultsGeneratorBase(object):
+  """A JSON results generator for generic tests."""
+
+  MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750
+  # Min time (seconds) that will be added to the JSON.
+  MIN_TIME = 1
+
+  # Note that in non-chromium tests those chars are used to indicate
+  # test modifiers (FAILS, FLAKY, etc) but not actual test results.
+  PASS_RESULT = 'P'
+  SKIP_RESULT = 'X'
+  FAIL_RESULT = 'F'
+  FLAKY_RESULT = 'L'
+  NO_DATA_RESULT = 'N'
+
+  MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT,
+                      TestResult.DISABLED: SKIP_RESULT,
+                      TestResult.FAILS: FAIL_RESULT,
+                      TestResult.FLAKY: FLAKY_RESULT}
+
+  VERSION = 4
+  VERSION_KEY = 'version'
+  RESULTS = 'results'
+  TIMES = 'times'
+  BUILD_NUMBERS = 'buildNumbers'
+  TIME = 'secondsSinceEpoch'
+  TESTS = 'tests'
+
+  FIXABLE_COUNT = 'fixableCount'
+  FIXABLE = 'fixableCounts'
+  ALL_FIXABLE_COUNT = 'allFixableCount'
+
+  RESULTS_FILENAME = 'results.json'
+  TIMES_MS_FILENAME = 'times_ms.json'
+  INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json'
+
+  # line too long pylint: disable=line-too-long
+  URL_FOR_TEST_LIST_JSON = (
+      'http://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&master=%s')
+  # pylint: enable=line-too-long
+
+  def __init__(self, builder_name, build_name, build_number,
+               results_file_base_path, builder_base_url,
+               test_results_map, svn_repositories=None,
+               test_results_server=None,
+               test_type='',
+               master_name=''):
+    """Modifies the results.json file. Grabs it off the archive directory
+    if it is not found locally.
+
+    Args:
+      builder_name: the builder name (e.g. Webkit).
+      build_name: the build name (e.g. webkit-rel).
+      build_number: the build number.
+      results_file_base_path: Absolute path to the directory containing the
+          results json file.
+      builder_base_url: the URL where we have the archived test results.
+          If this is None no archived results will be retrieved.
+      test_results_map: A dictionary that maps test_name to TestResult.
+      svn_repositories: A sequence of (json_field_name, svn_path) pairs for
+          the SVN repositories that tests rely on. The SVN revision will be
+          included in the JSON with the given json_field_name.
+      test_results_server: server that hosts test results json.
+      test_type: test type string (e.g. 'layout-tests').
+      master_name: the name of the buildbot master.
+    """
+    self._builder_name = builder_name
+    self._build_name = build_name
+    self._build_number = build_number
+    self._builder_base_url = builder_base_url
+    self._results_directory = results_file_base_path
+
+    self._test_results_map = test_results_map
+    self._test_results = test_results_map.values()
+
+    self._svn_repositories = svn_repositories
+    if not self._svn_repositories:
+      self._svn_repositories = {}
+
+    self._test_results_server = test_results_server
+    self._test_type = test_type
+    self._master_name = master_name
+
+    self._archived_results = None
+
+  def GenerateJSONOutput(self):
+    json_object = self.GetJSON()
+    if json_object:
+      file_path = os.path.join(self._results_directory,
+                               self.INCREMENTAL_RESULTS_FILENAME)
+      WriteJSON(json_object, file_path)
+
+  def GenerateTimesMSFile(self):
+    times = TestTimingsTrie(self._test_results_map.values())
+    file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME)
+    WriteJSON(times, file_path)
+
+  def GetJSON(self):
+    """Gets the results for the results.json file."""
+    results_json, error = self._GetArchivedJSONResults()
+    if error:
+      # If there was an error, don't write a results.json file at all, as it
+      # would lose all the information on the bot.
+      _log.error('Archive directory is inaccessible. Not modifying or '
+                 'clobbering the results.json file: ' + str(error))
+      return None
+
+    builder_name = self._builder_name
+    if results_json and builder_name not in results_json:
+      _log.debug('Builder name (%s) is not in the results.json file.',
+                 builder_name)
+
+    self._ConvertJSONToCurrentVersion(results_json)
+
+    if builder_name not in results_json:
+      results_json[builder_name] = self._CreateResultsForBuilderJSON()
+
+    results_for_builder = results_json[builder_name]
+
+    if builder_name:
+      self._InsertGenericMetaData(results_for_builder)
+
+    self._InsertFailureSummaries(results_for_builder)
+
+    # Update all the failing tests with result type and time.
+    tests = results_for_builder[self.TESTS]
+    all_failing_tests = self._GetFailedTestNames()
+    all_failing_tests.update(ConvertTrieToFlatPaths(tests))
+
+    for test in all_failing_tests:
+      self._InsertTestTimeAndResult(test, tests)
+
+    return results_json
+
+  def SetArchivedResults(self, archived_results):
+    self._archived_results = archived_results
+
+  def UploadJSONFiles(self, json_files):
+    """Uploads the given json_files to the test_results_server (if the
+    test_results_server is given)."""
+    if not self._test_results_server:
+      return
+
+    if not self._master_name:
+      _log.error(
+          '--test-results-server was set, but --master-name was not.  Not '
+          'uploading JSON files.')
+      return
+
+    _log.info('Uploading JSON files for builder: %s', self._builder_name)
+    attrs = [('builder', self._builder_name),
+             ('testtype', self._test_type),
+             ('master', self._master_name)]
+
+    files = [(json_file, os.path.join(self._results_directory, json_file))
+             for json_file in json_files]
+
+    url = 'http://%s/testfile/upload' % self._test_results_server
+    # Set uploading timeout in case appengine server is having problems.
+    # 120 seconds are more than enough to upload test results.
+    uploader = _FileUploader(url, 120)
+    try:
+      response = uploader.UploadAsMultipartFormData(files, attrs)
+      if response:
+        if response.code == 200:
+          _log.info('JSON uploaded.')
+        else:
+          _log.debug(
+              "JSON upload failed, %d: '%s'",
+              response.code, response.read())
+      else:
+        _log.error('JSON upload failed; no response returned')
+    except Exception as err:
+      _log.error('Upload failed: %s', err)
+
+  def _GetTestTiming(self, test_name):
+    """Returns test timing data (elapsed time) in second
+    for the given test_name."""
+    if test_name in self._test_results_map:
+      # Floor for now to get time in seconds.
+      return int(self._test_results_map[test_name].test_run_time)
+    return 0
+
+  def _GetFailedTestNames(self):
+    """Returns a set of failed test names."""
+    return set(r.test_name for r in self._test_results if r.failed)
+
+  def _GetModifierChar(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier in self.MODIFIER_TO_CHAR:
+      return self.MODIFIER_TO_CHAR[test_result.modifier]
+
+    return self.__class__.PASS_RESULT
+
+  def _GetResultChar(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier == TestResult.DISABLED:
+      return self.__class__.SKIP_RESULT
+
+    if test_result.failed:
+      return self.__class__.FAIL_RESULT
+
+    return self.__class__.PASS_RESULT
+
+  def _GetSVNRevision(self, in_directory):
+    """Returns the svn revision for the given directory.
+
+    Args:
+      in_directory: The directory where svn is to be run.
+    """
+    # This is overridden in flakiness_dashboard_results_uploader.py.
+    raise NotImplementedError()
+
+  def _GetArchivedJSONResults(self):
+    """Download JSON file that only contains test
+    name list from test-results server. This is for generating incremental
+    JSON so the file generated has info for tests that failed before but
+    pass or are skipped from current run.
+
+    Returns (archived_results, error) tuple where error is None if results
+    were successfully read.
+    """
+    results_json = {}
+    old_results = None
+    error = None
+
+    if not self._test_results_server:
+      return {}, None
+
+    results_file_url = (self.URL_FOR_TEST_LIST_JSON %
+                        (urllib2.quote(self._test_results_server),
+                         urllib2.quote(self._builder_name),
+                         self.RESULTS_FILENAME,
+                         urllib2.quote(self._test_type),
+                         urllib2.quote(self._master_name)))
+
+    try:
+      # FIXME: We should talk to the network via a Host object.
+      results_file = urllib2.urlopen(results_file_url)
+      old_results = results_file.read()
+    except urllib2.HTTPError as http_error:
+      # A non-4xx status code means the bot is hosed for some reason
+      # and we can't grab the results.json file off of it.
+      if http_error.code < 400 or http_error.code >= 500:
+        error = http_error
+    except urllib2.URLError as url_error:
+      error = url_error
+
+    if old_results:
+      # Strip the prefix and suffix so we can get the actual JSON object.
+      old_results = StripJSONWrapper(old_results)
+
+      try:
+        results_json = json.loads(old_results)
+      except Exception:
+        _log.debug('results.json was not valid JSON. Clobbering.')
+        # The JSON file is not valid JSON. Just clobber the results.
+        results_json = {}
+    else:
+      _log.debug('Old JSON results do not exist. Starting fresh.')
+      results_json = {}
+
+    return results_json, error
+
+  def _InsertFailureSummaries(self, results_for_builder):
+    """Inserts aggregate pass/failure statistics into the JSON.
+    This method reads self._test_results and generates
+    FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+    """
+    # Insert the number of tests that failed or skipped.
+    fixable_count = len([r for r in self._test_results if r.Fixable()])
+    self._InsertItemIntoRawList(results_for_builder,
+                                fixable_count, self.FIXABLE_COUNT)
+
+    # Create a test modifiers (FAILS, FLAKY etc) summary dictionary.
+    entry = {}
+    for test_name in self._test_results_map.iterkeys():
+      result_char = self._GetModifierChar(test_name)
+      entry[result_char] = entry.get(result_char, 0) + 1
+
+    # Insert the pass/skip/failure summary dictionary.
+    self._InsertItemIntoRawList(results_for_builder, entry,
+                                self.FIXABLE)
+
+    # Insert the number of all the tests that are supposed to pass.
+    all_test_count = len(self._test_results)
+    self._InsertItemIntoRawList(results_for_builder,
+                                all_test_count, self.ALL_FIXABLE_COUNT)
+
+  def _InsertItemIntoRawList(self, results_for_builder, item, key):
+    """Inserts the item into the list with the given key in the results for
+    this builder. Creates the list if no such list exists.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+      item: Number or string to insert into the list.
+      key: Key in results_for_builder for the list to insert into.
+    """
+    if key in results_for_builder:
+      raw_list = results_for_builder[key]
+    else:
+      raw_list = []
+
+    raw_list.insert(0, item)
+    raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG]
+    results_for_builder[key] = raw_list
+
+  def _InsertItemRunLengthEncoded(self, item, encoded_results):
+    """Inserts the item into the run-length encoded results.
+
+    Args:
+      item: String or number to insert.
+      encoded_results: run-length encoded results. An array of arrays, e.g.
+          [[3,'A'],[1,'Q']] encodes AAAQ.
+    """
+    if encoded_results and item == encoded_results[0][1]:
+      num_results = encoded_results[0][0]
+      if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+        encoded_results[0][0] = num_results + 1
+    else:
+      # Use a list instead of a class for the run-length encoding since
+      # we want the serialized form to be concise.
+      encoded_results.insert(0, [1, item])
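+
+  # For example: inserting 'A' into [[3,'A'],[1,'Q']] (AAAQ) yields
+  # [[4,'A'],[1,'Q']] (AAAAQ); inserting 'Q' yields [[1,'Q'],[3,'A'],[1,'Q']]
+  # (QAAAQ).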
+
+  def _InsertGenericMetaData(self, results_for_builder):
+    """ Inserts generic metadata (such as version number, current time etc)
+    into the JSON.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for
+          a single builder.
+    """
+    self._InsertItemIntoRawList(results_for_builder,
+                                self._build_number, self.BUILD_NUMBERS)
+
+    # Include SVN revisions for the given repositories.
+    for (name, path) in self._svn_repositories:
+      # Note: for JSON file's backward-compatibility we use 'chrome' rather
+      # than 'chromium' here.
+      lowercase_name = name.lower()
+      if lowercase_name == 'chromium':
+        lowercase_name = 'chrome'
+      self._InsertItemIntoRawList(results_for_builder,
+                                  self._GetSVNRevision(path),
+                                  lowercase_name + 'Revision')
+
+    self._InsertItemIntoRawList(results_for_builder,
+                                int(time.time()),
+                                self.TIME)
+
+  def _InsertTestTimeAndResult(self, test_name, tests):
+    """ Insert a test item with its results to the given tests dictionary.
+
+    Args:
+      tests: Dictionary containing test result entries.
+    """
+
+    result = self._GetResultChar(test_name)
+    test_time = self._GetTestTiming(test_name)
+
+    this_test = tests
+    for segment in test_name.split('/'):
+      if segment not in this_test:
+        this_test[segment] = {}
+      this_test = this_test[segment]
+
+    if not this_test:
+      self._PopulateResultsAndTimesJSON(this_test)
+
+    if self.RESULTS in this_test:
+      self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS])
+    else:
+      this_test[self.RESULTS] = [[1, result]]
+
+    if self.TIMES in this_test:
+      self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES])
+    else:
+      this_test[self.TIMES] = [[1, test_time]]
+
+  def _ConvertJSONToCurrentVersion(self, results_json):
+    """If the JSON does not match the current version, converts it to the
+    current version and adds in the new version number.
+    """
+    if self.VERSION_KEY in results_json:
+      archive_version = results_json[self.VERSION_KEY]
+      if archive_version == self.VERSION:
+        return
+    else:
+      archive_version = 3
+
+    # version 3->4
+    if archive_version == 3:
+      for results in results_json.values():
+        self._ConvertTestsToTrie(results)
+
+    results_json[self.VERSION_KEY] = self.VERSION
+
+  def _ConvertTestsToTrie(self, results):
+    if self.TESTS not in results:
+      return
+
+    test_results = results[self.TESTS]
+    test_results_trie = {}
+    for test in test_results.iterkeys():
+      single_test_result = test_results[test]
+      AddPathToTrie(test, single_test_result, test_results_trie)
+
+    results[self.TESTS] = test_results_trie
+
+  def _PopulateResultsAndTimesJSON(self, results_and_times):
+    results_and_times[self.RESULTS] = []
+    results_and_times[self.TIMES] = []
+    return results_and_times
+
+  def _CreateResultsForBuilderJSON(self):
+    results_for_builder = {}
+    results_for_builder[self.TESTS] = {}
+    return results_for_builder
+
+  def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list):
+    """Removes items from the run-length encoded list after the final
+    item that exceeds the max number of builds to track.
+
+    Args:
+      encoded_list: run-length encoded results. An array of arrays, e.g.
+          [[3,'A'],[1,'Q']] encodes AAAQ.
+    """
+    num_builds = 0
+    index = 0
+    for result in encoded_list:
+      num_builds += result[0]
+      index += 1
+      if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+        return encoded_list[:index]
+    return encoded_list
+
+  def _NormalizeResultsJSON(self, test, test_name, tests):
+    """ Prune tests where all runs pass or tests that no longer exist and
+    truncate all results to maxNumberOfBuilds.
+
+    Args:
+      test: ResultsAndTimes object for this test.
+      test_name: Name of the test.
+      tests: The JSON object with all the test results for this builder.
+    """
+    test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds(
+        test[self.RESULTS])
+    test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds(
+        test[self.TIMES])
+
+    is_all_pass = self._IsResultsAllOfType(test[self.RESULTS],
+                                           self.PASS_RESULT)
+    is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS],
+                                              self.NO_DATA_RESULT)
+    max_time = max(test_time[1] for test_time in test[self.TIMES])
+
+    # Remove all passes/no-data from the results to reduce noise and
+    # filesize. If a test passes every run, but takes > MIN_TIME to run,
+    # don't throw away the data.
+    if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME):
+      del tests[test_name]
+
+  # method could be a function pylint: disable=R0201
+  def _IsResultsAllOfType(self, results, result_type):
+    """Returns whether all the results are of the given type
+    (e.g. all passes)."""
+    return len(results) == 1 and results[0][1] == result_type
+
+
+class _FileUploader(object):
+
+  def __init__(self, url, timeout_seconds):
+    self._url = url
+    self._timeout_seconds = timeout_seconds
+
+  def UploadAsMultipartFormData(self, files, attrs):
+    file_objs = []
+    for filename, path in files:
+      with open(path, 'rb') as fp:
+        file_objs.append(('file', filename, fp.read()))
+
+    # FIXME: We should use the same variable names for the formal and actual
+    # parameters.
+    content_type, data = _EncodeMultipartFormData(attrs, file_objs)
+    return self._UploadData(content_type, data)
+
+  def _UploadData(self, content_type, data):
+    start = time.time()
+    end = start + self._timeout_seconds
+    while time.time() < end:
+      try:
+        request = urllib2.Request(self._url, data,
+                                  {'Content-Type': content_type})
+        return urllib2.urlopen(request)
+      except urllib2.HTTPError as e:
+        _log.warn("Received HTTP status %s loading \"%s\".  "
+                  'Retrying in 10 seconds...' % (e.code, e.filename))
+        time.sleep(10)
+
+
+def _GetMIMEType(filename):
+  return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
+# FIXME: Rather than taking tuples, this function should take more
+# structured data.
+def _EncodeMultipartFormData(fields, files):
+  """Encode form fields for multipart/form-data.
+
+  Args:
+    fields: A sequence of (name, value) elements for regular form fields.
+    files: A sequence of (name, filename, value) elements for data to be
+           uploaded as files.
+  Returns:
+    (content_type, body) ready for httplib.HTTP instance.
+
+  Source:
+    http://code.google.com/p/rietveld/source/browse/trunk/upload.py
+  """
+  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
+  CRLF = '\r\n'
+  lines = []
+
+  for key, value in fields:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"' % key)
+    lines.append('')
+    if isinstance(value, unicode):
+      value = value.encode('utf-8')
+    lines.append(value)
+
+  for key, filename, value in files:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"; '
+                 'filename="%s"' % (key, filename))
+    lines.append('Content-Type: %s' % _GetMIMEType(filename))
+    lines.append('')
+    if isinstance(value, unicode):
+      value = value.encode('utf-8')
+    lines.append(value)
+
+  lines.append('--' + BOUNDARY + '--')
+  lines.append('')
+  body = CRLF.join(lines)
+  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
+  return content_type, body
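+
+
+# Illustrative fragment of an encoded body (CRLF separators shown as line
+# breaks; field name and value hypothetical):
+#   ---M-A-G-I-C---B-O-U-N-D-A-R-Y-
+#   Content-Disposition: form-data; name="builder"
+#
+#   WebKit Linux
+#   ---M-A-G-I-C---B-O-U-N-D-A-R-Y---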
diff --git a/build/android/pylib/results/flakiness_dashboard/results_uploader.py b/build/android/pylib/results/flakiness_dashboard/results_uploader.py
new file mode 100644
index 0000000..b86d7ac
--- /dev/null
+++ b/build/android/pylib/results/flakiness_dashboard/results_uploader.py
@@ -0,0 +1,181 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Uploads the results to the flakiness dashboard server."""
+# pylint: disable=E1002,R0201
+
+import logging
+import os
+import shutil
+import tempfile
+import xml.dom.minidom
+import xml.parsers.expat
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.results.flakiness_dashboard import json_results_generator
+from pylib.utils import repo_utils
+
+
+class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase):
+  """Writes test results to a JSON file and handles uploading that file to
+  the test results server.
+  """
+  def __init__(self, builder_name, build_name, build_number, tmp_folder,
+               test_results_map, test_results_server, test_type, master_name):
+    super(JSONResultsGenerator, self).__init__(
+        builder_name=builder_name,
+        build_name=build_name,
+        build_number=build_number,
+        results_file_base_path=tmp_folder,
+        builder_base_url=None,
+        test_results_map=test_results_map,
+        svn_repositories=(('webkit', 'third_party/WebKit'),
+                          ('chrome', '.')),
+        test_results_server=test_results_server,
+        test_type=test_type,
+        master_name=master_name)
+
+  #override
+  def _GetModifierChar(self, test_name):
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    return self._test_results_map[test_name].modifier
+
+  #override
+  def _GetSVNRevision(self, in_directory):
+    """Returns the git/svn revision for the given directory.
+
+    Args:
+      in_directory: The directory relative to src.
+    """
+    def _is_git_directory(in_directory):
+      """Returns true if the given directory is in a git repository.
+
+      Args:
+        in_directory: The directory path to be tested.
+      """
+      if os.path.exists(os.path.join(in_directory, '.git')):
+        return True
+      parent = os.path.dirname(in_directory)
+      if parent == constants.DIR_SOURCE_ROOT or parent == in_directory:
+        return False
+      return _is_git_directory(parent)
+
+    in_directory = os.path.join(constants.DIR_SOURCE_ROOT, in_directory)
+
+    if not os.path.exists(os.path.join(in_directory, '.svn')):
+      if _is_git_directory(in_directory):
+        return repo_utils.GetGitHeadSHA1(in_directory)
+      else:
+        return ''
+
+    output = cmd_helper.GetCmdOutput(['svn', 'info', '--xml'], cwd=in_directory)
+    try:
+      dom = xml.dom.minidom.parseString(output)
+      return dom.getElementsByTagName('entry')[0].getAttribute('revision')
+    except xml.parsers.expat.ExpatError:
+      return ''
+
+
+class ResultsUploader(object):
+  """Handles uploading buildbot tests results to the flakiness dashboard."""
+  def __init__(self, tests_type):
+    self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
+    self._builder_name = os.environ.get('BUILDBOT_BUILDERNAME')
+    self._tests_type = tests_type
+
+    if not self._build_number or not self._builder_name:
+      raise Exception('You should not be uploading test results to the server '
+                      'from your local machine.')
+
+    upstream = (tests_type != 'Chromium_Android_Instrumentation')
+    if upstream:
+      # TODO(frankf): Use factory properties (see buildbot/bb_device_steps.py)
+      # This requires passing the actual master name (e.g. 'ChromiumFYI' not
+      # 'chromium.fyi').
+      from slave import slave_utils # pylint: disable=F0401
+      self._build_name = slave_utils.SlaveBuildName(constants.DIR_SOURCE_ROOT)
+      self._master_name = slave_utils.GetActiveMaster()
+    else:
+      self._build_name = 'chromium-android'
+      buildbot_branch = os.environ.get('BUILDBOT_BRANCH')
+      if not buildbot_branch:
+        buildbot_branch = 'master'
+      else:
+        # Ensure there's no leading "origin/"
+        buildbot_branch = buildbot_branch[buildbot_branch.find('/') + 1:]
+      self._master_name = '%s-%s' % (self._build_name, buildbot_branch)
+
+    self._test_results_map = {}
+
+  def AddResults(self, test_results):
+    # TODO(frankf): Differentiate between fail/crash/timeouts.
+    conversion_map = [
+        (test_results.GetPass(), False,
+            json_results_generator.JSONResultsGeneratorBase.PASS_RESULT),
+        (test_results.GetFail(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetCrash(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetTimeout(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetUnknown(), True,
+            json_results_generator.JSONResultsGeneratorBase.NO_DATA_RESULT),
+        ]
+
+    for results_list, failed, modifier in conversion_map:
+      for single_test_result in results_list:
+        test_result = json_results_generator.TestResult(
+            test=single_test_result.GetName(),
+            failed=failed,
+            elapsed_time=single_test_result.GetDuration() / 1000)
+        # The WebKit TestResult object sets the modifier based on test name.
+        # Since we don't use the same test naming convention as WebKit the
+        # modifier will be wrong, so we need to overwrite it.
+        test_result.modifier = modifier
+
+        self._test_results_map[single_test_result.GetName()] = test_result
+
+  def Upload(self, test_results_server):
+    if not self._test_results_map:
+      return
+
+    tmp_folder = tempfile.mkdtemp()
+
+    try:
+      results_generator = JSONResultsGenerator(
+          builder_name=self._builder_name,
+          build_name=self._build_name,
+          build_number=self._build_number,
+          tmp_folder=tmp_folder,
+          test_results_map=self._test_results_map,
+          test_results_server=test_results_server,
+          test_type=self._tests_type,
+          master_name=self._master_name)
+
+      json_files = ["incremental_results.json", "times_ms.json"]
+      results_generator.GenerateJSONOutput()
+      results_generator.GenerateTimesMSFile()
+      results_generator.UploadJSONFiles(json_files)
+    except Exception as e:
+      logging.error("Uploading results to test server failed: %s." % e)
+    finally:
+      shutil.rmtree(tmp_folder)
+
+
+def Upload(results, flakiness_dashboard_server, test_type):
+  """Reports test results to the flakiness dashboard for Chrome for Android.
+
+  Args:
+    results: test results.
+    flakiness_dashboard_server: the server to upload the results to.
+    test_type: the type of the tests (as displayed by the flakiness dashboard).
+  """
+  uploader = ResultsUploader(test_type)
+  uploader.AddResults(results)
+  uploader.Upload(flakiness_dashboard_server)
diff --git a/build/android/pylib/results/json_results.py b/build/android/pylib/results/json_results.py
new file mode 100644
index 0000000..65664e3
--- /dev/null
+++ b/build/android/pylib/results/json_results.py
@@ -0,0 +1,139 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+
+from pylib.base import base_test_result
+
+
+def GenerateResultsDict(test_run_result):
+  """Create a results dict from |test_run_result| suitable for writing to JSON.
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+  Returns:
+    A results dict that mirrors the one generated by
+      base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+  """
+  # Example json output.
+  # {
+  #   "global_tags": [],
+  #   "all_tests": [
+  #     "test1",
+  #     "test2",
+  #   ],
+  #   "disabled_tests": [],
+  #   "per_iteration_data": [
+  #     {
+  #       "test1": [
+  #         {
+  #           "status": "SUCCESS",
+  #           "elapsed_time_ms": 1,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #       ],
+  #       "test2": [
+  #         {
+  #           "status": "FAILURE",
+  #           "elapsed_time_ms": 12,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #       ],
+  #     },
+  #   ],
+  # }
+
+  assert isinstance(test_run_result, base_test_result.TestRunResults)
+
+  def status_as_string(s):
+    if s == base_test_result.ResultType.PASS:
+      return 'SUCCESS'
+    elif s == base_test_result.ResultType.SKIP:
+      return 'SKIPPED'
+    elif s == base_test_result.ResultType.FAIL:
+      return 'FAILURE'
+    elif s == base_test_result.ResultType.CRASH:
+      return 'CRASH'
+    elif s == base_test_result.ResultType.TIMEOUT:
+      return 'TIMEOUT'
+    elif s == base_test_result.ResultType.UNKNOWN:
+      return 'UNKNOWN'
+
+  def generate_iteration_data(t):
+    return {
+      t.GetName(): [
+        {
+          'status': status_as_string(t.GetType()),
+          'elapsed_time_ms': t.GetDuration(),
+          'output_snippet': '',
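+          # N.B. 'losless_snippet' (sic) matches the misspelled key emitted
+          # by test_results_tracker.cc, per the example above.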
+          'losless_snippet': '',
+          'output_snippet_base64': '',
+        }
+      ]
+    }
+
+  # Materialize the pairs first: zip(*[]) on an empty run would fail to
+  # unpack into two names.
+  name_data_pairs = [(t.GetName(), generate_iteration_data(t))
+                     for t in test_run_result.GetAll()]
+  all_tests_tuple = tuple(p[0] for p in name_data_pairs)
+  per_iteration_data_tuple = tuple(p[1] for p in name_data_pairs)
+
+  return {
+    'global_tags': [],
+    'all_tests': list(all_tests_tuple),
+    # TODO(jbudorick): Add support for disabled tests within base_test_result.
+    'disabled_tests': [],
+    'per_iteration_data': list(per_iteration_data_tuple),
+  }
+
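+# Illustrative usage sketch (not part of the module; the test name is
+# hypothetical):
+#
+#   run = base_test_result.TestRunResults()
+#   run.AddResult(base_test_result.BaseTestResult(
+#       'org.chromium.FooTest#testBar', base_test_result.ResultType.PASS,
+#       duration=42))
+#   GenerateResultsDict(run)['all_tests']
+#   # -> ['org.chromium.FooTest#testBar']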
+
+def GenerateJsonResultsFile(test_run_result, file_path):
+  """Write |test_run_result| to JSON.
+
+  This emulates the format of the JSON emitted by
+  base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+    file_path: The path to the JSON file to write.
+  """
+  with open(file_path, 'w') as json_result_file:
+    json_result_file.write(json.dumps(GenerateResultsDict(test_run_result)))
+
+
+def ParseResultsFromJson(json_results):
+  """Creates a list of BaseTestResult objects from JSON.
+
+  Args:
+    json_results: A JSON dict in the format created by
+                  GenerateJsonResultsFile.
+
+  Returns:
+    A list of base_test_result.BaseTestResult objects.
+  """
+
+  def string_as_status(s):
+    if s == 'SUCCESS':
+      return base_test_result.ResultType.PASS
+    elif s == 'SKIPPED':
+      return base_test_result.ResultType.SKIP
+    elif s == 'FAILURE':
+      return base_test_result.ResultType.FAIL
+    elif s == 'CRASH':
+      return base_test_result.ResultType.CRASH
+    elif s == 'TIMEOUT':
+      return base_test_result.ResultType.TIMEOUT
+    else:
+      return base_test_result.ResultType.UNKNOWN
+
+  results_list = []
+  testsuite_runs = json_results['per_iteration_data']
+  for testsuite_run in testsuite_runs:
+    for test, test_runs in testsuite_run.iteritems():
+      results_list.extend(
+          [base_test_result.BaseTestResult(test,
+                                           string_as_status(tr['status']),
+                                           duration=tr['elapsed_time_ms'])
+          for tr in test_runs])
+  return results_list
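+
+# Round-trip sketch (illustrative; continues the hypothetical example above):
+#
+#   parsed = ParseResultsFromJson(GenerateResultsDict(run))
+#   [r.GetName() for r in parsed]
+#   # -> ['org.chromium.FooTest#testBar']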
+
diff --git a/build/android/pylib/results/json_results_test.py b/build/android/pylib/results/json_results_test.py
new file mode 100755
index 0000000..1bc730d
--- /dev/null
+++ b/build/android/pylib/results/json_results_test.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.results import json_results
+
+
+class JsonResultsTest(unittest.TestCase):
+
+  def testGenerateResultsDict_passedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict(all_results)
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('SUCCESS', test_iteration_result['status'])
+
+  def testGenerateResultsDict_skippedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.SKIP)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict(all_results)
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('SKIPPED', test_iteration_result['status'])
+
+  def testGenerateResultsDict_failedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.FAIL)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict(all_results)
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('FAILURE', test_iteration_result['status'])
+
+  def testGenerateResultsDict_duration(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.PASS, duration=123)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict(all_results)
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('elapsed_time_ms' in test_iteration_result)
+    self.assertEquals(123, test_iteration_result['elapsed_time_ms'])
+
+  def testGenerateResultsDict_multipleResults(self):
+    result1 = base_test_result.BaseTestResult(
+        'test.package.TestName1', base_test_result.ResultType.PASS)
+    result2 = base_test_result.BaseTestResult(
+        'test.package.TestName2', base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result1)
+    all_results.AddResult(result2)
+
+    results_dict = json_results.GenerateResultsDict(all_results)
+    self.assertEquals(
+        ['test.package.TestName1', 'test.package.TestName2'],
+        results_dict['all_tests'])
+    self.assertEquals(2, len(results_dict['per_iteration_data']))
+
+    expected_tests = set([
+        'test.package.TestName1',
+        'test.package.TestName2',
+    ])
+
+    for iteration_result in results_dict['per_iteration_data']:
+      self.assertEquals(1, len(iteration_result))
+      name = iteration_result.keys()[0]
+      self.assertTrue(name in expected_tests)
+      expected_tests.remove(name)
+      self.assertEquals(1, len(iteration_result[name]))
+
+      test_iteration_result = iteration_result[name][0]
+      self.assertTrue('status' in test_iteration_result)
+      self.assertEquals('SUCCESS', test_iteration_result['status'])
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/results/report_results.py b/build/android/pylib/results/report_results.py
new file mode 100644
index 0000000..4fc6aa0
--- /dev/null
+++ b/build/android/pylib/results/report_results.py
@@ -0,0 +1,114 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing utility functions for reporting results."""
+
+import logging
+import os
+import re
+
+from pylib import constants
+from pylib.results.flakiness_dashboard import results_uploader
+
+
+def _LogToFile(results, test_type, suite_name):
+  """Log results to local files which can be used for aggregation later."""
+  log_file_path = os.path.join(constants.GetOutDirectory(), 'test_logs')
+  if not os.path.exists(log_file_path):
+    os.mkdir(log_file_path)
+  full_file_name = os.path.join(
+      log_file_path, re.sub(r'\W', '_', test_type).lower() + '.log')
+  if not os.path.exists(full_file_name):
+    with open(full_file_name, 'w') as log_file:
+      print >> log_file, '\n%s results for %s build %s:' % (
+          test_type, os.environ.get('BUILDBOT_BUILDERNAME'),
+          os.environ.get('BUILDBOT_BUILDNUMBER'))
+
+  logging.info('Writing results to %s.' % full_file_name)
+  with open(full_file_name, 'a') as log_file:
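+    # Keep at most 25 characters of the suite name; the 'and' expression
+    # appends '...' only when something was actually cut off.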
+    shortened_suite_name = suite_name[:25] + (suite_name[25:] and '...')
+    print >> log_file, '%s%s' % (shortened_suite_name.ljust(30),
+                                 results.GetShortForm())
+
+
+def _LogToFlakinessDashboard(results, test_type, test_package,
+                             flakiness_server):
+  """Upload results to the flakiness dashboard"""
+  logging.info('Upload results for test type "%s", test package "%s" to %s' %
+               (test_type, test_package, flakiness_server))
+
+  try:
+    if test_type == 'Instrumentation':
+      if flakiness_server == constants.UPSTREAM_FLAKINESS_SERVER:
+        assert test_package in ['ContentShellTest',
+                                'ChromePublicTest',
+                                'ChromeShellTest',
+                                'ChromeSyncShellTest',
+                                'AndroidWebViewTest']
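+        # N.B. rstrip('test') strips trailing characters drawn from the set
+        # {'t', 'e', 's'}, not the literal suffix; it happens to work for
+        # the package names asserted above.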
+        dashboard_test_type = ('%s_instrumentation_tests' %
+                               test_package.lower().rstrip('test'))
+      # Downstream server.
+      else:
+        dashboard_test_type = 'Chromium_Android_Instrumentation'
+
+    elif test_type == 'Unit test':
+      dashboard_test_type = test_package
+
+    else:
+      logging.warning('Invalid test type')
+      return
+
+    results_uploader.Upload(
+        results, flakiness_server, dashboard_test_type)
+
+  except Exception as e:
+    logging.error(e)
+
+
+def LogFull(results, test_type, test_package, annotation=None,
+            flakiness_server=None):
+  """Log the tests results for the test suite.
+
+  The results will be logged three different ways:
+    1. Log to stdout.
+    2. Log to local files for aggregating multiple test steps
+       (on buildbots only).
+    3. Log to flakiness dashboard (on buildbots only).
+
+  Args:
+    results: An instance of TestRunResults object.
+    test_type: Type of the test (e.g. 'Instrumentation', 'Unit test', etc.).
+    test_package: Test package name (e.g. 'ipc_tests' for gtests,
+                  'ContentShellTest' for instrumentation tests).
+    annotation: If instrumentation test type, this is a list of annotations
+                (e.g. ['Smoke', 'SmallTest']).
+    flakiness_server: If provided, upload the results to the flakiness
+                      dashboard at this URL.
+  """
+  if not results.DidRunPass():
+    logging.critical('*' * 80)
+    logging.critical('Detailed Logs')
+    logging.critical('*' * 80)
+    for line in results.GetLogs().splitlines():
+      logging.critical(line)
+  logging.critical('*' * 80)
+  logging.critical('Summary')
+  logging.critical('*' * 80)
+  for line in results.GetGtestForm().splitlines():
+    logging.critical(line)
+  logging.critical('*' * 80)
+
+  if os.environ.get('BUILDBOT_BUILDERNAME'):
+    # It is possible to have multiple buildbot steps for the same
+    # instrumentation test package using different annotations.
+    if annotation and len(annotation) == 1:
+      suite_name = annotation[0]
+    else:
+      suite_name = test_package
+    _LogToFile(results, test_type, suite_name)
+
+    if flakiness_server:
+      _LogToFlakinessDashboard(results, test_type, test_package,
+                               flakiness_server)
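+
+# Illustrative call (hypothetical suite): log gtest results locally and, when
+# running on a bot, to the flakiness dashboard:
+#
+#   LogFull(results, 'Unit test', 'ipc_tests',
+#           flakiness_server=constants.UPSTREAM_FLAKINESS_SERVER)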
diff --git a/build/android/pylib/screenshot.py b/build/android/pylib/screenshot.py
new file mode 100644
index 0000000..0fcc590
--- /dev/null
+++ b/build/android/pylib/screenshot.py
@@ -0,0 +1,99 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import tempfile
+import time
+
+from pylib import cmd_helper
+from pylib import device_signal
+from pylib.device import device_errors
+
+# TODO(jbudorick) Remove once telemetry gets switched over.
+import pylib.android_commands
+import pylib.device.device_utils
+
+
+class VideoRecorder(object):
+  """Records a screen capture video from an Android Device (KitKat or newer).
+
+  Args:
+    device: DeviceUtils instance.
+    host_file: Path to the video file to store on the host.
+    megabits_per_second: Video bitrate in megabits per second. Allowed range
+                         from 0.1 to 100 mbps.
+    size: Video frame size tuple (width, height) or None to use the device
+          default.
+    rotate: If True, the video will be rotated 90 degrees.
+  """
+  def __init__(self, device, megabits_per_second=4, size=None,
+               rotate=False):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    self._device = device
+    self._device_file = (
+        '%s/screen-recording.mp4' % device.GetExternalStoragePath())
+    self._recorder = None
+    self._recorder_stdout = None
+    self._is_started = False
+
+    self._args = ['adb']
+    if str(self._device):
+      self._args += ['-s', str(self._device)]
+    self._args += ['shell', 'screenrecord', '--verbose']
+    self._args += ['--bit-rate', str(megabits_per_second * 1000 * 1000)]
+    if size:
+      self._args += ['--size', '%dx%d' % size]
+    if rotate:
+      self._args += ['--rotate']
+    self._args += [self._device_file]
+
+  def Start(self):
+    """Start recording video."""
+    recorder_stdout_fd, self._recorder_stdout = tempfile.mkstemp()
+    os.close(recorder_stdout_fd)  # Only the path is needed; reopened below.
+    self._recorder = cmd_helper.Popen(
+        self._args, stdout=open(self._recorder_stdout, 'w'))
+    if not self._device.GetPids('screenrecord'):
+      raise RuntimeError('Recording failed. Is your device running Android '
+                         'KitKat or later?')
+
+  def IsStarted(self):
+    if not self._is_started:
+      for line in open(self._recorder_stdout):
+        self._is_started = line.startswith('Content area is ')
+        if self._is_started:
+          break
+    return self._is_started
+
+  def Stop(self):
+    """Stop recording video."""
+    os.remove(self._recorder_stdout)
+    self._is_started = False
+    if not self._recorder:
+      return
+    if not self._device.KillAll('screenrecord', signum=device_signal.SIGINT,
+                                quiet=True):
+      logging.warning('Nothing to kill: screenrecord was not running')
+    self._recorder.wait()
+
+  def Pull(self, host_file=None):
+    """Pull resulting video file from the device.
+
+    Args:
+      host_file: Path to the video file to store on the host.
+    Returns:
+      Output video file name on the host.
+    """
+    # TODO(jbudorick): Merge filename generation with the logic for doing so in
+    # DeviceUtils.
+    host_file_name = (
+        host_file
+        or 'screen-recording-%s.mp4' % time.strftime('%Y%m%dT%H%M%S',
+                                                     time.localtime()))
+    host_file_name = os.path.abspath(host_file_name)
+    self._device.PullFile(self._device_file, host_file_name)
+    self._device.RunShellCommand('rm -f "%s"' % self._device_file)
+    return host_file_name
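+
+# Illustrative usage sketch ('device' is an existing DeviceUtils instance):
+#
+#   recorder = VideoRecorder(device, megabits_per_second=8)
+#   recorder.Start()
+#   ...  # Exercise the device while recording.
+#   recorder.Stop()
+#   video_path = recorder.Pull()  # e.g. .../screen-recording-<timestamp>.mp4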
diff --git a/build/android/pylib/sdk/__init__.py b/build/android/pylib/sdk/__init__.py
new file mode 100644
index 0000000..50b23df
--- /dev/null
+++ b/build/android/pylib/sdk/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/sdk/aapt.py b/build/android/pylib/sdk/aapt.py
new file mode 100644
index 0000000..3d317ff
--- /dev/null
+++ b/build/android/pylib/sdk/aapt.py
@@ -0,0 +1,42 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module wraps the Android Asset Packaging Tool."""
+
+import os
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import timeout_retry
+
+_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
+
+def _RunAaptCmd(args):
+  """Runs an aapt command.
+
+  Args:
+    args: A list of arguments for aapt.
+
+  Returns:
+    The output of the command.
+  """
+  cmd = [_AAPT_PATH] + args
+  status, output = cmd_helper.GetCmdStatusAndOutput(cmd)
+  if status != 0:
+    raise Exception('Failed running aapt command: "%s" with output "%s".' %
+                    (' '.join(cmd), output))
+  return output
+
+def Dump(what, apk, assets=None):
+  """Returns the output of the aapt dump command.
+
+  Args:
+    what: What you want to dump.
+    apk: Path to apk you want to dump information for.
+    assets: List of assets in apk you want to dump information for.
+  """
+  assets = assets or []
+  if isinstance(assets, basestring):
+    assets = [assets]
+  return _RunAaptCmd(['dump', what, apk] + assets).splitlines()
\ No newline at end of file
diff --git a/build/android/pylib/sdk/dexdump.py b/build/android/pylib/sdk/dexdump.py
new file mode 100644
index 0000000..ec10aba
--- /dev/null
+++ b/build/android/pylib/sdk/dexdump.py
@@ -0,0 +1,30 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+from pylib import cmd_helper
+from pylib import constants
+
+_DEXDUMP_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'dexdump')
+
+def DexDump(dexfiles, file_summary=False):
+  """A wrapper around the Android SDK's dexdump tool.
+
+  Args:
+    dexfiles: The dexfile or list of dex files to dump.
+    file_summary: Display summary information from the file header. (-f)
+
+  Returns:
+    An iterable over the output lines.
+  """
+  # TODO(jbudorick): Add support for more options as necessary.
+  if isinstance(dexfiles, basestring):
+    dexfiles = [dexfiles]
+  args = [_DEXDUMP_PATH] + dexfiles
+  if file_summary:
+    args.append('-f')
+
+  return cmd_helper.IterCmdOutputLines(args)
+
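+# Illustrative usage (the dex file path is hypothetical):
+#
+#   for line in DexDump('out/Debug/test.dex', file_summary=True):
+#     print line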
diff --git a/build/android/pylib/sdk/split_select.py b/build/android/pylib/sdk/split_select.py
new file mode 100644
index 0000000..e204662
--- /dev/null
+++ b/build/android/pylib/sdk/split_select.py
@@ -0,0 +1,58 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module wraps Android's split-select tool."""
+
+import os
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import timeout_retry
+
+_SPLIT_SELECT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'split-select')
+
+def _RunSplitSelectCmd(args):
+  """Runs a split-select command.
+
+  Args:
+    args: A list of arguments for split-select.
+
+  Returns:
+    The output of the command.
+  """
+  cmd = [_SPLIT_SELECT_PATH] + args
+  status, output = cmd_helper.GetCmdStatusAndOutput(cmd)
+  if status != 0:
+    raise Exception('Failed running command "%s" with output "%s".' %
+                    (' '.join(cmd), output))
+  return output
+
+def _SplitConfig(device):
+  """Returns a config specifying which APK splits are required by the device.
+
+  Args:
+    device: A DeviceUtils object.
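+
+  Example return value (hypothetical device properties):
+    'en-rUS-xhdpi:armeabi-v7a'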
+  """
+  return ('%s-r%s-%s:%s' %
+          (device.language,
+           device.country,
+           device.screen_density,
+           device.product_cpu_abi))
+
+def SelectSplits(device, base_apk, split_apks):
+  """Determines which APK splits the device requires.
+
+  Args:
+    device: A DeviceUtils object.
+    base_apk: The path of the base APK.
+    split_apks: A list of paths of APK splits.
+
+  Returns:
+    The list of APK splits that the device requires.
+  """
+  config = _SplitConfig(device)
+  args = ['--target', config, '--base', base_apk]
+  for split in split_apks:
+    args.extend(['--split', split])
+  return _RunSplitSelectCmd(args).splitlines()
\ No newline at end of file
diff --git a/build/android/pylib/symbols/PRESUBMIT.py b/build/android/pylib/symbols/PRESUBMIT.py
new file mode 100644
index 0000000..b4d94ae
--- /dev/null
+++ b/build/android/pylib/symbols/PRESUBMIT.py
@@ -0,0 +1,21 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+def CommonChecks(input_api, output_api):
+  output = []
+  output.extend(input_api.canned_checks.RunPylint(input_api, output_api))
+  output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
+      input_api,
+      output_api,
+      input_api.PresubmitLocalPath(),
+      whitelist=[r'^.+_unittest\.py$']))
+  return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return CommonChecks(input_api, output_api)
\ No newline at end of file
diff --git a/build/android/pylib/symbols/__init__.py b/build/android/pylib/symbols/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/symbols/__init__.py
diff --git a/build/android/pylib/symbols/elf_symbolizer.py b/build/android/pylib/symbols/elf_symbolizer.py
new file mode 100644
index 0000000..374063a
--- /dev/null
+++ b/build/android/pylib/symbols/elf_symbolizer.py
@@ -0,0 +1,467 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import datetime
+import logging
+import multiprocessing
+import os
+import posixpath
+import Queue
+import re
+import subprocess
+import sys
+import threading
+import time
+
+
+# addr2line builds a possibly infinite memory cache that can exhaust
+# the computer's memory if allowed to grow for too long. This constant
+# controls how many lookups we do before restarting the process. 4000
+# gives near peak performance without extreme memory usage.
+ADDR2LINE_RECYCLE_LIMIT = 4000
+
+
+class ELFSymbolizer(object):
+  """An uber-fast (multiprocessing, pipelined and asynchronous) ELF symbolizer.
+
+  This class is a frontend for addr2line (part of GNU binutils), designed to
+  symbolize batches of large numbers of symbols for a given ELF file. It
+  supports sharding symbolization against many addr2line instances and
+  pipelining of multiple requests per each instance (in order to hide addr2line
+  internals and OS pipe latencies).
+
+  The interface exhibited by this class is a very simple asynchronous interface,
+  which is based on the following three methods:
+  - SymbolizeAsync(): used to request (enqueue) resolution of a given address.
+  - The |callback| method: used to communicate back the symbol information.
+  - Join(): called to conclude the batch to gather the last outstanding results.
+  In essence, before the Join method returns, this class will have issued as
+  many callbacks as the number of SymbolizeAsync() calls. In this regard, note
+  that due to multiprocess sharding, callbacks can be delivered out of order.
+
+  Some background about addr2line:
+  - it is invoked passing the elf path in the cmdline, piping the addresses in
+    its stdin and getting results on its stdout.
+  - it has pretty large response times for the first requests, but it
+    works very well in streaming mode once it has been warmed up.
+  - it doesn't scale by itself (on more cores). However, spawning multiple
+    instances at the same time on the same file is pretty efficient as they
+    keep hitting the pagecache and become mostly CPU bound.
+  - it might hang or crash, mostly for OOM. This class deals with both of these
+    problems.
+
+  Despite the "scary" imports and the multi* words above, (almost) no multi-
+  threading/processing is involved from the python viewpoint. Concurrency
+  here is achieved by spawning several addr2line subprocesses and handling their
+  output pipes asynchronously. Therefore, all the code here (with the exception
+  of the Queue instance in Addr2Line) should be free from mind-blowing
+  thread-safety concerns.
+
+  The multiprocess sharding works as follows:
+  The symbolizer tries to use as few addr2line instances as possible (with
+  respect to |max_concurrent_jobs|) and enqueues all the requests in a single
+  addr2line instance. For a few symbols (i.e. dozens) sharding isn't worth
+  the startup cost.
+  The multiprocess logic kicks in as soon as the queues for the existing
+  instances grow. Specifically, once all the existing instances reach the
+  |max_queue_size| bound, a new addr2line instance is kicked in.
+  In the case of a very eager producer (i.e. all |max_concurrent_jobs| instances
+  have a backlog of |max_queue_size|), back-pressure is applied on the caller by
+  blocking the SymbolizeAsync method.
+
+  This module has been deliberately designed to be dependency free (w.r.t.
+  other modules in this project), to allow easy reuse in external projects.
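+
+  Example usage (an illustrative sketch; paths and addresses are
+  hypothetical):
+
+    def on_symbolized(sym_info, addr):
+      print '0x%x -> %s' % (addr, sym_info)
+
+    symbolizer = ELFSymbolizer('out/Release/lib/libfoo.so',
+                               'toolchain/bin/addr2line', on_symbolized)
+    for addr in (0x1000, 0x2000):
+      symbolizer.SymbolizeAsync(addr, callback_arg=addr)
+    symbolizer.Join()  # Returns after all callbacks have fired.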
+  """
+
+  def __init__(self, elf_file_path, addr2line_path, callback, inlines=False,
+      max_concurrent_jobs=None, addr2line_timeout=30, max_queue_size=50,
+      source_root_path=None, strip_base_path=None):
+    """Args:
+      elf_file_path: path of the elf file to be symbolized.
+      addr2line_path: path of the toolchain's addr2line binary.
+      callback: a callback which will be invoked for each resolved symbol with
+          the two args (sym_info, callback_arg). The former is an instance of
+          |ELFSymbolInfo| and contains the symbol information. The latter is an
+          embedder-provided argument which is passed to SymbolizeAsync().
+      inlines: when True, the ELFSymbolInfo will contain also the details about
+          the outer inlining functions. When False, only the innermost function
+          will be provided.
+      max_concurrent_jobs: Max number of addr2line instances spawned.
+          Parallelize responsibly, addr2line is a memory and I/O monster.
+      max_queue_size: Max number of outstanding requests per addr2line instance.
+      addr2line_timeout: Max time (in seconds) to wait for an addr2line
+          response. After the timeout, the instance will be considered hung
+          and respawned.
+      source_root_path: In some toolchains only the name of the source file is
+          output, without any path information; disambiguation searches
+          through the source directory specified by the |source_root_path|
+          argument for files whose name matches, adding the full path
+          information to the
+          output. For example, if the toolchain outputs "unicode.cc" and there
+          is a file called "unicode.cc" located under |source_root_path|/foo,
+          the tool will replace "unicode.cc" with
+          "|source_root_path|/foo/unicode.cc". If there are multiple files with
+          the same name, disambiguation will fail because the tool cannot
+          determine which of the files was the source of the symbol.
+      strip_base_path: Rebases the symbols source paths onto |source_root_path|
+          (i.e. replace |strip_base_path| with |source_root_path|).
+    """
+    assert(os.path.isfile(addr2line_path)), 'Cannot find ' + addr2line_path
+    self.elf_file_path = elf_file_path
+    self.addr2line_path = addr2line_path
+    self.callback = callback
+    self.inlines = inlines
+    self.max_concurrent_jobs = (max_concurrent_jobs or
+                                min(multiprocessing.cpu_count(), 4))
+    self.max_queue_size = max_queue_size
+    self.addr2line_timeout = addr2line_timeout
+    self.requests_counter = 0  # For generating monotonic request IDs.
+    self._a2l_instances = []  # Up to |max_concurrent_jobs| _Addr2Line inst.
+
+    # If necessary, create disambiguation lookup table
+    self.disambiguate = source_root_path is not None
+    self.disambiguation_table = {}
+    self.strip_base_path = strip_base_path
+    if self.disambiguate:
+      self.source_root_path = os.path.abspath(source_root_path)
+      self._CreateDisambiguationTable()
+
+    # Create one addr2line instance. More instances will be created on demand
+    # (up to |max_concurrent_jobs|) depending on the rate of the requests.
+    self._CreateNewA2LInstance()
+
+  def SymbolizeAsync(self, addr, callback_arg=None):
+    """Requests symbolization of a given address.
+
+    This method is not guaranteed to return immediately. It generally does, but
+    in some scenarios (e.g. all addr2line instances have full queues) it can
+    block to create back-pressure.
+
+    Args:
+      addr: address to symbolize.
+      callback_arg: optional argument which will be passed to the |callback|."""
+    assert(isinstance(addr, int))
+
+    # Process all the symbols that have been resolved in the meanwhile.
+    # Essentially, this drains all the addr2line(s) out queues.
+    for a2l_to_purge in self._a2l_instances:
+      a2l_to_purge.ProcessAllResolvedSymbolsInQueue()
+      a2l_to_purge.RecycleIfNecessary()
+
+    # Find the best instance according to this logic:
+    # 1. Find an existing instance with the shortest queue.
+    # 2. If all the instances' queues are full, but there is room in the pool
+    #    (i.e. < |max_concurrent_jobs|) create a new instance.
+    # 3. If there were already |max_concurrent_jobs| instances and all of them
+    #    had full queues, make back-pressure.
+
+    # 1.
+    def _SortByQueueSizeAndReqID(a2l):
+      return (a2l.queue_size, a2l.first_request_id)
+    a2l = min(self._a2l_instances, key=_SortByQueueSizeAndReqID)
+
+    # 2.
+    if (a2l.queue_size >= self.max_queue_size and
+        len(self._a2l_instances) < self.max_concurrent_jobs):
+      a2l = self._CreateNewA2LInstance()
+
+    # 3.
+    if a2l.queue_size >= self.max_queue_size:
+      a2l.WaitForNextSymbolInQueue()
+
+    a2l.EnqueueRequest(addr, callback_arg)
+
+  def Join(self):
+    """Waits for all the outstanding requests to complete and terminates."""
+    for a2l in self._a2l_instances:
+      a2l.WaitForIdle()
+      a2l.Terminate()
+
+  def _CreateNewA2LInstance(self):
+    assert(len(self._a2l_instances) < self.max_concurrent_jobs)
+    a2l = ELFSymbolizer.Addr2Line(self)
+    self._a2l_instances.append(a2l)
+    return a2l
+
+  def _CreateDisambiguationTable(self):
+    """ Non-unique file names will result in None entries"""
+    start_time = time.time()
+    logging.info('Collecting information about available source files...')
+    self.disambiguation_table = {}
+
+    for root, _, filenames in os.walk(self.source_root_path):
+      for f in filenames:
+        self.disambiguation_table[f] = os.path.join(root, f) if (f not in
+                                       self.disambiguation_table) else None
+    logging.info('Finished collecting information about '
+                 'possible files (took %.1f s).',
+                 (time.time() - start_time))
+
+
+  class Addr2Line(object):
+    """A python wrapper around an addr2line instance.
+
+    The communication with the addr2line process looks as follows:
+      [STDIN]         [STDOUT]  (from addr2line's viewpoint)
+    > f001111
+    > f002222
+                    < Symbol::Name(foo, bar) for f001111
+                    < /path/to/source/file.c:line_number
+    > f003333
+                    < Symbol::Name2() for f002222
+                    < /path/to/source/file.c:line_number
+                    < Symbol::Name3() for f003333
+                    < /path/to/source/file.c:line_number
+    """
+
+    SYM_ADDR_RE = re.compile(r'([^:]+):(\?|\d+).*')
+
+    def __init__(self, symbolizer):
+      self._symbolizer = symbolizer
+      self._lib_file_name = posixpath.basename(symbolizer.elf_file_path)
+
+      # The request queue (i.e. addresses pushed to addr2line's stdin and not
+      # yet retrieved on stdout)
+      self._request_queue = collections.deque()
+
+      # This is essentially len(self._request_queue). It has been optimized
+      # into a separate field because it turned out to be a perf hot-spot.
+      self.queue_size = 0
+
+      # Keep track of the number of symbols a process has processed to
+      # avoid a single process growing too big and using all the memory.
+      self._processed_symbols_count = 0
+
+      # Objects required to handle the addr2line subprocess.
+      self._proc = None  # Subprocess.Popen(...) instance.
+      self._thread = None  # Threading.thread instance.
+      self._out_queue = None  # Queue.Queue instance (for buffering a2l stdout).
+      self._RestartAddr2LineProcess()
+
+    def EnqueueRequest(self, addr, callback_arg):
+      """Pushes an address to addr2line's stdin (and keeps track of it)."""
+      self._symbolizer.requests_counter += 1  # For global "age" of requests.
+      req_idx = self._symbolizer.requests_counter
+      self._request_queue.append((addr, callback_arg, req_idx))
+      self.queue_size += 1
+      self._WriteToA2lStdin(addr)
+
+    def WaitForIdle(self):
+      """Waits until all the pending requests have been symbolized."""
+      while self.queue_size > 0:
+        self.WaitForNextSymbolInQueue()
+
+    def WaitForNextSymbolInQueue(self):
+      """Waits for the next pending request to be symbolized."""
+      if not self.queue_size:
+        return
+
+      # This outer loop guards against a2l hanging (detecting stdout timeout).
+      while True:
+        start_time = datetime.datetime.now()
+        timeout = datetime.timedelta(seconds=self._symbolizer.addr2line_timeout)
+
+        # The inner loop guards against a2l crashing (checking if it exited).
+        while (datetime.datetime.now() - start_time < timeout):
+          # poll() returns a non-None exit code if the process exited (even
+          # code 0, hence the explicit None check). a2l should never exit.
+          if self._proc.poll() is not None:
+            logging.warning('addr2line crashed, respawning (lib: %s).' %
+                            self._lib_file_name)
+            self._RestartAddr2LineProcess()
+            # TODO(primiano): the best thing to do in this case would be
+            # shrinking the pool size as, very likely, addr2line is crashed
+            # due to low memory (and the respawned one will die again soon).
+
+          try:
+            lines = self._out_queue.get(block=True, timeout=0.25)
+          except Queue.Empty:
+            # On timeout (1/4 s.) repeat the inner loop and check if either the
+            # addr2line process did crash or we waited its output for too long.
+            continue
+
+          # In nominal conditions, we get straight to this point.
+          self._ProcessSymbolOutput(lines)
+          return
+
+        # If this point is reached, we waited more than |addr2line_timeout|.
+        logging.warning('Hung addr2line process, respawning (lib: %s).' %
+                        self._lib_file_name)
+        self._RestartAddr2LineProcess()
+
+    def ProcessAllResolvedSymbolsInQueue(self):
+      """Consumes all the addr2line output lines produced (without blocking)."""
+      if not self.queue_size:
+        return
+      while True:
+        try:
+          lines = self._out_queue.get_nowait()
+        except Queue.Empty:
+          break
+        self._ProcessSymbolOutput(lines)
+
+    def RecycleIfNecessary(self):
+      """Restarts the process if it has been used for too long.
+
+      A long running addr2line process will consume excessive amounts
+      of memory without any gain in performance."""
+      if self._processed_symbols_count >= ADDR2LINE_RECYCLE_LIMIT:
+        self._RestartAddr2LineProcess()
+
+    def Terminate(self):
+      """Kills the underlying addr2line process.
+
+      The poller |_thread| will terminate as well due to the broken pipe."""
+      try:
+        self._proc.kill()
+        self._proc.communicate()  # Essentially wait() without risking deadlock.
+      except Exception:  # An exception while terminating? How interesting.
+        pass
+      self._proc = None
+
+    def _WriteToA2lStdin(self, addr):
+      self._proc.stdin.write('%s\n' % hex(addr))
+      if self._symbolizer.inlines:
+        # In the case of inlines we output an extra blank line, which causes
+        # addr2line to emit a (??,??:0) tuple that we use as a boundary marker.
+        self._proc.stdin.write('\n')
+      self._proc.stdin.flush()
+
+    def _ProcessSymbolOutput(self, lines):
+      """Parses an addr2line symbol output and triggers the client callback."""
+      (_, callback_arg, _) = self._request_queue.popleft()
+      self.queue_size -= 1
+
+      innermost_sym_info = None
+      sym_info = None
+      for (line1, line2) in lines:
+        prev_sym_info = sym_info
+        name = line1 if not line1.startswith('?') else None
+        source_path = None
+        source_line = None
+        m = ELFSymbolizer.Addr2Line.SYM_ADDR_RE.match(line2)
+        if m:
+          if not m.group(1).startswith('?'):
+            source_path = m.group(1)
+            if not m.group(2).startswith('?'):
+              source_line = int(m.group(2))
+        else:
+          logging.warning('Got invalid symbol path from addr2line: %s' % line2)
+
+        # In case disambiguation is on, and needed
+        was_ambiguous = False
+        disambiguated = False
+        if self._symbolizer.disambiguate:
+          if source_path and not posixpath.isabs(source_path):
+            path = self._symbolizer.disambiguation_table.get(source_path)
+            was_ambiguous = True
+            disambiguated = path is not None
+            source_path = path if disambiguated else source_path
+
+          # Use absolute paths (so that paths are consistent, as disambiguation
+          # uses absolute paths)
+          if source_path and not was_ambiguous:
+            source_path = os.path.abspath(source_path)
+
+        if source_path and self._symbolizer.strip_base_path:
+          # Strip the base path
+          source_path = re.sub('^' + self._symbolizer.strip_base_path,
+              self._symbolizer.source_root_path or '', source_path)
+
+        sym_info = ELFSymbolInfo(name, source_path, source_line, was_ambiguous,
+                                 disambiguated)
+        if prev_sym_info:
+          prev_sym_info.inlined_by = sym_info
+        if not innermost_sym_info:
+          innermost_sym_info = sym_info
+
+      self._processed_symbols_count += 1
+      self._symbolizer.callback(innermost_sym_info, callback_arg)
+
+    def _RestartAddr2LineProcess(self):
+      if self._proc:
+        self.Terminate()
+
+      # The only reason for this Queue's existence (and the corresponding
+      # Thread below) is the lack of a subprocess.stdout.poll_avail_lines().
+      # Essentially this is a pipe able to extract a couple of lines atomically.
+      self._out_queue = Queue.Queue()
+
+      # Start the underlying addr2line process in line buffered mode.
+      cmd = [self._symbolizer.addr2line_path, '--functions', '--demangle',
+          '--exe=' + self._symbolizer.elf_file_path]
+      if self._symbolizer.inlines:
+        cmd += ['--inlines']
+      self._proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE,
+          stdin=subprocess.PIPE, stderr=sys.stderr, close_fds=True)
+
+      # Start the poller thread, which simply moves atomically the lines read
+      # from the addr2line's stdout to the |_out_queue|.
+      self._thread = threading.Thread(
+          target=ELFSymbolizer.Addr2Line.StdoutReaderThread,
+          args=(self._proc.stdout, self._out_queue, self._symbolizer.inlines))
+      self._thread.daemon = True  # Don't prevent early process exit.
+      self._thread.start()
+
+      self._processed_symbols_count = 0
+
+      # Replay the pending requests on the new process (only for the case
+      # of a hung addr2line timing out during the game).
+      for (addr, _, _) in self._request_queue:
+        self._WriteToA2lStdin(addr)
+
+    @staticmethod
+    def StdoutReaderThread(process_pipe, queue, inlines):
+      """The poller thread fn, which moves the addr2line stdout to the |queue|.
+
+      This is the only piece of code not running on the main thread. It merely
+      writes to a Queue, which is thread-safe. In the case of inlines, it
+      detects the ??,??:0 marker and sends the lines atomically, such that the
+      main thread always receives all the lines corresponding to one symbol in
+      one shot."""
+      try:
+        lines_for_one_symbol = []
+        while True:
+          line1 = process_pipe.readline().rstrip('\r\n')
+          line2 = process_pipe.readline().rstrip('\r\n')
+          if not line1 or not line2:
+            break
+          inline_has_more_lines = inlines and (len(lines_for_one_symbol) == 0 or
+                                  (line1 != '??' and line2 != '??:0'))
+          if not inlines or inline_has_more_lines:
+            lines_for_one_symbol += [(line1, line2)]
+          if inline_has_more_lines:
+            continue
+          queue.put(lines_for_one_symbol)
+          lines_for_one_symbol = []
+        process_pipe.close()
+
+      # Every addr2line process will die at some point; let it die silently.
+      except (IOError, OSError):
+        pass
+
+    @property
+    def first_request_id(self):
+      """Returns the request_id of the oldest pending request in the queue."""
+      return self._request_queue[0][2] if self._request_queue else 0
+
+
+class ELFSymbolInfo(object):
+  """The result of the symbolization passed as first arg. of each callback."""
+
+  def __init__(self, name, source_path, source_line, was_ambiguous=False,
+               disambiguated=False):
+    """All the fields here can be None (if addr2line replies with '??')."""
+    self.name = name
+    self.source_path = source_path
+    self.source_line = source_line
+    # In the case of |inlines|=True, the |inlined_by| points to the outer
+    # function inlining the current one (and so on, to form a chain).
+    self.inlined_by = None
+    self.disambiguated = disambiguated
+    self.was_ambiguous = was_ambiguous
+
+  def __str__(self):
+    return '%s [%s:%d]' % (
+        self.name or '??', self.source_path or '??', self.source_line or 0)
diff --git a/build/android/pylib/symbols/elf_symbolizer_unittest.py b/build/android/pylib/symbols/elf_symbolizer_unittest.py
new file mode 100755
index 0000000..e963a34
--- /dev/null
+++ b/build/android/pylib/symbols/elf_symbolizer_unittest.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(__file__))
+import elf_symbolizer
+import mock_addr2line
+
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+                              'mock_addr2line')
+_INCOMPLETE_MOCK_ADDR = 1024 * 1024
+_UNKNOWN_MOCK_ADDR = 2 * 1024 * 1024
+_INLINE_MOCK_ADDR = 3 * 1024 * 1024
+
+
+class ELFSymbolizerTest(unittest.TestCase):
+  def setUp(self):
+    self._callback = functools.partial(
+        ELFSymbolizerTest._SymbolizeCallback, self)
+    self._resolved_addresses = set()
+    # Mute warnings, we expect them due to the crash/hang tests.
+    logging.getLogger().setLevel(logging.ERROR)
+
+  def testParallelism1(self):
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+
+  def testParallelism4(self):
+    self._RunTest(max_concurrent_jobs=4, num_symbols=100)
+
+  def testParallelism8(self):
+    self._RunTest(max_concurrent_jobs=8, num_symbols=100)
+
+  def testCrash(self):
+    os.environ['MOCK_A2L_CRASH_EVERY'] = '99'
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+    os.environ['MOCK_A2L_CRASH_EVERY'] = '0'
+
+  def testHang(self):
+    os.environ['MOCK_A2L_HANG_EVERY'] = '99'
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+    os.environ['MOCK_A2L_HANG_EVERY'] = '0'
+
+  def testInlines(self):
+    """Stimulate the inline processing logic."""
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        inlines=True,
+        max_concurrent_jobs=4)
+
+    for addr in xrange(1000):
+      exp_inline = False
+      exp_unknown = False
+
+      # First 100 addresses with inlines.
+      if addr < 100:
+        addr += _INLINE_MOCK_ADDR
+        exp_inline = True
+
+      # Followed by 100 without inlines.
+      elif addr < 200:
+        pass
+
+      # Followed by 100 alternating inline and non-inline addresses.
+      elif addr < 300:
+        if addr & 1:
+          addr += _INLINE_MOCK_ADDR
+          exp_inline = True
+
+      # Followed by 100 alternating inline and unknown addresses.
+      elif addr < 400:
+        if addr & 1:
+          addr += _INLINE_MOCK_ADDR
+          exp_inline = True
+        else:
+          addr += _UNKNOWN_MOCK_ADDR
+          exp_unknown = True
+
+      exp_name = 'mock_sym_for_addr_%d' % addr if not exp_unknown else None
+      exp_source_path = 'mock_src/mock_lib1.so.c' if not exp_unknown else None
+      exp_source_line = addr if not exp_unknown else None
+      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, exp_inline)
+      symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+  def testIncompleteSyminfo(self):
+    """Stimulate the symbol-not-resolved logic."""
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=1)
+
+    # Test symbols with valid name but incomplete path.
+    addr = _INCOMPLETE_MOCK_ADDR
+    exp_name = 'mock_sym_for_addr_%d' % addr
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    # Test symbols with no name or sym info.
+    addr = _UNKNOWN_MOCK_ADDR
+    exp_name = None
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+  def _RunTest(self, max_concurrent_jobs, num_symbols):
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=max_concurrent_jobs,
+        addr2line_timeout=0.5)
+
+    for addr in xrange(num_symbols):
+      exp_name = 'mock_sym_for_addr_%d' % addr
+      exp_source_path = 'mock_src/mock_lib1.so.c'
+      exp_source_line = addr
+      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+      symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+    # Check that all the expected callbacks have been received.
+    for addr in xrange(num_symbols):
+      self.assertIn(addr, self._resolved_addresses)
+      self._resolved_addresses.remove(addr)
+
+    # Check for unexpected callbacks.
+    self.assertEqual(len(self._resolved_addresses), 0)
+
+  def _SymbolizeCallback(self, sym_info, cb_arg):
+    self.assertTrue(isinstance(sym_info, elf_symbolizer.ELFSymbolInfo))
+    self.assertTrue(isinstance(cb_arg, tuple))
+    self.assertEqual(len(cb_arg), 5)
+
+    # Unpack expectations from the callback extra argument.
+    (addr, exp_name, exp_source_path, exp_source_line, exp_inlines) = cb_arg
+    if exp_name is None:
+      self.assertIsNone(sym_info.name)
+    else:
+      self.assertTrue(sym_info.name.startswith(exp_name))
+    self.assertEqual(sym_info.source_path, exp_source_path)
+    self.assertEqual(sym_info.source_line, exp_source_line)
+
+    if exp_inlines:
+      self.assertEqual(sym_info.name, exp_name + '_inner')
+      self.assertEqual(sym_info.inlined_by.name, exp_name + '_middle')
+      self.assertEqual(sym_info.inlined_by.inlined_by.name,
+                       exp_name + '_outer')
+
+    # Check against duplicate callbacks.
+    self.assertNotIn(addr, self._resolved_addresses)
+    self._resolved_addresses.add(addr)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/symbols/mock_addr2line/__init__.py b/build/android/pylib/symbols/mock_addr2line/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/symbols/mock_addr2line/__init__.py
diff --git a/build/android/pylib/symbols/mock_addr2line/mock_addr2line b/build/android/pylib/symbols/mock_addr2line/mock_addr2line
new file mode 100755
index 0000000..cd58f56
--- /dev/null
+++ b/build/android/pylib/symbols/mock_addr2line/mock_addr2line
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple mock for addr2line.
+
+Outputs mock symbol information, with each symbol being a function of the
+original address (so it is easy to double-check consistency in unittests).
+"""
+
+import optparse
+import os
+import posixpath
+import sys
+import time
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-e', '--exe', dest='exe')  # Path of the debug-library.so.
+  # Silently swallow the other unnecessary arguments.
+  parser.add_option('-C', '--demangle', action='store_true')
+  parser.add_option('-f', '--functions', action='store_true')
+  parser.add_option('-i', '--inlines', action='store_true')
+  options, _ = parser.parse_args(argv[1:])
+  lib_file_name = posixpath.basename(options.exe)
+  processed_sym_count = 0
+  crash_every = int(os.environ.get('MOCK_A2L_CRASH_EVERY', 0))
+  hang_every = int(os.environ.get('MOCK_A2L_HANG_EVERY', 0))
+
+  while True:
+    line = sys.stdin.readline().rstrip('\r')
+    if not line:
+      break
+
+    # An empty line should generate '??,??:0' (used as a marker for inlines).
+    if line == '\n':
+      print '??'
+      print '??:0'
+      sys.stdout.flush()
+      continue
+
+    addr = int(line, 16)
+    processed_sym_count += 1
+    if crash_every and processed_sym_count % crash_every == 0:
+      sys.exit(1)
+    if hang_every and processed_sym_count % hang_every == 0:
+      time.sleep(1)
+
+    # Addresses < 1M will return good mock symbol information.
+    if addr < 1024 * 1024:
+      print 'mock_sym_for_addr_%d' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+
+    # Addresses 1M <= x < 2M will return symbols with a name but a missing path.
+    elif addr < 2 * 1024 * 1024:
+      print 'mock_sym_for_addr_%d' % addr
+      print '??:0'
+
+    # Addresses 2M <= x < 3M will return unknown symbol information.
+    elif addr < 3 * 1024 * 1024:
+      print '??'
+      print '??'
+
+    # Addresses 3M <= x < 4M will return inlines.
+    elif addr < 4 * 1024 * 1024:
+      print 'mock_sym_for_addr_%d_inner' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+      print 'mock_sym_for_addr_%d_middle' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+      print 'mock_sym_for_addr_%d_outer' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+
+    sys.stdout.flush()
+
+
+if __name__ == '__main__':
+  main(sys.argv)
\ No newline at end of file
diff --git a/build/android/pylib/system_properties.py b/build/android/pylib/system_properties.py
new file mode 100644
index 0000000..3f16f86
--- /dev/null
+++ b/build/android/pylib/system_properties.py
@@ -0,0 +1,40 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class SystemProperties(dict):
+
+  """A dict interface to interact with device system properties.
+
+  System properties are key/value pairs as exposed by adb shell getprop/setprop.
+
+  This implementation minimizes interaction with the physical device. It is
+  valid for the lifetime of a boot.
+  """
+
+  def __init__(self, android_commands):
+    super(SystemProperties, self).__init__()
+    self._adb = android_commands
+    self._cached_static_properties = {}
+
+  def __getitem__(self, key):
+    if self._IsStatic(key):
+      if key not in self._cached_static_properties:
+        self._cached_static_properties[key] = self._GetProperty(key)
+      return self._cached_static_properties[key]
+    return self._GetProperty(key)
+
+  def __setitem__(self, key, value):
+    # TODO(tonyg): This can fail with no root. Verify that it succeeds.
+    self._adb.SendShellCommand('setprop %s "%s"' % (key, value), retry_count=3)
+
+  @staticmethod
+  def _IsStatic(key):
+    # TODO(tonyg): This list is conservative and could be expanded as needed.
+    return (key.startswith('ro.boot.') or
+            key.startswith('ro.build.') or
+            key.startswith('ro.product.'))
+
+  def _GetProperty(self, key):
+    return self._adb.SendShellCommand('getprop %s' % key, retry_count=3).strip()
diff --git a/build/android/pylib/uiautomator/__init__.py b/build/android/pylib/uiautomator/__init__.py
new file mode 100644
index 0000000..cda7672
--- /dev/null
+++ b/build/android/pylib/uiautomator/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/uiautomator/setup.py b/build/android/pylib/uiautomator/setup.py
new file mode 100644
index 0000000..bd8ffc7
--- /dev/null
+++ b/build/android/pylib/uiautomator/setup.py
@@ -0,0 +1,35 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for uiautomator tests."""
+
+import logging
+
+from pylib.uiautomator import test_package
+from pylib.uiautomator import test_runner
+
+
+def Setup(test_options):
+  """Runs uiautomator tests on connected device(s).
+
+  Args:
+    test_options: A UIAutomatorOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  test_pkg = test_package.TestPackage(test_options.uiautomator_jar,
+                                      test_options.uiautomator_info_jar)
+  tests = test_pkg.GetAllMatchingTests(test_options.annotations,
+                                       test_options.exclude_annotations,
+                                       test_options.test_filter)
+
+  if not tests:
+    logging.error('No uiautomator tests to run with current args.')
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(
+        test_options, device, shard_index, test_pkg)
+
+  return (TestRunnerFactory, tests)
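+
+# Illustrative call (a sketch; 'options' is a hypothetical UIAutomatorOptions
+# instance and 'device' an attached device object):
+#
+#   runner_factory, tests = Setup(options)
+#   runner = runner_factory(device, shard_index=0)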
diff --git a/build/android/pylib/uiautomator/test_options.py b/build/android/pylib/uiautomator/test_options.py
new file mode 100644
index 0000000..3f5f950
--- /dev/null
+++ b/build/android/pylib/uiautomator/test_options.py
@@ -0,0 +1,20 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the UIAutomatorOptions named tuple."""
+
+import collections
+
+UIAutomatorOptions = collections.namedtuple('UIAutomatorOptions', [
+    'tool',
+    'annotations',
+    'exclude_annotations',
+    'test_filter',
+    'test_data',
+    'save_perf_json',
+    'screenshot_failures',
+    'uiautomator_jar',
+    'uiautomator_info_jar',
+    'package',
+    'set_asserts'])
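+
+# Illustrative construction (a sketch; all values below are hypothetical):
+#
+#   options = UIAutomatorOptions(
+#       tool=None, annotations=['Smoke'], exclude_annotations=[],
+#       test_filter=None, test_data=None, save_perf_json=False,
+#       screenshot_failures=True, uiautomator_jar='bin/tests.dex.jar',
+#       uiautomator_info_jar='bin/tests_java.jar', package='chrome',
+#       set_asserts=True)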
diff --git a/build/android/pylib/uiautomator/test_package.py b/build/android/pylib/uiautomator/test_package.py
new file mode 100644
index 0000000..cb51fdf
--- /dev/null
+++ b/build/android/pylib/uiautomator/test_package.py
@@ -0,0 +1,33 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class representing uiautomator test package."""
+
+import os
+
+from pylib import constants
+from pylib.instrumentation import test_jar
+
+
+class TestPackage(test_jar.TestJar):
+
+  UIAUTOMATOR_PATH = 'uiautomator/'
+  UIAUTOMATOR_DEVICE_DIR = os.path.join(constants.TEST_EXECUTABLE_DIR,
+                                        UIAUTOMATOR_PATH)
+
+  def __init__(self, jar_path, jar_info_path):
+    test_jar.TestJar.__init__(self, jar_info_path)
+
+    if not os.path.exists(jar_path):
+      raise Exception('%s not found, please build it' % jar_path)
+    self._jar_path = jar_path
+
+  def GetPackageName(self):
+    """Returns the JAR named that is installed on the device."""
+    return os.path.basename(self._jar_path)
+
+  # Override.
+  def Install(self, device):
+    device.PushChangedFiles([(self._jar_path, self.UIAUTOMATOR_DEVICE_DIR +
+                              self.GetPackageName())])
diff --git a/build/android/pylib/uiautomator/test_runner.py b/build/android/pylib/uiautomator/test_runner.py
new file mode 100644
index 0000000..bda6687
--- /dev/null
+++ b/build/android/pylib/uiautomator/test_runner.py
@@ -0,0 +1,89 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class for running uiautomator tests on a single device."""
+
+from pylib import constants
+from pylib import flag_changer
+from pylib.device import intent
+from pylib.instrumentation import test_options as instr_test_options
+from pylib.instrumentation import test_runner as instr_test_runner
+
+
+class TestRunner(instr_test_runner.TestRunner):
+  """Responsible for running a series of tests connected to a single device."""
+
+  def __init__(self, test_options, device, shard_index, test_pkg):
+    """Create a new TestRunner.
+
+    Args:
+      test_options: A UIAutomatorOptions object.
+      device: Attached android device.
+      shard_index: Shard index.
+      test_pkg: A TestPackage object.
+    """
+    # Create an InstrumentationOptions object to pass to the super class
+    instrumentation_options = instr_test_options.InstrumentationOptions(
+        test_options.tool,
+        test_options.annotations,
+        test_options.exclude_annotations,
+        test_options.test_filter,
+        test_options.test_data,
+        test_options.save_perf_json,
+        test_options.screenshot_failures,
+        wait_for_debugger=False,
+        coverage_dir=None,
+        test_apk=None,
+        test_apk_path=None,
+        test_apk_jar_path=None,
+        test_runner=None,
+        test_support_apk_path=None,
+        device_flags=None,
+        isolate_file_path=None,
+        set_asserts=test_options.set_asserts,
+        delete_stale_data=False)
+    super(TestRunner, self).__init__(instrumentation_options, device,
+                                     shard_index, test_pkg)
+
+    cmdline_file = constants.PACKAGE_INFO[test_options.package].cmdline_file
+    self.flags = None
+    if cmdline_file:
+      self.flags = flag_changer.FlagChanger(self.device, cmdline_file)
+    self._package = constants.PACKAGE_INFO[test_options.package].package
+    self._activity = constants.PACKAGE_INFO[test_options.package].activity
+
+  #override
+  def InstallTestPackage(self):
+    self.test_pkg.Install(self.device)
+
+  #override
+  def _RunTest(self, test, timeout):
+    self.device.ClearApplicationState(self._package)
+    if self.flags:
+      annotations = self.test_pkg.GetTestAnnotations(test)
+      if 'FirstRunExperience' == annotations.get('Feature', None):
+        self.flags.RemoveFlags(['--disable-fre'])
+      else:
+        self.flags.AddFlags(['--disable-fre'])
+    self.device.StartActivity(
+        intent.Intent(action='android.intent.action.MAIN',
+                      activity=self._activity,
+                      package=self._package),
+        blocking=True,
+        force_stop=True)
+    cmd = ['uiautomator', 'runtest',
+           self.test_pkg.UIAUTOMATOR_PATH + self.test_pkg.GetPackageName(),
+           '-e', 'class', test,
+           '-e', 'test_package', self._package]
+    return self.device.RunShellCommand(cmd, timeout=timeout, retries=0)
+
+  #override
+  def _GenerateTestResult(self, test, _result_code, _result_bundle, statuses,
+                          start_ms, duration_ms):
+    # uiautomator emits its summary status with INSTRUMENTATION_STATUS_CODE,
+    # not INSTRUMENTATION_CODE, so we have to drop it off the list of statuses.
+    summary_code, summary_bundle = statuses[-1]
+    return super(TestRunner, self)._GenerateTestResult(
+        test, summary_code, summary_bundle, statuses[:-1], start_ms,
+        duration_ms)
diff --git a/build/android/pylib/uirobot/__init__.py b/build/android/pylib/uirobot/__init__.py
new file mode 100644
index 0000000..5cac026
--- /dev/null
+++ b/build/android/pylib/uirobot/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/uirobot/uirobot_test_instance.py b/build/android/pylib/uirobot/uirobot_test_instance.py
new file mode 100644
index 0000000..e3f6eb7
--- /dev/null
+++ b/build/android/pylib/uirobot/uirobot_test_instance.py
@@ -0,0 +1,79 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import json
+import logging
+
+from pylib import constants
+from pylib.base import test_instance
+from pylib.utils import apk_helper
+
+class UirobotTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, error_func):
+    """Constructor.
+
+    Args:
+      args: Command line arguments.
+      error_func: Function called with an error message on fatal errors.
+    """
+    super(UirobotTestInstance, self).__init__()
+    if not args.app_under_test:
+      error_func('Must set --app-under-test.')
+    self._app_under_test = args.app_under_test
+    self._minutes = args.minutes
+
+    if args.remote_device_file:
+      with open(args.remote_device_file) as remote_device_file:
+        device_json = json.load(remote_device_file)
+    else:
+      device_json = {}
+    device_type = device_json.get('device_type', 'Android')
+    if args.device_type:
+      if device_type and device_type != args.device_type:
+        logging.info('Overriding device_type from %s to %s',
+                     device_type, args.device_type)
+      device_type = args.device_type
+
+    if device_type == 'Android':
+      self._suite = 'Android Uirobot'
+      self._package_name = apk_helper.GetPackageName(self._app_under_test)
+    elif device_type == 'iOS':
+      self._suite = 'iOS Uirobot'
+      self._package_name = self._app_under_test
+
+
+  #override
+  def TestType(self):
+    """Returns type of test."""
+    return 'uirobot'
+
+  #override
+  def SetUp(self):
+    """Setup for test."""
+    pass
+
+  #override
+  def TearDown(self):
+    """Teardown for test."""
+    pass
+
+  @property
+  def app_under_test(self):
+    """Returns the app to run the test on."""
+    return self._app_under_test
+
+  @property
+  def minutes(self):
+    """Returns the number of minutes to run the uirobot for."""
+    return self._minutes
+
+  @property
+  def package_name(self):
+    """Returns the name of the package in the APK."""
+    return self._package_name
+
+  @property
+  def suite(self):
+    return self._suite
diff --git a/build/android/pylib/utils/__init__.py b/build/android/pylib/utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/utils/__init__.py
diff --git a/build/android/pylib/utils/apk_helper.py b/build/android/pylib/utils/apk_helper.py
new file mode 100644
index 0000000..a556e7b
--- /dev/null
+++ b/build/android/pylib/utils/apk_helper.py
@@ -0,0 +1,131 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing utilities for apk packages."""
+
+import os.path
+import re
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.sdk import aapt
+
+
+_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
+_MANIFEST_ATTRIBUTE_RE = re.compile(
+    r'\s*A: ([^\(\)= ]*)\([^\(\)= ]*\)="(.*)" \(Raw: .*\)$')
+_MANIFEST_ELEMENT_RE = re.compile(r'\s*(?:E|N): (\S*) .*$')
+_PACKAGE_NAME_RE = re.compile(r'package: .*name=\'(\S*)\'')
+_SPLIT_NAME_RE = re.compile(r'package: .*split=\'(\S*)\'')
+
+
+def GetPackageName(apk_path):
+  """Returns the package name of the apk."""
+  return ApkHelper(apk_path).GetPackageName()
+
+
+# TODO(jbudorick): Deprecate and remove this function once callers have been
+# converted to ApkHelper.GetInstrumentationName
+def GetInstrumentationName(apk_path):
+  """Returns the name of the Instrumentation in the apk."""
+  return ApkHelper(apk_path).GetInstrumentationName()
+
+
+def _ParseManifestFromApk(apk_path):
+  aapt_output = aapt.Dump('xmltree', apk_path, 'AndroidManifest.xml')
+
+  parsed_manifest = {}
+  node_stack = [parsed_manifest]
+  indent = '  '
+
+  for line in aapt_output[1:]:
+    if len(line) == 0:
+      continue
+
+    indent_depth = 0
+    while line[(len(indent) * indent_depth):].startswith(indent):
+      indent_depth += 1
+
+    node_stack = node_stack[:indent_depth]
+    node = node_stack[-1]
+
+    m = _MANIFEST_ELEMENT_RE.match(line[len(indent) * indent_depth:])
+    if m:
+      if not m.group(1) in node:
+        node[m.group(1)] = {}
+      node_stack += [node[m.group(1)]]
+      continue
+
+    m = _MANIFEST_ATTRIBUTE_RE.match(line[len(indent) * indent_depth:])
+    if m:
+      if not m.group(1) in node:
+        node[m.group(1)] = []
+      node[m.group(1)].append(m.group(2))
+      continue
+
+  return parsed_manifest
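+
+# Illustrative sketch of the transformation: 'aapt dump xmltree' output like
+#
+#   N: android=http://schemas.android.com/apk/res/android
+#     E: manifest (line=1)
+#       A: package(0x00000000)="com.example" (Raw: "com.example")
+#
+# is parsed (first line skipped) into a nested dict along the lines of
+#
+#   {'manifest': {'package': ['com.example']}}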
+
+
+class ApkHelper(object):
+  def __init__(self, apk_path):
+    self._apk_path = apk_path
+    self._manifest = None
+    self._package_name = None
+    self._split_name = None
+
+  def GetActivityName(self):
+    """Returns the name of the Activity in the apk."""
+    manifest_info = self._GetManifest()
+    try:
+      activity = (
+          manifest_info['manifest']['application']['activity']
+              ['android:name'][0])
+    except KeyError:
+      return None
+    if '.' not in activity:
+      activity = '%s.%s' % (self.GetPackageName(), activity)
+    elif activity.startswith('.'):
+      activity = '%s%s' % (self.GetPackageName(), activity)
+    return activity
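+
+  # For example (illustrative): with package 'com.example', a manifest
+  # activity name of '.Main' or 'Main' resolves to 'com.example.Main';
+  # fully qualified names are returned unchanged.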
+
+  def GetInstrumentationName(
+      self, default='android.test.InstrumentationTestRunner'):
+    """Returns the name of the Instrumentation in the apk."""
+    manifest_info = self._GetManifest()
+    try:
+      return manifest_info['manifest']['instrumentation']['android:name'][0]
+    except KeyError:
+      return default
+
+  def GetPackageName(self):
+    """Returns the package name of the apk."""
+    if self._package_name:
+      return self._package_name
+
+    aapt_output = aapt.Dump('badging', self._apk_path)
+    for line in aapt_output:
+      m = _PACKAGE_NAME_RE.match(line)
+      if m:
+        self._package_name = m.group(1)
+        return self._package_name
+    raise Exception('Failed to determine package name of %s' % self._apk_path)
+
+  def GetSplitName(self):
+    """Returns the name of the split of the apk."""
+    if self._split_name:
+      return self._split_name
+
+    aapt_output = aapt.Dump('badging', self._apk_path)
+    for line in aapt_output:
+      m = _SPLIT_NAME_RE.match(line)
+      if m:
+        self._split_name = m.group(1)
+        return self._split_name
+    return None
+
+  def _GetManifest(self):
+    if not self._manifest:
+      self._manifest = _ParseManifestFromApk(self._apk_path)
+    return self._manifest
+
diff --git a/build/android/pylib/utils/base_error.py b/build/android/pylib/utils/base_error.py
new file mode 100644
index 0000000..31eaa54
--- /dev/null
+++ b/build/android/pylib/utils/base_error.py
@@ -0,0 +1,16 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class BaseError(Exception):
+  """Base error for all test runner errors."""
+
+  def __init__(self, message, is_infra_error=False):
+    super(BaseError, self).__init__(message)
+    self._is_infra_error = is_infra_error
+
+  @property
+  def is_infra_error(self):
+    """Property to indicate if error was caused by an infrastructure issue."""
+    return self._is_infra_error
\ No newline at end of file
diff --git a/build/android/pylib/utils/command_option_parser.py b/build/android/pylib/utils/command_option_parser.py
new file mode 100644
index 0000000..cf501d0
--- /dev/null
+++ b/build/android/pylib/utils/command_option_parser.py
@@ -0,0 +1,75 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An option parser which handles the first arg as a command.
+
+It also adds other niceties, such as printing a list of commands
+and an example in the usage message.
+"""
+
+import optparse
+import sys
+
+
+class CommandOptionParser(optparse.OptionParser):
+  """Wrapper class for OptionParser to help with listing commands."""
+
+  def __init__(self, *args, **kwargs):
+    """Creates a CommandOptionParser.
+
+    Args:
+      commands_dict: A dictionary mapping command strings to an object defining
+          - add_options_func: Adds options to the option parser
+          - run_command_func: Runs the command itself.
+      example: An example command.
+      everything else: Passed to the optparse.OptionParser constructor.
+    """
+    self.commands_dict = kwargs.pop('commands_dict', {})
+    self.example = kwargs.pop('example', '')
+    if not 'usage' in kwargs:
+      kwargs['usage'] = 'Usage: %prog <command> [options]'
+    optparse.OptionParser.__init__(self, *args, **kwargs)
+
+  #override
+  def get_usage(self):
+    normal_usage = optparse.OptionParser.get_usage(self)
+    command_list = self.get_command_list()
+    example = self.get_example()
+    return self.expand_prog_name(normal_usage + example + command_list)
+
+  def get_command_list(self):
+    if self.commands_dict.keys():
+      return '\nCommands:\n  %s\n' % '\n  '.join(
+          sorted(self.commands_dict.keys()))
+    return ''
+
+  def get_example(self):
+    if self.example:
+      return '\nExample:\n  %s\n' % self.example
+    return ''
+
+
+def ParseAndExecute(option_parser, argv=None):
+  """Parses options/args from argv and runs the specified command.
+
+  Args:
+    option_parser: A CommandOptionParser object.
+    argv: Command line arguments. If None, automatically draw from sys.argv.
+
+  Returns:
+    An exit code.
+  """
+  if not argv:
+    argv = sys.argv
+
+  if len(argv) < 2 or argv[1] not in option_parser.commands_dict:
+    # Parse args first; if this is '--help', optparse will print help and exit.
+    option_parser.parse_args(argv)
+    option_parser.error('Invalid command.')
+
+  cmd = option_parser.commands_dict[argv[1]]
+  cmd.add_options_func(option_parser)
+  options, args = option_parser.parse_args(argv)
+  return cmd.run_command_func(argv[1], options, args, option_parser)
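+
+# Illustrative usage (a sketch; 'RunFoo' is a hypothetical object exposing
+# add_options_func and run_command_func):
+#
+#   parser = CommandOptionParser(commands_dict={'foo': RunFoo},
+#                                example='%prog foo --bar')
+#   sys.exit(ParseAndExecute(parser))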
diff --git a/build/android/pylib/utils/device_temp_file.py b/build/android/pylib/utils/device_temp_file.py
new file mode 100644
index 0000000..7d3b95b
--- /dev/null
+++ b/build/android/pylib/utils/device_temp_file.py
@@ -0,0 +1,57 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A temp file that automatically gets pushed and deleted from a device."""
+
+# pylint: disable=W0622
+
+import random
+import time
+
+from pylib import cmd_helper
+from pylib.device import device_errors
+
+
+class DeviceTempFile(object):
+  def __init__(self, adb, suffix='', prefix='temp_file', dir='/data/local/tmp'):
+    """Find an unused temporary file path in the devices external directory.
+
+    When this object is closed, the file will be deleted on the device.
+
+    Args:
+      adb: An instance of AdbWrapper
+      suffix: The suffix of the name of the temp file.
+      prefix: The prefix of the name of the temp file.
+      dir: The directory on the device where to place the temp file.
+    """
+    self._adb = adb
+    # Make sure the temp dir exists; Shell() raises if 'test -d' fails.
+    self._adb.Shell('test -d %s' % cmd_helper.SingleQuote(dir))
+    while True:
+      self.name = '{dir}/{prefix}-{time:d}-{nonce:d}{suffix}'.format(
+        dir=dir, prefix=prefix, time=int(time.time()),
+        nonce=random.randint(0, 1000000), suffix=suffix)
+      self.name_quoted = cmd_helper.SingleQuote(self.name)
+      try:
+        self._adb.Shell('test -e %s' % self.name_quoted)
+      except device_errors.AdbCommandFailedError:
+        break # file does not exist
+
+    # Immediately touch the file, so other temp files can't get the same name.
+    self._adb.Shell('touch %s' % self.name_quoted)
+
+  def close(self):
+    """Deletes the temporary file from the device."""
+    # ignore exception if the file is already gone.
+    try:
+      self._adb.Shell('rm -f %s' % self.name_quoted)
+    except device_errors.AdbCommandFailedError:
+      # Android versions without 'rm -f' support (ICS) fail if the file is gone.
+      pass
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, type, value, traceback):
+    self.close()
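+
+# Illustrative usage (a sketch; 'adb' stands for an AdbWrapper instance and
+# 'local_path' is a hypothetical host file):
+#
+#   with DeviceTempFile(adb, suffix='.json') as f:
+#     adb.Push(local_path, f.name)   # work with f.name on the device
+#   # on exit the temp file is removed from the device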
diff --git a/build/android/pylib/utils/device_temp_file_test.py b/build/android/pylib/utils/device_temp_file_test.py
new file mode 100755
index 0000000..f839ce0
--- /dev/null
+++ b/build/android/pylib/utils/device_temp_file_test.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of device_temp_file.py.
+"""
+
+import logging
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+from pylib.utils import device_temp_file
+from pylib.utils import mock_calls
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+class DeviceTempFileTest(mock_calls.TestCase):
+
+  def setUp(self):
+    test_serial = '0123456789abcdef'
+    self.adb = mock.Mock(spec=adb_wrapper.AdbWrapper)
+    self.adb.__str__ = mock.Mock(return_value=test_serial)
+    self.watchMethodCalls(self.call.adb)
+
+  def mockShellCall(self, cmd_prefix, action=''):
+    """Expect an adb.Shell(cmd) call with cmd_prefix and do some action
+
+    Args:
+      cmd_prefix: A string, the cmd of the received call is expected to have
+          this as a prefix.
+      action: If callable, an action to perform when the expected call is
+          received, otherwise a return value.
+    Returns:
+      An (expected_call, action) pair suitable for use in assertCalls.
+    """
+    def check_and_return(cmd):
+      self.assertTrue(
+          cmd.startswith(cmd_prefix),
+          'command %r does not start with prefix %r' % (cmd, cmd_prefix))
+      if callable(action):
+        return action(cmd)
+      else:
+        return action
+    return (self.call.adb.Shell(mock.ANY), check_and_return)
+
+  def mockExistsTest(self, exists_result):
+    def action(cmd):
+      if exists_result:
+        return ''
+      else:
+        raise device_errors.AdbCommandFailedError(
+            cmd, 'File not found', 1, str(self.adb))
+    return self.mockShellCall('test -e ', action)
+
+  def testTempFileNameAlreadyExists(self):
+    with self.assertCalls(
+        self.mockShellCall('test -d /data/local/tmp'),
+        self.mockExistsTest(True),
+        self.mockExistsTest(True),
+        self.mockExistsTest(True),
+        self.mockExistsTest(False),
+        self.mockShellCall('touch '),
+        self.mockShellCall('rm -f ')):
+      with device_temp_file.DeviceTempFile(self.adb) as tmpfile:
+        logging.debug('Temp file name: %s' % tmpfile.name)
+
+  def testTempFileLifecycle(self):
+    with self.assertCalls(
+        self.mockShellCall('test -d /data/local/tmp'),
+        self.mockExistsTest(False),
+        self.mockShellCall('touch ')):
+      tempFileContextManager = device_temp_file.DeviceTempFile(self.adb)
+    with mock.patch.object(self.adb, 'Shell'):
+      with tempFileContextManager as tmpfile:
+        logging.debug('Temp file name: %s' % tmpfile.name)
+        self.assertEquals(0, self.adb.Shell.call_count)
+      self.assertEquals(1, self.adb.Shell.call_count)
+      args, _ = self.adb.Shell.call_args
+      self.assertTrue(args[0].startswith('rm -f '))
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
diff --git a/build/android/pylib/utils/emulator.py b/build/android/pylib/utils/emulator.py
new file mode 100644
index 0000000..cc07e61
--- /dev/null
+++ b/build/android/pylib/utils/emulator.py
@@ -0,0 +1,444 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to start and stop Android emulator.
+
+  Emulator: The class provides the methods to launch/shutdown the emulator with
+            the android virtual device named 'avd_armeabi' .
+"""
+
+import logging
+import os
+import signal
+import subprocess
+import time
+
+# TODO(craigdh): Move these pylib dependencies to pylib/utils/.
+from pylib import cmd_helper
+from pylib import constants
+from pylib import pexpect
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import time_profile
+
+import errors
+import run_command
+
+# SD card size
+SDCARD_SIZE = '512M'
+
+# Template used to generate config.ini files for the emulator
+CONFIG_TEMPLATE = """avd.ini.encoding=ISO-8859-1
+hw.dPad=no
+hw.lcd.density=320
+sdcard.size=512M
+hw.cpu.arch={hw.cpu.arch}
+hw.device.hash=-708107041
+hw.camera.back=none
+disk.dataPartition.size=800M
+hw.gpu.enabled=yes
+skin.path=720x1280
+skin.dynamic=yes
+hw.keyboard=yes
+hw.ramSize=1024
+hw.device.manufacturer=Google
+hw.sdCard=yes
+hw.mainKeys=no
+hw.accelerometer=yes
+skin.name=720x1280
+abi.type={abi.type}
+hw.trackBall=no
+hw.device.name=Galaxy Nexus
+hw.battery=yes
+hw.sensors.proximity=yes
+image.sysdir.1=system-images/android-{api.level}/{abi.type}/
+hw.sensors.orientation=yes
+hw.audioInput=yes
+hw.camera.front=none
+hw.gps=yes
+vm.heapSize=128
+{extras}"""
+
+CONFIG_REPLACEMENTS = {
+  'x86': {
+    '{hw.cpu.arch}': 'x86',
+    '{abi.type}': 'x86',
+    '{extras}': ''
+  },
+  'arm': {
+    '{hw.cpu.arch}': 'arm',
+    '{abi.type}': 'armeabi-v7a',
+    '{extras}': 'hw.cpu.model=cortex-a8\n'
+  },
+  'mips': {
+    '{hw.cpu.arch}': 'mips',
+    '{abi.type}': 'mips',
+    '{extras}': ''
+  }
+}
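+
+# Illustrative result: for abi 'arm' and api_level 19, CreateAVD() rewrites
+# the template so that 'hw.cpu.arch={hw.cpu.arch}' becomes 'hw.cpu.arch=arm',
+# 'abi.type={abi.type}' becomes 'abi.type=armeabi-v7a', and '{extras}' adds
+# 'hw.cpu.model=cortex-a8'.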
+
+class EmulatorLaunchException(Exception):
+  """Emulator failed to launch."""
+  pass
+
+def _KillAllEmulators():
+  """Kill all running emulators that look like ones we started.
+
+  There are odd 'sticky' cases where there can be no emulator process
+  running but a device slot is taken.  A little bot trouble and we're out of
+  room forever.
+  """
+  emulators = [d for d in device_utils.DeviceUtils.HealthyDevices()
+               if d.adb.is_emulator]
+  if not emulators:
+    return
+  for e in emulators:
+    e.adb.Emu(['kill'])
+  logging.info('Emulator killing is async; give a few seconds for all to die.')
+  for _ in range(5):
+    if not any(d.adb.is_emulator for d
+               in device_utils.DeviceUtils.HealthyDevices()):
+      return
+    time.sleep(1)
+
+
+def DeleteAllTempAVDs():
+  """Delete all temporary AVDs which are created for tests.
+
+  If the test exits abnormally and some temporary AVDs created when testing may
+  be left in the system. Clean these AVDs.
+  """
+  avds = device_utils.GetAVDs()
+  if not avds:
+    return
+  for avd_name in avds:
+    if 'run_tests_avd' in avd_name:
+      cmd = ['android', '-s', 'delete', 'avd', '--name', avd_name]
+      cmd_helper.RunCmd(cmd)
+      logging.info('Deleted AVD %s', avd_name)
+
+
+class PortPool(object):
+  """Pool for emulator port starting position that changes over time."""
+  _port_min = 5554
+  _port_max = 5585
+  _port_current_index = 0
+
+  @classmethod
+  def port_range(cls):
+    """Return a range of valid ports for emulator use.
+
+    The port must be an even number between 5554 and 5584.  Sometimes
+    a killed emulator "hangs on" to a port long enough to prevent
+    relaunch.  This is especially true on slow machines (like a bot).
+    Cycling through a port start position helps make us resilient."""
+    ports = range(cls._port_min, cls._port_max, 2)
+    n = cls._port_current_index
+    cls._port_current_index = (n + 1) % len(ports)
+    return ports[n:] + ports[:n]
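+
+  # For example (illustrative): the first call returns [5554, 5556, ..., 5584]
+  # and the next returns [5556, ..., 5584, 5554], so a port that a dead
+  # emulator is still holding is not retried first on every launch.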
+
+
+def _GetAvailablePort():
+  """Returns an available TCP port for the console."""
+  used_ports = []
+  emulators = [d for d in device_utils.DeviceUtils.HealthyDevices()
+               if d.adb.is_emulator]
+  for emulator in emulators:
+    used_ports.append(emulator.adb.GetDeviceSerial().split('-')[1])
+  for port in PortPool.port_range():
+    if str(port) not in used_ports:
+      return port
+
+
+def LaunchTempEmulators(emulator_count, abi, api_level, wait_for_boot=True):
+  """Create and launch temporary emulators and wait for them to boot.
+
+  Args:
+    emulator_count: number of emulators to launch.
+    abi: the emulator target platform
+    api_level: the api level (e.g., 19 for Android v4.4 - KitKat release)
+    wait_for_boot: whether or not to wait for emulators to boot up
+
+  Returns:
+    List of emulators.
+  """
+  emulators = []
+  for n in xrange(emulator_count):
+    t = time_profile.TimeProfile('Emulator launch %d' % n)
+    # Creates a temporary AVD.
+    avd_name = 'run_tests_avd_%d' % n
+    logging.info('Emulator launch %d with avd_name=%s and api=%d',
+        n, avd_name, api_level)
+    emulator = Emulator(avd_name, abi)
+    emulator.CreateAVD(api_level)
+    emulator.Launch(kill_all_emulators=n == 0)
+    t.Stop()
+    emulators.append(emulator)
+  # Wait for all emulators to finish booting.
+  if wait_for_boot:
+    for emulator in emulators:
+      emulator.ConfirmLaunch(True)
+  return emulators
+
+
+def LaunchEmulator(avd_name, abi):
+  """Launch an existing emulator with name avd_name.
+
+  Args:
+    avd_name: name of existing emulator
+    abi: the emulator target platform
+
+  Returns:
+    emulator object.
+  """
+  logging.info('Launching specified emulator with avd_name=%s', avd_name)
+  emulator = Emulator(avd_name, abi)
+  emulator.Launch(kill_all_emulators=True)
+  emulator.ConfirmLaunch(True)
+  return emulator
+
+
+class Emulator(object):
+  """Provides the methods to launch/shutdown the emulator.
+
+  The emulator has the android virtual device named 'avd_armeabi'.
+
+  The emulator can use any even TCP port between 5554 and 5584 for console
+  communication, and this port becomes part of the device name, e.g.
+  'emulator-5554'. That device name is used as the id of the emulator
+  managed by this class.
+
+  Attributes:
+    emulator: Path of Android's emulator tool.
+    popen: Popen object of the running emulator process.
+    device: Device name of this emulator.
+  """
+
+  # Signals we listen for to kill the emulator on
+  _SIGNALS = (signal.SIGINT, signal.SIGHUP)
+
+  # Time to wait for an emulator launch, in seconds.  This includes
+  # the time to launch the emulator and a wait-for-device command.
+  _LAUNCH_TIMEOUT = 120
+
+  # Timeout interval of wait-for-device command before bouncing to a
+  # process life check.
+  _WAITFORDEVICE_TIMEOUT = 5
+
+  # Time to wait for a "wait for boot complete" (property set on device).
+  _WAITFORBOOT_TIMEOUT = 300
+
+  def __init__(self, avd_name, abi):
+    """Init an Emulator.
+
+    Args:
+      avd_name: name of the AVD to create
+      abi: target platform for the emulator; anything other than 'arm' or
+          'mips' falls back to x86 in CreateAVD()
+    """
+    android_sdk_root = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk')
+    self.emulator = os.path.join(android_sdk_root, 'tools', 'emulator')
+    self.android = os.path.join(android_sdk_root, 'tools', 'android')
+    self.popen = None
+    self.device_serial = None
+    self.abi = abi
+    self.avd_name = avd_name
+
+  @staticmethod
+  def _DeviceName():
+    """Return our device name."""
+    port = _GetAvailablePort()
+    return ('emulator-%d' % port, port)
+
+  def CreateAVD(self, api_level):
+    """Creates an AVD with the given name.
+
+    Args:
+      api_level: the api level of the image
+
+    Returns:
+      avd_name.
+    """
+
+    if self.abi == 'arm':
+      abi_option = 'armeabi-v7a'
+    elif self.abi == 'mips':
+      abi_option = 'mips'
+    else:
+      abi_option = 'x86'
+
+    api_target = 'android-%s' % api_level
+
+    avd_command = [
+        self.android,
+        '--silent',
+        'create', 'avd',
+        '--name', self.avd_name,
+        '--abi', abi_option,
+        '--target', api_target,
+        '--sdcard', SDCARD_SIZE,
+        '--force',
+    ]
+    avd_cmd_str = ' '.join(avd_command)
+    logging.info('Create AVD command: %s', avd_cmd_str)
+    avd_process = pexpect.spawn(avd_cmd_str)
+
+    # Instead of creating a custom profile, we overwrite config files.
+    avd_process.expect('Do you wish to create a custom hardware profile')
+    avd_process.sendline('no\n')
+    avd_process.expect('Created AVD \'%s\'' % self.avd_name)
+
+    # Replace current configuration with default Galaxy Nexus config.
+    avds_dir = os.path.join(os.path.expanduser('~'), '.android', 'avd')
+    ini_file = os.path.join(avds_dir, '%s.ini' % self.avd_name)
+    new_config_ini = os.path.join(avds_dir, '%s.avd' % self.avd_name,
+                                  'config.ini')
+
+    # Remove the default config files; they are replaced below with Galaxy
+    # Nexus settings.
+    os.unlink(ini_file)
+    os.unlink(new_config_ini)
+
+    # Create new configuration files with Galaxy Nexus by Google settings.
+    with open(ini_file, 'w') as new_ini:
+      new_ini.write('avd.ini.encoding=ISO-8859-1\n')
+      new_ini.write('target=%s\n' % api_target)
+      new_ini.write('path=%s/%s.avd\n' % (avds_dir, self.avd_name))
+      new_ini.write('path.rel=avd/%s.avd\n' % self.avd_name)
+
+    custom_config = CONFIG_TEMPLATE
+    replacements = CONFIG_REPLACEMENTS[self.abi]
+    for key in replacements:
+      custom_config = custom_config.replace(key, replacements[key])
+    custom_config = custom_config.replace('{api.level}', str(api_level))
+
+    with open(new_config_ini, 'w') as new_config_ini:
+      new_config_ini.write(custom_config)
+
+    return self.avd_name
+
+
+  def _DeleteAVD(self):
+    """Delete the AVD of this emulator."""
+    avd_command = [
+        self.android,
+        '--silent',
+        'delete',
+        'avd',
+        '--name', self.avd_name,
+    ]
+    logging.info('Delete AVD command: %s', ' '.join(avd_command))
+    cmd_helper.RunCmd(avd_command)
+
+
+  def Launch(self, kill_all_emulators):
+    """Launches the emulator asynchronously. Call ConfirmLaunch() to ensure the
+    emulator is ready for use.
+
+    If the launch fails, an exception is raised.
+    """
+    if kill_all_emulators:
+      _KillAllEmulators()  # just to be sure
+    self._AggressiveImageCleanup()
+    (self.device_serial, port) = self._DeviceName()
+    emulator_command = [
+        self.emulator,
+        # Speed up emulator launch by 40%.  Really.
+        '-no-boot-anim',
+        # The default /data size is 64M.
+        # That's not enough for 8 unit test bundles and their data.
+        '-partition-size', '512',
+        # Use a familiar name and port.
+        '-avd', self.avd_name,
+        '-port', str(port),
+        # Wipe the data.  We've seen cases where an emulator gets 'stuck' if we
+        # don't do this (every thousand runs or so).
+        '-wipe-data',
+        # Enable GPU by default.
+        '-gpu', 'on',
+        '-qemu', '-m', '1024',
+        ]
+    if self.abi == 'x86':
+      emulator_command.extend([
+          # For x86 emulator --enable-kvm will fail early, avoiding accidental
+          # runs in a slow mode (i.e. without hardware virtualization support).
+          '--enable-kvm',
+          ])
+
+    logging.info('Emulator launch command: %s', ' '.join(emulator_command))
+    self.popen = subprocess.Popen(args=emulator_command,
+                                  stderr=subprocess.STDOUT)
+    self._InstallKillHandler()
+
+  @staticmethod
+  def _AggressiveImageCleanup():
+    """Aggressive cleanup of emulator images.
+
+    Experimentally it looks like our current emulator use on the bot
+    leaves image files around in /tmp/android-$USER.  If a "random"
+    name gets reused, we choke with a 'File exists' error.
+    TODO(jrg): is there a less hacky way to accomplish the same goal?
+    """
+    logging.info('Aggressive Image Cleanup')
+    emulator_imagedir = '/tmp/android-%s' % os.environ['USER']
+    if not os.path.exists(emulator_imagedir):
+      return
+    for image in os.listdir(emulator_imagedir):
+      full_name = os.path.join(emulator_imagedir, image)
+      if 'emulator' in full_name:
+        logging.info('Deleting emulator image %s', full_name)
+        os.unlink(full_name)
+
+  def ConfirmLaunch(self, wait_for_boot=False):
+    """Confirm the emulator launched properly.
+
+    Loop on a wait-for-device with a very small timeout.  On each
+    timeout, check the emulator process is still alive.
+    After confirming a wait-for-device can be successful, make sure
+    it returns the right answer.
+    """
+    seconds_waited = 0
+    number_of_waits = 2  # Make sure we can wait-for-device twice.
+
+    device = device_utils.DeviceUtils(self.device_serial)
+    while seconds_waited < self._LAUNCH_TIMEOUT:
+      try:
+        device.adb.WaitForDevice(
+            timeout=self._WAITFORDEVICE_TIMEOUT, retries=1)
+        number_of_waits -= 1
+        if not number_of_waits:
+          break
+      except device_errors.CommandTimeoutError:
+        seconds_waited += self._WAITFORDEVICE_TIMEOUT
+        device.adb.KillServer()
+      self.popen.poll()
+      if self.popen.returncode is not None:
+        raise EmulatorLaunchException('EMULATOR DIED')
+
+    if seconds_waited >= self._LAUNCH_TIMEOUT:
+      raise EmulatorLaunchException('TIMEOUT with wait-for-device')
+
+    logging.info('Seconds waited on wait-for-device: %d', seconds_waited)
+    if wait_for_boot:
+      # Now that we checked for obvious problems, wait for a boot complete.
+      # Waiting for the package manager is sometimes problematic.
+      device.WaitUntilFullyBooted(timeout=self._WAITFORBOOT_TIMEOUT)
+
+  def Shutdown(self):
+    """Shuts down the process started by launch."""
+    self._DeleteAVD()
+    if self.popen:
+      self.popen.poll()
+      if self.popen.returncode is None:
+        self.popen.kill()
+      self.popen = None
+
+  def _ShutdownOnSignal(self, _signum, _frame):
+    logging.critical('emulator _ShutdownOnSignal')
+    for sig in self._SIGNALS:
+      signal.signal(sig, signal.SIG_DFL)
+    self.Shutdown()
+    raise KeyboardInterrupt  # print a stack
+
+  def _InstallKillHandler(self):
+    """Install a handler to kill the emulator when we exit unexpectedly."""
+    for sig in self._SIGNALS:
+      signal.signal(sig, self._ShutdownOnSignal)
diff --git a/build/android/pylib/utils/findbugs.py b/build/android/pylib/utils/findbugs.py
new file mode 100644
index 0000000..8deb0fe
--- /dev/null
+++ b/build/android/pylib/utils/findbugs.py
@@ -0,0 +1,154 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import logging
+import os
+import re
+import shlex
+import sys
+import xml.dom.minidom
+
+from pylib import cmd_helper
+from pylib import constants
+
+
+_FINDBUGS_HOME = os.path.join(constants.DIR_SOURCE_ROOT, 'third_party',
+                              'findbugs')
+_FINDBUGS_JAR = os.path.join(_FINDBUGS_HOME, 'lib', 'findbugs.jar')
+_FINDBUGS_MAX_HEAP = 768
+_FINDBUGS_PLUGIN_PATH = os.path.join(
+    constants.DIR_SOURCE_ROOT, 'tools', 'android', 'findbugs_plugin', 'lib',
+    'chromiumPlugin.jar')
+
+
+def _ParseXmlResults(results_doc):
+  warnings = set()
+  for en in (n for n in results_doc.documentElement.childNodes
+             if n.nodeType == xml.dom.Node.ELEMENT_NODE):
+    if en.tagName == 'BugInstance':
+      warnings.add(_ParseBugInstance(en))
+  return warnings
+
+
+def _GetMessage(node):
+  for c in (n for n in node.childNodes
+            if n.nodeType == xml.dom.Node.ELEMENT_NODE):
+    if c.tagName == 'Message':
+      if (len(c.childNodes) == 1
+          and c.childNodes[0].nodeType == xml.dom.Node.TEXT_NODE):
+        return c.childNodes[0].data
+  return None
+
+
+def _ParseBugInstance(node):
+  bug = FindBugsWarning(node.getAttribute('type'))
+  msg_parts = []
+  for c in (n for n in node.childNodes
+            if n.nodeType == xml.dom.Node.ELEMENT_NODE):
+    if c.tagName == 'Class':
+      msg_parts.append(_GetMessage(c))
+    elif c.tagName == 'Method':
+      msg_parts.append(_GetMessage(c))
+    elif c.tagName == 'Field':
+      msg_parts.append(_GetMessage(c))
+    elif c.tagName == 'SourceLine':
+      bug.file_name = c.getAttribute('sourcefile')
+      if c.hasAttribute('start'):
+        bug.start_line = int(c.getAttribute('start'))
+      if c.hasAttribute('end'):
+        bug.end_line = int(c.getAttribute('end'))
+      msg_parts.append(_GetMessage(c))
+    elif (c.tagName == 'ShortMessage' and len(c.childNodes) == 1
+          and c.childNodes[0].nodeType == xml.dom.Node.TEXT_NODE):
+      msg_parts.append(c.childNodes[0].data)
+  bug.message = tuple(m for m in msg_parts if m)
+  return bug
+
+
+class FindBugsWarning(object):
+
+  def __init__(self, bug_type='', end_line=0, file_name='', message=None,
+               start_line=0):
+    self.bug_type = bug_type
+    self.end_line = end_line
+    self.file_name = file_name
+    if message is None:
+      self.message = tuple()
+    else:
+      self.message = message
+    self.start_line = start_line
+
+  def __cmp__(self, other):
+    return (cmp(self.file_name, other.file_name)
+            or cmp(self.start_line, other.start_line)
+            or cmp(self.end_line, other.end_line)
+            or cmp(self.bug_type, other.bug_type)
+            or cmp(self.message, other.message))
+
+  def __eq__(self, other):
+    return self.__dict__ == other.__dict__
+
+  def __hash__(self):
+    return hash((self.bug_type, self.end_line, self.file_name, self.message,
+                 self.start_line))
+
+  def __ne__(self, other):
+    return not self == other
+
+  def __str__(self):
+    return '%s: %s' % (self.bug_type, '\n  '.join(self.message))
+
+
+def Run(exclude, classes_to_analyze, auxiliary_classes, output_file,
+        findbug_args, jars):
+  """Run FindBugs.
+
+  Args:
+    exclude: the exclude xml file; refer to FindBugs's -exclude command option.
+    classes_to_analyze: the list of classes to analyze; refer to FindBugs's
+                        -onlyAnalyze command line option.
+    auxiliary_classes: classes that help the analysis; refer to FindBugs's
+                       -auxclasspath command line option.
+    output_file: An optional path to dump XML results to.
+    findbug_args: A list of additional command line options to pass to
+                  FindBugs.
+    jars: A list of jars to analyze.
+  """
+  # TODO(jbudorick): Get this from the build system.
+  system_classes = [
+    os.path.join(constants.ANDROID_SDK_ROOT, 'platforms',
+                 'android-%s' % constants.ANDROID_SDK_VERSION, 'android.jar')
+  ]
+  system_classes.extend(os.path.abspath(classes)
+                        for classes in auxiliary_classes or [])
+
+  cmd = ['java',
+         '-classpath', '%s:' % _FINDBUGS_JAR,
+         '-Xmx%dm' % _FINDBUGS_MAX_HEAP,
+         '-Dfindbugs.home="%s"' % _FINDBUGS_HOME,
+         '-jar', _FINDBUGS_JAR,
+         '-textui', '-sortByClass',
+         '-pluginList', _FINDBUGS_PLUGIN_PATH, '-xml:withMessages']
+  if system_classes:
+    cmd.extend(['-auxclasspath', ':'.join(system_classes)])
+  if classes_to_analyze:
+    cmd.extend(['-onlyAnalyze', classes_to_analyze])
+  if exclude:
+    cmd.extend(['-exclude', os.path.abspath(exclude)])
+  if output_file:
+    cmd.extend(['-output', output_file])
+  if findbug_args:
+    cmd.extend(findbug_args)
+  cmd.extend(os.path.abspath(j) for j in jars or [])
+
+  if output_file:
+    cmd_helper.RunCmd(cmd)
+    results_doc = xml.dom.minidom.parse(output_file)
+  else:
+    raw_out = cmd_helper.GetCmdOutput(cmd)
+    results_doc = xml.dom.minidom.parseString(raw_out)
+
+  current_warnings_set = _ParseXmlResults(results_doc)
+
+  return (' '.join(cmd), current_warnings_set)
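+
+# Illustrative invocation (a sketch; the jar path is hypothetical):
+#
+#   cmd_str, warnings = Run(
+#       exclude=None, classes_to_analyze=None, auxiliary_classes=None,
+#       output_file='/tmp/findbugs.xml', findbug_args=None,
+#       jars=['out/Debug/lib.java/chrome_java.jar'])
+#   for warning in sorted(warnings):
+#     print warning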
+
diff --git a/build/android/pylib/utils/host_path_finder.py b/build/android/pylib/utils/host_path_finder.py
new file mode 100644
index 0000000..389ac43
--- /dev/null
+++ b/build/android/pylib/utils/host_path_finder.py
@@ -0,0 +1,22 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+from pylib import constants
+
+
+def GetMostRecentHostPath(file_name):
+  """Returns the most recent existing full path for the given file name.
+
+  Returns: An empty string if no path could be found.
+  """
+  out_dir = os.path.join(
+      constants.DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'))
+  candidate_paths = [os.path.join(out_dir, build_type, file_name)
+                     for build_type in ['Debug', 'Release']]
+  candidate_paths = filter(os.path.exists, candidate_paths)
+  candidate_paths = sorted(candidate_paths, key=os.path.getmtime, reverse=True)
+  candidate_paths.append('')
+  return candidate_paths[0]
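+
+# Illustrative behavior (file name is hypothetical): with both
+# out/Debug/md5sum_bin and out/Release/md5sum_bin present,
+# GetMostRecentHostPath('md5sum_bin') returns the most recently modified of
+# the two; if neither exists, it returns ''.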
diff --git a/build/android/pylib/utils/host_utils.py b/build/android/pylib/utils/host_utils.py
new file mode 100644
index 0000000..580721f
--- /dev/null
+++ b/build/android/pylib/utils/host_utils.py
@@ -0,0 +1,16 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+
+def GetRecursiveDiskUsage(path):
+  """Returns the disk usage in bytes of |path|. Similar to `du -sb |path|`."""
+  running_size = os.path.getsize(path)
+  if os.path.isdir(path):
+    for root, dirs, files in os.walk(path):
+      running_size += sum([os.path.getsize(os.path.join(root, f))
+                           for f in files + dirs])
+  return running_size
+
diff --git a/build/android/pylib/utils/isolator.py b/build/android/pylib/utils/isolator.py
new file mode 100644
index 0000000..cac39d8
--- /dev/null
+++ b/build/android/pylib/utils/isolator.py
@@ -0,0 +1,173 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import glob
+import os
+import shutil
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+
+
+_ISOLATE_SCRIPT = os.path.join(
+    constants.DIR_SOURCE_ROOT, 'tools', 'swarming_client', 'isolate.py')
+
+
+def DefaultPathVariables():
+  return {
+    'DEPTH': constants.DIR_SOURCE_ROOT,
+    'PRODUCT_DIR': constants.GetOutDirectory(),
+  }
+
+
+def DefaultConfigVariables():
+  # Note: This list must match the --config-vars in build/isolate.gypi
+  return {
+    'CONFIGURATION_NAME': constants.GetBuildType(),
+    'OS': 'android',
+    'asan': '0',
+    'branding': 'Chromium',
+    'chromeos': '0',
+    'component': 'static_library',
+    'enable_pepper_cdms': '0',
+    'enable_plugins': '0',
+    'fastbuild': '0',
+    'icu_use_data_file_flag': '1',
+    'kasko': '0',
+    'lsan': '0',
+    'msan': '0',
+    # TODO(maruel): This may not always be true.
+    'target_arch': 'arm',
+    'tsan': '0',
+    'use_custom_libcxx': '0',
+    'use_instrumented_libraries': '0',
+    'use_prebuilt_instrumented_libraries': '0',
+    'use_openssl': '0',
+    'use_ozone': '0',
+    'use_x11': '0',
+    'v8_use_external_startup_data': '1',
+  }
+
+
+class Isolator(object):
+  """Manages calls to isolate.py for the android test runner scripts."""
+
+  def __init__(self, isolate_deps_dir):
+    """
+    Args:
+      isolate_deps_dir: The directory in which dependencies specified by
+        isolate are or should be stored.
+    """
+    self._isolate_deps_dir = isolate_deps_dir
+
+  def Clear(self):
+    """Deletes the isolate dependency directory."""
+    if os.path.exists(self._isolate_deps_dir):
+      shutil.rmtree(self._isolate_deps_dir)
+
+  def Remap(self, isolate_abs_path, isolated_abs_path,
+            path_variables=None, config_variables=None):
+    """Remaps data dependencies into |self._isolate_deps_dir|.
+
+    Args:
+      isolate_abs_path: The absolute path to the .isolate file, which specifies
+        data dependencies in the source tree.
+      isolated_abs_path: The absolute path to the .isolated file, which is
+        generated by isolate.py and specifies data dependencies in
+        |self._isolate_deps_dir| and their digests.
+      path_variables: A dict containing everything that should be passed
+        as a |--path-variable| to the isolate script. Defaults to the return
+        value of |DefaultPathVariables()|.
+      config_variables: A dict containing everything that should be passed
+        as a |--config-variable| to the isolate script. Defaults to the return
+        value of |DefaultConfigVariables()|.
+    Raises:
+      Exception if the isolate command fails for some reason.
+    """
+    if not path_variables:
+      path_variables = DefaultPathVariables()
+    if not config_variables:
+      config_variables = DefaultConfigVariables()
+
+    isolate_cmd = [
+      sys.executable, _ISOLATE_SCRIPT, 'remap',
+      '--isolate', isolate_abs_path,
+      '--isolated', isolated_abs_path,
+      '--outdir', self._isolate_deps_dir,
+    ]
+    for k, v in path_variables.iteritems():
+      isolate_cmd.extend(['--path-variable', k, v])
+    for k, v in config_variables.iteritems():
+      isolate_cmd.extend(['--config-variable', k, v])
+
+    if cmd_helper.RunCmd(isolate_cmd):
+      raise Exception('isolate command failed: %s' % ' '.join(isolate_cmd))
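+
+    # The assembled command looks roughly like (illustrative):
+    #   isolate.py remap --isolate foo.isolate --isolated foo.isolated
+    #       --outdir <isolate_deps_dir> --path-variable DEPTH <src root>
+    #       --config-variable OS android ...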
+
+  def VerifyHardlinks(self):
+    """Checks |isolate_deps_dir| for a hardlink.
+
+    Returns:
+      True if a hardlink is found.
+      False if nothing is found.
+    Raises:
+      Exception if a non-hardlink is found.
+    """
+    for root, _, filenames in os.walk(self._isolate_deps_dir):
+      if filenames:
+        linked_file = os.path.join(root, filenames[0])
+        orig_file = os.path.join(
+            self._isolate_deps_dir,
+            os.path.relpath(linked_file, self._isolate_deps_dir))
+        if os.stat(linked_file).st_ino == os.stat(orig_file).st_ino:
+          return True
+        else:
+          raise Exception('isolate remap command did not use hardlinks.')
+    return False
+
+  def PurgeExcluded(self, deps_exclusion_list):
+    """Deletes anything on |deps_exclusion_list| from |self._isolate_deps_dir|.
+
+    Args:
+      deps_exclusion_list: A list of globs to exclude from the isolate
+        dependency directory.
+    """
+    excluded_paths = (
+        x for y in deps_exclusion_list
+        for x in glob.glob(
+            os.path.abspath(os.path.join(self._isolate_deps_dir, y))))
+    for p in excluded_paths:
+      if os.path.isdir(p):
+        shutil.rmtree(p)
+      else:
+        os.remove(p)
+
+  def MoveOutputDeps(self):
+    """Moves files from the output directory to the top level of
+      |self._isolate_deps_dir|.
+
+    Moves pak files from the output directory to <isolate_deps_dir>/paks.
+    Moves files from the product directory to <isolate_deps_dir>.
+    """
+    # On Android, all pak files need to be in the top-level 'paks' directory.
+    paks_dir = os.path.join(self._isolate_deps_dir, 'paks')
+    os.mkdir(paks_dir)
+
+    deps_out_dir = os.path.join(
+        self._isolate_deps_dir,
+        os.path.relpath(os.path.join(constants.GetOutDirectory(), os.pardir),
+                        constants.DIR_SOURCE_ROOT))
+    for root, _, filenames in os.walk(deps_out_dir):
+      for filename in fnmatch.filter(filenames, '*.pak'):
+        shutil.move(os.path.join(root, filename), paks_dir)
+
+    # Move everything in PRODUCT_DIR to top level.
+    deps_product_dir = os.path.join(deps_out_dir, constants.GetBuildType())
+    if os.path.isdir(deps_product_dir):
+      for p in os.listdir(deps_product_dir):
+        shutil.move(os.path.join(deps_product_dir, p), self._isolate_deps_dir)
+      os.rmdir(deps_product_dir)
+      os.rmdir(deps_out_dir)
+
diff --git a/build/android/pylib/utils/json_results_generator_unittest.py b/build/android/pylib/utils/json_results_generator_unittest.py
new file mode 100644
index 0000000..41ab77b
--- /dev/null
+++ b/build/android/pylib/utils/json_results_generator_unittest.py
@@ -0,0 +1,213 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# webkitpy/layout_tests/layout_package/json_results_generator_unittest.py
+#
+
+import unittest
+import json
+
+from pylib.utils import json_results_generator
+
+
+class JSONGeneratorTest(unittest.TestCase):
+
+  def setUp(self):
+    self.builder_name = 'DUMMY_BUILDER_NAME'
+    self.build_name = 'DUMMY_BUILD_NAME'
+    self.build_number = 'DUMMY_BUILDER_NUMBER'
+
+    # For archived results.
+    self._json = None
+    self._num_runs = 0
+    self._tests_set = set([])
+    self._test_timings = {}
+    self._failed_count_map = {}
+
+    self._PASS_count = 0
+    self._DISABLED_count = 0
+    self._FLAKY_count = 0
+    self._FAILS_count = 0
+    self._fixable_count = 0
+
+    self._orig_write_json = json_results_generator.WriteJSON
+
+    # unused arguments ... pylint: disable=W0613
+    def _WriteJSONStub(json_object, file_path, callback=None):
+      pass
+
+    json_results_generator.WriteJSON = _WriteJSONStub
+
+  def tearDown(self):
+    json_results_generator.WriteJSON = self._orig_write_json
+
+  def _TestJSONGeneration(self, passed_tests_list, failed_tests_list):
+    tests_set = set(passed_tests_list) | set(failed_tests_list)
+
+    DISABLED_tests = set([t for t in tests_set
+                          if t.startswith('DISABLED_')])
+    FLAKY_tests = set([t for t in tests_set
+                       if t.startswith('FLAKY_')])
+    FAILS_tests = set([t for t in tests_set
+                       if t.startswith('FAILS_')])
+    PASS_tests = tests_set - (DISABLED_tests | FLAKY_tests | FAILS_tests)
+
+    failed_tests = set(failed_tests_list) - DISABLED_tests
+    failed_count_map = dict([(t, 1) for t in failed_tests])
+
+    test_timings = {}
+    i = 0
+    for test in tests_set:
+      test_timings[test] = float(self._num_runs * 100 + i)
+      i += 1
+
+    test_results_map = dict()
+    for test in tests_set:
+      test_results_map[test] = json_results_generator.TestResult(
+          test, failed=(test in failed_tests),
+          elapsed_time=test_timings[test])
+
+    generator = json_results_generator.JSONResultsGeneratorBase(
+        self.builder_name, self.build_name, self.build_number,
+        '',
+        None,   # don't fetch past json results archive
+        test_results_map)
+
+    failed_count_map = dict([(t, 1) for t in failed_tests])
+
+    # Test incremental json results
+    incremental_json = generator.GetJSON()
+    self._VerifyJSONResults(
+        tests_set,
+        test_timings,
+        failed_count_map,
+        len(PASS_tests),
+        len(DISABLED_tests),
+        len(FLAKY_tests),
+        len(DISABLED_tests | failed_tests),
+        incremental_json,
+        1)
+
+    # We don't verify the results here, but at least we make sure the code
+    # runs without errors.
+    generator.GenerateJSONOutput()
+    generator.GenerateTimesMSFile()
+
+  def _VerifyJSONResults(self, tests_set, test_timings, failed_count_map,
+                         PASS_count, DISABLED_count, FLAKY_count,
+                         fixable_count, json_obj, num_runs):
+    # Aliasing to a short name for better access to its constants.
+    JRG = json_results_generator.JSONResultsGeneratorBase
+
+    self.assertIn(JRG.VERSION_KEY, json_obj)
+    self.assertIn(self.builder_name, json_obj)
+
+    buildinfo = json_obj[self.builder_name]
+    self.assertIn(JRG.FIXABLE, buildinfo)
+    self.assertIn(JRG.TESTS, buildinfo)
+    self.assertEqual(len(buildinfo[JRG.BUILD_NUMBERS]), num_runs)
+    self.assertEqual(buildinfo[JRG.BUILD_NUMBERS][0], self.build_number)
+
+    if tests_set or DISABLED_count:
+      fixable = {}
+      for fixable_items in buildinfo[JRG.FIXABLE]:
+        for (result_type, count) in fixable_items.iteritems():
+          if result_type in fixable:
+            fixable[result_type] = fixable[result_type] + count
+          else:
+            fixable[result_type] = count
+
+      if PASS_count:
+        self.assertEqual(fixable[JRG.PASS_RESULT], PASS_count)
+      else:
+        self.assertTrue(JRG.PASS_RESULT not in fixable or
+                        fixable[JRG.PASS_RESULT] == 0)
+      if DISABLED_count:
+        self.assertEqual(fixable[JRG.SKIP_RESULT], DISABLED_count)
+      else:
+        self.assertTrue(JRG.SKIP_RESULT not in fixable or
+                        fixable[JRG.SKIP_RESULT] == 0)
+      if FLAKY_count:
+        self.assertEqual(fixable[JRG.FLAKY_RESULT], FLAKY_count)
+      else:
+        self.assertTrue(JRG.FLAKY_RESULT not in fixable or
+                        fixable[JRG.FLAKY_RESULT] == 0)
+
+    if failed_count_map:
+      tests = buildinfo[JRG.TESTS]
+      for test_name in failed_count_map.iterkeys():
+        test = self._FindTestInTrie(test_name, tests)
+
+        failed = 0
+        for result in test[JRG.RESULTS]:
+          if result[1] == JRG.FAIL_RESULT:
+            failed += result[0]
+        self.assertEqual(failed_count_map[test_name], failed)
+
+        timing_count = 0
+        for timings in test[JRG.TIMES]:
+          if timings[1] == test_timings[test_name]:
+            timing_count = timings[0]
+        self.assertEqual(1, timing_count)
+
+    if fixable_count:
+      self.assertEqual(sum(buildinfo[JRG.FIXABLE_COUNT]), fixable_count)
+
+  def _FindTestInTrie(self, path, trie):
+    nodes = path.split('/')
+    sub_trie = trie
+    for node in nodes:
+      self.assertIn(node, sub_trie)
+      sub_trie = sub_trie[node]
+    return sub_trie
+
+  def testJSONGeneration(self):
+    self._TestJSONGeneration([], [])
+    self._TestJSONGeneration(['A1', 'B1'], [])
+    self._TestJSONGeneration([], ['FAILS_A2', 'FAILS_B2'])
+    self._TestJSONGeneration(['DISABLED_A3', 'DISABLED_B3'], [])
+    self._TestJSONGeneration(['A4'], ['B4', 'FAILS_C4'])
+    self._TestJSONGeneration(['DISABLED_C5', 'DISABLED_D5'], ['A5', 'B5'])
+    self._TestJSONGeneration(
+        ['A6', 'B6', 'FAILS_C6', 'DISABLED_E6', 'DISABLED_F6'],
+        ['FAILS_D6'])
+
+    # Generate JSON with the same test sets. (Both incremental results and
+    # archived results must be updated appropriately.)
+    self._TestJSONGeneration(
+        ['A', 'FLAKY_B', 'DISABLED_C'],
+        ['FAILS_D', 'FLAKY_E'])
+    self._TestJSONGeneration(
+        ['A', 'DISABLED_C', 'FLAKY_E'],
+        ['FLAKY_B', 'FAILS_D'])
+    self._TestJSONGeneration(
+        ['FLAKY_B', 'DISABLED_C', 'FAILS_D'],
+        ['A', 'FLAKY_E'])
+
+  def testHierarchicalJSONGeneration(self):
+    # FIXME: Re-work tests to be more comprehensible and comprehensive.
+    self._TestJSONGeneration(['foo/A'], ['foo/B', 'bar/C'])
+
+  def testTestTimingsTrie(self):
+    individual_test_timings = []
+    individual_test_timings.append(
+        json_results_generator.TestResult(
+            'foo/bar/baz.html',
+            elapsed_time=1.2))
+    individual_test_timings.append(
+        json_results_generator.TestResult('bar.html', elapsed_time=0.0001))
+    trie = json_results_generator.TestTimingsTrie(individual_test_timings)
+
+    expected_trie = {
+        'bar.html': 0,
+        'foo': {
+            'bar': {
+                'baz.html': 1200,
+            }
+        }
+    }
+
+    self.assertEqual(json.dumps(trie), json.dumps(expected_trie))
diff --git a/build/android/pylib/utils/logging_utils.py b/build/android/pylib/utils/logging_utils.py
new file mode 100644
index 0000000..1e46fa8
--- /dev/null
+++ b/build/android/pylib/utils/logging_utils.py
@@ -0,0 +1,27 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+
+@contextlib.contextmanager
+def SuppressLogging(level=logging.ERROR):
+  """Momentarilly suppress logging events from all loggers.
+
+  TODO(jbudorick): This is not thread safe. Log events from other threads might
+  also inadvertently dissapear.
+
+  Example:
+
+    with logging_utils.SuppressLogging():
+      # all but CRITICAL logging messages are suppressed
+      logging.info('just doing some thing') # not shown
+      logging.critical('something really bad happened') # still shown
+
+  Args:
+    level: logging events with this or lower levels are suppressed.
+  """
+  logging.disable(level)
+  try:
+    yield
+  finally:
+    # Re-enable logging even if the suppressed block raises.
+    logging.disable(logging.NOTSET)
diff --git a/build/android/pylib/utils/md5sum.py b/build/android/pylib/utils/md5sum.py
new file mode 100644
index 0000000..3e61c8f
--- /dev/null
+++ b/build/android/pylib/utils/md5sum.py
@@ -0,0 +1,91 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import tempfile
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import device_temp_file
+
+MD5SUM_DEVICE_LIB_PATH = '/data/local/tmp/md5sum/'
+MD5SUM_DEVICE_BIN_PATH = MD5SUM_DEVICE_LIB_PATH + 'md5sum_bin'
+
+MD5SUM_DEVICE_SCRIPT_FORMAT = (
+    'test -f {path} -o -d {path} '
+    '&& LD_LIBRARY_PATH={md5sum_lib} {md5sum_bin} {path}')
+
+_STARTS_WITH_CHECKSUM_RE = re.compile(r'^\s*[0-9a-fA-F]{32}\s+')
+
+
+def CalculateHostMd5Sums(paths):
+  """Calculates the MD5 sum value for all items in |paths|.
+
+  Directories are traversed recursively and the MD5 sum of each file found is
+  reported in the result.
+
+  Args:
+    paths: A list of host paths to md5sum.
+  Returns:
+    A dict mapping file paths to their respective md5sum checksums.
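+
+  Example (hypothetical values):
+    CalculateHostMd5Sums(['/tmp/a.dat'])
+    => {'/tmp/a.dat': '0123456789abcdeffedcba9876543210'}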
+  """
+  if isinstance(paths, basestring):
+    paths = [paths]
+
+  md5sum_bin_host_path = os.path.join(
+      constants.GetOutDirectory(), 'md5sum_bin_host')
+  if not os.path.exists(md5sum_bin_host_path):
+    raise IOError('File not built: %s' % md5sum_bin_host_path)
+  out = cmd_helper.GetCmdOutput([md5sum_bin_host_path] + list(paths))
+
+  return _ParseMd5SumOutput(out.splitlines())
+
+
+def CalculateDeviceMd5Sums(paths, device):
+  """Calculates the MD5 sum value for all items in |paths|.
+
+  Directories are traversed recursively and the MD5 sum of each file found is
+  reported in the result.
+
+  Args:
+    paths: A list of device paths to md5sum.
+    device: The device on which to run md5sum (used for pushing the md5sum
+        binary and running it via the shell).
+  Returns:
+    A dict mapping file paths to their respective md5sum checksums.
+  """
+  if isinstance(paths, basestring):
+    paths = [paths]
+
+  if not device.FileExists(MD5SUM_DEVICE_BIN_PATH):
+    md5sum_dist_path = os.path.join(constants.GetOutDirectory(), 'md5sum_dist')
+    if not os.path.exists(md5sum_dist_path):
+      raise IOError('File not built: %s' % md5sum_dist_path)
+    device.adb.Push(md5sum_dist_path, MD5SUM_DEVICE_LIB_PATH)
+
+  out = []
+
+  with tempfile.NamedTemporaryFile() as md5sum_script_file:
+    with device_temp_file.DeviceTempFile(
+        device.adb) as md5sum_device_script_file:
+      md5sum_script = (
+          MD5SUM_DEVICE_SCRIPT_FORMAT.format(
+              path=p, md5sum_lib=MD5SUM_DEVICE_LIB_PATH,
+              md5sum_bin=MD5SUM_DEVICE_BIN_PATH)
+          for p in paths)
+      md5sum_script_file.write('; '.join(md5sum_script))
+      md5sum_script_file.flush()
+      device.adb.Push(md5sum_script_file.name, md5sum_device_script_file.name)
+      out = device.RunShellCommand(['sh', md5sum_device_script_file.name])
+
+  return _ParseMd5SumOutput(out)
+
+
+def _ParseMd5SumOutput(out):
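+  """Parses md5sum output into a dict mapping file paths to checksums.
+
+  Lines that do not start with a 32-character hex checksum (e.g. linker
+  warnings printed by the device binary) are ignored.
+  """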
+  hash_and_path = (l.split(None, 1) for l in out
+                   if l and _STARTS_WITH_CHECKSUM_RE.match(l))
+  return dict((p, h) for h, p in hash_and_path)
+
diff --git a/build/android/pylib/utils/md5sum_test.py b/build/android/pylib/utils/md5sum_test.py
new file mode 100755
index 0000000..c94c19d
--- /dev/null
+++ b/build/android/pylib/utils/md5sum_test.py
@@ -0,0 +1,231 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+import unittest
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import md5sum
+
+sys.path.append(
+    os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock
+
+TEST_OUT_DIR = os.path.join('test', 'out', 'directory')
+HOST_MD5_EXECUTABLE = os.path.join(TEST_OUT_DIR, 'md5sum_bin_host')
+
+class Md5SumTest(unittest.TestCase):
+
+  def setUp(self):
+    self._patchers = [
+        mock.patch('pylib.constants.GetOutDirectory',
+                   new=mock.Mock(return_value=TEST_OUT_DIR)),
+        mock.patch('os.path.exists',
+                   new=mock.Mock(return_value=True)),
+    ]
+    for p in self._patchers:
+      p.start()
+
+  def tearDown(self):
+    for p in self._patchers:
+      p.stop()
+
+  def testCalculateHostMd5Sums_singlePath(self):
+    test_path = '/test/host/file.dat'
+    mock_get_cmd_output = mock.Mock(
+        return_value='0123456789abcdeffedcba9876543210 /test/host/file.dat')
+    with mock.patch('pylib.cmd_helper.GetCmdOutput', new=mock_get_cmd_output):
+      out = md5sum.CalculateHostMd5Sums(test_path)
+      self.assertEquals(1, len(out))
+      self.assertTrue('/test/host/file.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/test/host/file.dat'])
+      mock_get_cmd_output.assert_called_once_with(
+          [HOST_MD5_EXECUTABLE, '/test/host/file.dat'])
+
+  def testCalculateHostMd5Sums_list(self):
+    test_paths = ['/test/host/file0.dat', '/test/host/file1.dat']
+    mock_get_cmd_output = mock.Mock(
+        return_value='0123456789abcdeffedcba9876543210 /test/host/file0.dat\n'
+                     '123456789abcdef00fedcba987654321 /test/host/file1.dat\n')
+    with mock.patch('pylib.cmd_helper.GetCmdOutput', new=mock_get_cmd_output):
+      out = md5sum.CalculateHostMd5Sums(test_paths)
+      self.assertEquals(2, len(out))
+      self.assertTrue('/test/host/file0.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/test/host/file0.dat'])
+      self.assertTrue('/test/host/file1.dat' in out)
+      self.assertEquals('123456789abcdef00fedcba987654321',
+                        out['/test/host/file1.dat'])
+      mock_get_cmd_output.assert_called_once_with(
+          [HOST_MD5_EXECUTABLE, '/test/host/file0.dat',
+           '/test/host/file1.dat'])
+
+  def testCalculateHostMd5Sums_generator(self):
+    test_paths = ('/test/host/' + p for p in ['file0.dat', 'file1.dat'])
+    mock_get_cmd_output = mock.Mock(
+        return_value='0123456789abcdeffedcba9876543210 /test/host/file0.dat\n'
+                     '123456789abcdef00fedcba987654321 /test/host/file1.dat\n')
+    with mock.patch('pylib.cmd_helper.GetCmdOutput', new=mock_get_cmd_output):
+      out = md5sum.CalculateHostMd5Sums(test_paths)
+      self.assertEquals(2, len(out))
+      self.assertTrue('/test/host/file0.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/test/host/file0.dat'])
+      self.assertTrue('/test/host/file1.dat' in out)
+      self.assertEquals('123456789abcdef00fedcba987654321',
+                        out['/test/host/file1.dat'])
+      mock_get_cmd_output.assert_called_once_with(
+          [HOST_MD5_EXECUTABLE, '/test/host/file0.dat', '/test/host/file1.dat'])
+
+  def testCalculateDeviceMd5Sums_singlePath(self):
+    test_path = '/storage/emulated/legacy/test/file.dat'
+
+    device = mock.NonCallableMock()
+    device.adb = mock.NonCallableMock()
+    device.adb.Push = mock.Mock()
+    device_md5sum_output = [
+        '0123456789abcdeffedcba9876543210 '
+            '/storage/emulated/legacy/test/file.dat',
+    ]
+    device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
+
+    mock_temp_file = mock.mock_open()
+    mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
+
+    mock_device_temp_file = mock.mock_open()
+    mock_device_temp_file.return_value.name = (
+        '/data/local/tmp/test/script/file.sh')
+
+    with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
+         mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
+                    new=mock_device_temp_file)):
+      out = md5sum.CalculateDeviceMd5Sums(test_path, device)
+      self.assertEquals(1, len(out))
+      self.assertTrue('/storage/emulated/legacy/test/file.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/storage/emulated/legacy/test/file.dat'])
+      device.adb.Push.assert_called_once_with(
+          '/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
+      device.RunShellCommand.assert_called_once_with(
+          ['sh', '/data/local/tmp/test/script/file.sh'])
+
+  def testCalculateDeviceMd5Sums_list(self):
+    test_path = ['/storage/emulated/legacy/test/file0.dat',
+                 '/storage/emulated/legacy/test/file1.dat']
+    device = mock.NonCallableMock()
+    device.adb = mock.NonCallableMock()
+    device.adb.Push = mock.Mock()
+    device_md5sum_output = [
+        '0123456789abcdeffedcba9876543210 '
+            '/storage/emulated/legacy/test/file0.dat',
+        '123456789abcdef00fedcba987654321 '
+            '/storage/emulated/legacy/test/file1.dat',
+    ]
+    device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
+
+    mock_temp_file = mock.mock_open()
+    mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
+
+    mock_device_temp_file = mock.mock_open()
+    mock_device_temp_file.return_value.name = (
+        '/data/local/tmp/test/script/file.sh')
+
+    with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
+         mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
+                    new=mock_device_temp_file)):
+      out = md5sum.CalculateDeviceMd5Sums(test_path, device)
+      self.assertEquals(2, len(out))
+      self.assertTrue('/storage/emulated/legacy/test/file0.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/storage/emulated/legacy/test/file0.dat'])
+      self.assertTrue('/storage/emulated/legacy/test/file1.dat' in out)
+      self.assertEquals('123456789abcdef00fedcba987654321',
+                        out['/storage/emulated/legacy/test/file1.dat'])
+      device.adb.Push.assert_called_once_with(
+          '/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
+      device.RunShellCommand.assert_called_once_with(
+          ['sh', '/data/local/tmp/test/script/file.sh'])
+
+  def testCalculateDeviceMd5Sums_generator(self):
+    test_path = ('/storage/emulated/legacy/test/file%d.dat' % n
+                 for n in xrange(0, 2))
+
+    device = mock.NonCallableMock()
+    device.adb = mock.NonCallableMock()
+    device.adb.Push = mock.Mock()
+    device_md5sum_output = [
+        '0123456789abcdeffedcba9876543210 '
+            '/storage/emulated/legacy/test/file0.dat',
+        '123456789abcdef00fedcba987654321 '
+            '/storage/emulated/legacy/test/file1.dat',
+    ]
+    device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
+
+    mock_temp_file = mock.mock_open()
+    mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
+
+    mock_device_temp_file = mock.mock_open()
+    mock_device_temp_file.return_value.name = (
+        '/data/local/tmp/test/script/file.sh')
+
+    with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
+         mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
+                    new=mock_device_temp_file)):
+      out = md5sum.CalculateDeviceMd5Sums(test_path, device)
+      self.assertEquals(2, len(out))
+      self.assertTrue('/storage/emulated/legacy/test/file0.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/storage/emulated/legacy/test/file0.dat'])
+      self.assertTrue('/storage/emulated/legacy/test/file1.dat' in out)
+      self.assertEquals('123456789abcdef00fedcba987654321',
+                        out['/storage/emulated/legacy/test/file1.dat'])
+      device.adb.Push.assert_called_once_with(
+          '/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
+      device.RunShellCommand.assert_called_once_with(
+          ['sh', '/data/local/tmp/test/script/file.sh'])
+
+  def testCalculateDeviceMd5Sums_singlePath_linkerWarning(self):
+    # See crbug/479966
+    test_path = '/storage/emulated/legacy/test/file.dat'
+
+    device = mock.NonCallableMock()
+    device.adb = mock.NonCallableMock()
+    device.adb.Push = mock.Mock()
+    device_md5sum_output = [
+        'WARNING: linker: /data/local/tmp/md5sum/md5sum_bin: '
+            'unused DT entry: type 0x1d arg 0x15db',
+        'THIS_IS_NOT_A_VALID_CHECKSUM_ZZZ some random text',
+        '0123456789abcdeffedcba9876543210 '
+            '/storage/emulated/legacy/test/file.dat',
+    ]
+    device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
+
+    mock_temp_file = mock.mock_open()
+    mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
+
+    mock_device_temp_file = mock.mock_open()
+    mock_device_temp_file.return_value.name = (
+        '/data/local/tmp/test/script/file.sh')
+
+    with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
+         mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
+                    new=mock_device_temp_file)):
+      out = md5sum.CalculateDeviceMd5Sums(test_path, device)
+      self.assertEquals(1, len(out))
+      self.assertTrue('/storage/emulated/legacy/test/file.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/storage/emulated/legacy/test/file.dat'])
+      device.adb.Push.assert_called_once_with(
+          '/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
+      device.RunShellCommand.assert_called_once_with(
+          ['sh', '/data/local/tmp/test/script/file.sh'])
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/utils/mock_calls.py b/build/android/pylib/utils/mock_calls.py
new file mode 100644
index 0000000..59167ba
--- /dev/null
+++ b/build/android/pylib/utils/mock_calls.py
@@ -0,0 +1,182 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A test facility to assert call sequences while mocking their behavior.
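+
+A usage sketch (a hedged example; |CreateAdb| is a hypothetical factory, and
+the behavior mirrors the unit tests in mock_calls_test.py):
+
+  class MyTest(mock_calls.TestCase):
+    def setUp(self):
+      self.adb = CreateAdb()  # hypothetical helper
+
+    def testShell(self):
+      with self.assertCall(self.call.adb.Shell('echo hello'), 'hello\n'):
+        self.assertEquals('hello\n', self.adb.Shell('echo hello'))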
+"""
+
+import os
+import sys
+import unittest
+
+from pylib import constants
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+
+class TestCase(unittest.TestCase):
+  """Adds assertCalls to TestCase objects."""
+  class _AssertCalls(object):
+    def __init__(self, test_case, expected_calls, watched):
+      def call_action(pair):
+        if isinstance(pair, type(mock.call)):
+          return (pair, None)
+        else:
+          return pair
+
+      def do_check(call):
+        def side_effect(*args, **kwargs):
+          received_call = call(*args, **kwargs)
+          self._test_case.assertTrue(
+              self._expected_calls,
+              msg=('Unexpected call: %s' % str(received_call)))
+          expected_call, action = self._expected_calls.pop(0)
+          self._test_case.assertTrue(
+              received_call == expected_call,
+              msg=('Expected call mismatch:\n'
+                   '  expected: %s\n'
+                   '  received: %s\n'
+                   % (str(expected_call), str(received_call))))
+          if callable(action):
+            return action(*args, **kwargs)
+          else:
+            return action
+        return side_effect
+
+      self._test_case = test_case
+      self._expected_calls = [call_action(pair) for pair in expected_calls]
+      watched = watched.copy() # do not pollute the caller's dict
+      watched.update((call.parent.name, call.parent)
+                     for call, _ in self._expected_calls)
+      self._patched = [test_case.patch_call(call, side_effect=do_check(call))
+                       for call in watched.itervalues()]
+
+    def __enter__(self):
+      for patch in self._patched:
+        patch.__enter__()
+      return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+      for patch in self._patched:
+        patch.__exit__(exc_type, exc_val, exc_tb)
+      if exc_type is None:
+        missing = ''.join('  expected: %s\n' % str(call)
+                          for call, _ in self._expected_calls)
+        self._test_case.assertFalse(
+            missing,
+            msg='Expected calls not found:\n' + missing)
+
+  def __init__(self, *args, **kwargs):
+    super(TestCase, self).__init__(*args, **kwargs)
+    self.call = mock.call.self
+    self._watched = {}
+
+  def call_target(self, call):
+    """Resolve a self.call instance to the target it represents.
+
+    Args:
+      call: a self.call instance, e.g. self.call.adb.Shell
+
+    Returns:
+      The target object represented by the call, e.g. self.adb.Shell
+
+    Raises:
+      ValueError if the path of the call does not start with "self", i.e. the
+          target of the call is external to the self object.
+      AttributeError if the path of the call does not specify a valid
+          chain of attributes (without any calls) starting from "self".
+    """
+    path = call.name.split('.')
+    if path.pop(0) != 'self':
+      raise ValueError("Target %r outside of 'self' object" % call.name)
+    target = self
+    for attr in path:
+      target = getattr(target, attr)
+    return target
+
+  def patch_call(self, call, **kwargs):
+    """Patch the target of a mock.call instance.
+
+    Args:
+      call: a mock.call instance identifying a target to patch
+      Extra keyword arguments are processed by mock.patch
+
+    Returns:
+      A context manager to mock/unmock the target of the call
+    """
+    if call.name.startswith('self.'):
+      target = self.call_target(call.parent)
+      _, attribute = call.name.rsplit('.', 1)
+      if (hasattr(type(target), attribute)
+          and isinstance(getattr(type(target), attribute), property)):
+        return mock.patch.object(
+            type(target), attribute, new_callable=mock.PropertyMock, **kwargs)
+      else:
+        return mock.patch.object(target, attribute, **kwargs)
+    else:
+      return mock.patch(call.name, **kwargs)
+
+  def watchCalls(self, calls):
+    """Add calls to the set of watched calls.
+
+    Args:
+      calls: a sequence of mock.call instances identifying targets to watch
+    """
+    self._watched.update((call.name, call) for call in calls)
+
+  def watchMethodCalls(self, call, ignore=None):
+    """Watch all public methods of the target identified by a self.call.
+
+    Args:
+      call: a self.call instance identifying an object
+      ignore: a list of public methods to ignore when watching for calls
+    """
+    target = self.call_target(call)
+    if ignore is None:
+      ignore = []
+    self.watchCalls(getattr(call, method)
+                    for method in dir(target.__class__)
+                    if not method.startswith('_') and method not in ignore)
+
+  def clearWatched(self):
+    """Clear the set of watched calls."""
+    self._watched = {}
+
+  def assertCalls(self, *calls):
+    """A context manager to assert that a sequence of calls is made.
+
+    During the assertion, a number of functions and methods will be "watched",
+    and any calls made to them are expected to appear, in the exact same order
+    and with the exact same arguments, as specified by the argument |calls|.
+
+    By default, the targets of all expected calls are watched. Further targets
+    to watch may be added using watchCalls and watchMethodCalls.
+
+    Optionally, each call may be accompanied by an action. If the action is a
+    (non-callable) value, this value will be used as the return value given to
+    the caller when the matching call is found. Alternatively, if the action is
+    a callable, the action will then be called with the same arguments as the
+    intercepted call, so that it can provide a return value or perform other
+    side effects. If the action is missing, a return value of None is assumed.
+
+    Note that mock.Mock objects are often convenient to use as a callable
+    action, e.g. to raise exceptions or return other objects which are
+    themselves callable.
+
+    Args:
+      calls: each argument is either a pair (expected_call, action) or just an
+          expected_call, where expected_call is a mock.call instance.
+
+    Raises:
+      AssertionError if the watched targets do not receive the exact sequence
+          of calls specified. Missing calls, extra calls, and calls with
+          mismatching arguments, all cause the assertion to fail.
+    """
+    return self._AssertCalls(self, calls, self._watched)
+
+  def assertCall(self, call, action=None):
+    return self.assertCalls((call, action))
+
diff --git a/build/android/pylib/utils/mock_calls_test.py b/build/android/pylib/utils/mock_calls_test.py
new file mode 100755
index 0000000..4dbafd4
--- /dev/null
+++ b/build/android/pylib/utils/mock_calls_test.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of mock_calls.py.
+"""
+
+import logging
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.utils import mock_calls
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+
+class _DummyAdb(object):
+  def __str__(self):
+    return '0123456789abcdef'
+
+  def Push(self, host_path, device_path):
+    logging.debug('(device %s) pushing %r to %r', self, host_path, device_path)
+
+  def IsOnline(self):
+    logging.debug('(device %s) checking device online', self)
+    return True
+
+  def Shell(self, cmd):
+    logging.debug('(device %s) running command %r', self, cmd)
+    return "nice output\n"
+
+  def Reboot(self):
+    logging.debug('(device %s) rebooted!', self)
+
+  @property
+  def build_version_sdk(self):
+    logging.debug('(device %s) getting build_version_sdk', self)
+    return constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP
+
+
+class TestCaseWithAssertCallsTest(mock_calls.TestCase):
+  def setUp(self):
+    self.adb = _DummyAdb()
+
+  def ShellError(self):
+    def action(cmd):
+      raise ValueError('(device %s) command %r is not nice' % (self.adb, cmd))
+    return action
+
+  def get_answer(self):
+    logging.debug("called 'get_answer' of %r object", self)
+    return 42
+
+  def echo(self, thing):
+    logging.debug("called 'echo' of %r object", self)
+    return thing
+
+  def testCallTarget_succeeds(self):
+    self.assertEquals(self.adb.Shell,
+                      self.call_target(self.call.adb.Shell))
+
+  def testCallTarget_failsExternal(self):
+    with self.assertRaises(ValueError):
+      self.call_target(mock.call.sys.getcwd)
+
+  def testCallTarget_failsUnknownAttribute(self):
+    with self.assertRaises(AttributeError):
+      self.call_target(self.call.adb.Run)
+
+  def testCallTarget_failsIntermediateCalls(self):
+    with self.assertRaises(AttributeError):
+      self.call_target(self.call.adb.RunShell('cmd').append)
+
+  def testPatchCall_method(self):
+    self.assertEquals(42, self.get_answer())
+    with self.patch_call(self.call.get_answer, return_value=123):
+      self.assertEquals(123, self.get_answer())
+    self.assertEquals(42, self.get_answer())
+
+  def testPatchCall_attribute_method(self):
+    with self.patch_call(self.call.adb.Shell, return_value='hello'):
+      self.assertEquals('hello', self.adb.Shell('echo hello'))
+
+  def testPatchCall_global(self):
+    with self.patch_call(mock.call.os.getcwd, return_value='/some/path'):
+      self.assertEquals('/some/path', os.getcwd())
+
+  def testPatchCall_withSideEffect(self):
+    with self.patch_call(self.call.adb.Shell, side_effect=ValueError):
+      with self.assertRaises(ValueError):
+        self.adb.Shell('echo hello')
+
+  def testPatchCall_property(self):
+    self.assertEquals(constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP,
+                      self.adb.build_version_sdk)
+    with self.patch_call(
+        self.call.adb.build_version_sdk,
+        return_value=constants.ANDROID_SDK_VERSION_CODES.KITKAT):
+      self.assertEquals(constants.ANDROID_SDK_VERSION_CODES.KITKAT,
+                        self.adb.build_version_sdk)
+    self.assertEquals(constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP,
+                      self.adb.build_version_sdk)
+
+  def testAssertCalls_succeeds_simple(self):
+    self.assertEquals(42, self.get_answer())
+    with self.assertCall(self.call.get_answer(), 123):
+      self.assertEquals(123, self.get_answer())
+    self.assertEquals(42, self.get_answer())
+
+  def testAssertCalls_succeeds_multiple(self):
+    with self.assertCalls(
+        (mock.call.os.getcwd(), '/some/path'),
+        (self.call.echo('hello'), 'hello'),
+        (self.call.get_answer(), 11),
+        self.call.adb.Push('this_file', 'that_file'),
+        (self.call.get_answer(), 12)):
+      self.assertEquals(os.getcwd(), '/some/path')
+      self.assertEquals('hello', self.echo('hello'))
+      self.assertEquals(11, self.get_answer())
+      self.adb.Push('this_file', 'that_file')
+      self.assertEquals(12, self.get_answer())
+
+  def testAssertCalls_succeeds_withAction(self):
+    with self.assertCall(
+        self.call.adb.Shell('echo hello'), self.ShellError()):
+      with self.assertRaises(ValueError):
+        self.adb.Shell('echo hello')
+
+  def testAssertCalls_fails_tooManyCalls(self):
+    with self.assertRaises(AssertionError):
+      with self.assertCalls(self.call.adb.IsOnline()):
+        self.adb.IsOnline()
+        self.adb.IsOnline()
+
+  def testAssertCalls_fails_tooFewCalls(self):
+    with self.assertRaises(AssertionError):
+      with self.assertCalls(self.call.adb.IsOnline()):
+        pass
+
+  def testAssertCalls_succeeds_extraCalls(self):
+    # we are not watching Reboot, so the assertion succeeds
+    with self.assertCalls(self.call.adb.IsOnline()):
+      self.adb.IsOnline()
+      self.adb.Reboot()
+
+  def testAssertCalls_fails_extraCalls(self):
+    self.watchCalls([self.call.adb.Reboot])
+    # this time we are also watching Reboot, so the assertion fails
+    with self.assertRaises(AssertionError):
+      with self.assertCalls(self.call.adb.IsOnline()):
+        self.adb.IsOnline()
+        self.adb.Reboot()
+
+  def testAssertCalls_succeeds_NoCalls(self):
+    self.watchMethodCalls(self.call.adb) # we are watching all adb methods
+    with self.assertCalls():
+      pass
+
+  def testAssertCalls_fails_NoCalls(self):
+    self.watchMethodCalls(self.call.adb)
+    with self.assertRaises(AssertionError):
+      with self.assertCalls():
+        self.adb.IsOnline()
+
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/utils/parallelizer.py b/build/android/pylib/utils/parallelizer.py
new file mode 100644
index 0000000..9a85b54
--- /dev/null
+++ b/build/android/pylib/utils/parallelizer.py
@@ -0,0 +1,242 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Wrapper that allows method execution in parallel.
+
+This class wraps a list of objects of the same type, emulates their
+interface, and executes any functions called on the objects in parallel
+in ReraiserThreads.
+
+This means that, given a list of objects:
+
+  class Foo:
+    def __init__(self):
+      self.baz = Baz()
+
+    def bar(self, my_param):
+      # do something
+
+  list_of_foos = [Foo(), Foo(), Foo()]
+
+we can take a sequential operation on that list of objects:
+
+  for f in list_of_foos:
+    f.bar('Hello')
+
+and run it in parallel across all of the objects:
+
+  Parallelizer(list_of_foos).bar('Hello')
+
+It can also handle (non-method) attributes of objects, so that this:
+
+  for f in list_of_foos:
+    f.baz.myBazMethod()
+
+can be run in parallel with:
+
+  Parallelizer(list_of_foos).baz.myBazMethod()
+
+Because it emulates the interface of the wrapped objects, a Parallelizer
+can be passed to a method or function that takes objects of that type:
+
+  def DoesSomethingWithFoo(the_foo):
+    the_foo.bar('Hello')
+    the_foo.bar('world')
+    the_foo.baz.myBazMethod()
+
+  DoesSomethingWithFoo(Parallelizer(list_of_foos))
+
+Note that this class spins up a thread for each object. Using this class
+to parallelize operations that are already fast will incur a net performance
+penalty.
+
+"""
+# pylint: disable=protected-access
+
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 3
+
+
+class Parallelizer(object):
+  """Allows parallel execution of method calls across a group of objects."""
+
+  def __init__(self, objs):
+    assert (objs is not None and len(objs) > 0), (
+        "Passed empty list to 'Parallelizer'")
+    self._orig_objs = objs
+    self._objs = objs
+
+  def __getattr__(self, name):
+    """Emulate getting the |name| attribute of |self|.
+
+    Args:
+      name: The name of the attribute to retrieve.
+    Returns:
+      A Parallelizer emulating the |name| attribute of |self|.
+    """
+    self.pGet(None)
+
+    r = type(self)(self._orig_objs)
+    r._objs = [getattr(o, name) for o in self._objs]
+    return r
+
+  def __getitem__(self, index):
+    """Emulate getting the value of |self| at |index|.
+
+    Returns:
+      A Parallelizer emulating the value of |self| at |index|.
+    """
+    self.pGet(None)
+
+    r = type(self)(self._orig_objs)
+    r._objs = [o[index] for o in self._objs]
+    return r
+
+  def __call__(self, *args, **kwargs):
+    """Emulate calling |self| with |args| and |kwargs|.
+
+    Note that this call is asynchronous. Call pFinish on the return value to
+    block until the call finishes.
+
+    Returns:
+      A Parallelizer wrapping the ReraiserThreadGroup running the call in
+      parallel.
+    Raises:
+      AttributeError if the wrapped objects aren't callable.
+    """
+    self.pGet(None)
+
+    if not self._objs:
+      raise AttributeError('Nothing to call.')
+    for o in self._objs:
+      if not callable(o):
+        raise AttributeError("'%s' is not callable" % o.__name__)
+
+    r = type(self)(self._orig_objs)
+    r._objs = reraiser_thread.ReraiserThreadGroup(
+        [reraiser_thread.ReraiserThread(
+            o, args=args, kwargs=kwargs,
+            name='%s.%s' % (str(d), o.__name__))
+         for d, o in zip(self._orig_objs, self._objs)])
+    r._objs.StartAll() # pylint: disable=W0212
+    return r
+
+  def pFinish(self, timeout):
+    """Finish any outstanding asynchronous operations.
+
+    Args:
+      timeout: The maximum number of seconds to wait for an individual
+               result to return, or None to wait forever.
+    Returns:
+      self, now emulating the return values.
+    """
+    self._assertNoShadow('pFinish')
+    if isinstance(self._objs, reraiser_thread.ReraiserThreadGroup):
+      self._objs.JoinAll()
+      self._objs = self._objs.GetAllReturnValues(
+          watchdog_timer.WatchdogTimer(timeout))
+    return self
+
+  def pGet(self, timeout):
+    """Get the current wrapped objects.
+
+    Args:
+      timeout: Same as |pFinish|.
+    Returns:
+      A list of the results, in order of the provided devices.
+    Raises:
+      Any exception raised by any of the called functions.
+    """
+    self._assertNoShadow('pGet')
+    self.pFinish(timeout)
+    return self._objs
+
+  def pMap(self, f, *args, **kwargs):
+    """Map a function across the current wrapped objects in parallel.
+
+    This calls f(o, *args, **kwargs) for each o in the set of wrapped objects.
+
+    Note that this call is asynchronous. Call pFinish on the return value to
+    block until the call finishes.
+
+    Args:
+      f: The function to call.
+      args: The positional args to pass to f.
+      kwargs: The keyword args to pass to f.
+    Returns:
+      A Parallelizer wrapping the ReraiserThreadGroup running the map in
+      parallel.
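+
+    Example (a minimal sketch):
+      doubled = Parallelizer([1, 2, 3]).pMap(lambda x: 2 * x).pGet(None)
+      # doubled == [2, 4, 6], in the order of the wrapped objects.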
+    """
+    self._assertNoShadow('pMap')
+    r = type(self)(self._orig_objs)
+    r._objs = reraiser_thread.ReraiserThreadGroup(
+        [reraiser_thread.ReraiserThread(
+            f, args=tuple([o] + list(args)), kwargs=kwargs,
+            name='%s(%s)' % (f.__name__, d))
+         for d, o in zip(self._orig_objs, self._objs)])
+    r._objs.StartAll() # pylint: disable=W0212
+    return r
+
+  def _assertNoShadow(self, attr_name):
+    """Ensures that |attr_name| isn't shadowing part of the wrapped obejcts.
+
+    If the wrapped objects _do_ have an |attr_name| attribute, it will be
+    inaccessible to clients.
+
+    Args:
+      attr_name: The attribute to check.
+    Raises:
+      AssertionError if the wrapped objects have an attribute named |attr_name|
+      or '_assertNoShadow'.
+    """
+    if isinstance(self._objs, reraiser_thread.ReraiserThreadGroup):
+      assert not hasattr(self._objs, '_assertNoShadow')
+      assert not hasattr(self._objs, attr_name)
+    else:
+      assert not any(hasattr(o, '_assertNoShadow') for o in self._objs)
+      assert not any(hasattr(o, attr_name) for o in self._objs)
+
+
+class SyncParallelizer(Parallelizer):
+  """A Parallelizer that blocks on function calls."""
+
+  #override
+  def __call__(self, *args, **kwargs):
+    """Emulate calling |self| with |args| and |kwargs|.
+
+    Note that this call is synchronous.
+
+    Returns:
+      A Parallelizer emulating the value returned from calling |self| with
+      |args| and |kwargs|.
+    Raises:
+      AttributeError if the wrapped objects aren't callable.
+    """
+    r = super(SyncParallelizer, self).__call__(*args, **kwargs)
+    r.pFinish(None)
+    return r
+
+  #override
+  def pMap(self, f, *args, **kwargs):
+    """Map a function across the current wrapped objects in parallel.
+
+    This calls f(o, *args, **kwargs) for each o in the set of wrapped objects.
+
+    Note that this call is synchronous.
+
+    Args:
+      f: The function to call.
+      args: The positional args to pass to f.
+      kwargs: The keyword args to pass to f.
+    Returns:
+      A Parallelizer emulating the values returned from the map, which has
+      already finished by the time this returns.
+    """
+    r = super(SyncParallelizer, self).pMap(f, *args, **kwargs)
+    r.pFinish(None)
+    return r
+
diff --git a/build/android/pylib/utils/parallelizer_test.py b/build/android/pylib/utils/parallelizer_test.py
new file mode 100644
index 0000000..6e0c7e7
--- /dev/null
+++ b/build/android/pylib/utils/parallelizer_test.py
@@ -0,0 +1,166 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the contents of parallelizer.py."""
+
+# pylint: disable=W0212
+# pylint: disable=W0613
+
+import os
+import tempfile
+import time
+import unittest
+
+from pylib.utils import parallelizer
+
+
+class ParallelizerTestObject(object):
+  """Class used to test parallelizer.Parallelizer."""
+
+  parallel = parallelizer.Parallelizer
+
+  def __init__(self, thing, completion_file_name=None):
+    self._thing = thing
+    self._completion_file_name = completion_file_name
+    self.helper = ParallelizerTestObjectHelper(thing)
+
+  @staticmethod
+  def doReturn(what):
+    return what
+
+  @classmethod
+  def doRaise(cls, what):
+    raise what
+
+  def doSetTheThing(self, new_thing):
+    self._thing = new_thing
+
+  def doReturnTheThing(self):
+    return self._thing
+
+  def doRaiseTheThing(self):
+    raise self._thing
+
+  def doRaiseIfExceptionElseSleepFor(self, sleep_duration):
+    if isinstance(self._thing, Exception):
+      raise self._thing
+    time.sleep(sleep_duration)
+    self._write_completion_file()
+    return self._thing
+
+  def _write_completion_file(self):
+    if self._completion_file_name and len(self._completion_file_name):
+      with open(self._completion_file_name, 'w+b') as completion_file:
+        completion_file.write('complete')
+
+  def __getitem__(self, index):
+    return self._thing[index]
+
+  def __str__(self):
+    return type(self).__name__
+
+
+class ParallelizerTestObjectHelper(object):
+
+  def __init__(self, thing):
+    self._thing = thing
+
+  def doReturnStringThing(self):
+    return str(self._thing)
+
+
+class ParallelizerTest(unittest.TestCase):
+
+  def testInitWithNone(self):
+    with self.assertRaises(AssertionError):
+      parallelizer.Parallelizer(None)
+
+  def testInitEmptyList(self):
+    with self.assertRaises(AssertionError):
+      parallelizer.Parallelizer([])
+
+  def testMethodCall(self):
+    test_data = ['abc_foo', 'def_foo', 'ghi_foo']
+    expected = ['abc_bar', 'def_bar', 'ghi_bar']
+    r = parallelizer.Parallelizer(test_data).replace('_foo', '_bar').pGet(0.1)
+    self.assertEquals(expected, r)
+
+  def testMutate(self):
+    devices = [ParallelizerTestObject(True) for _ in xrange(0, 10)]
+    self.assertTrue(all(d.doReturnTheThing() for d in devices))
+    ParallelizerTestObject.parallel(devices).doSetTheThing(False).pFinish(1)
+    self.assertTrue(not any(d.doReturnTheThing() for d in devices))
+
+  def testAllReturn(self):
+    devices = [ParallelizerTestObject(True) for _ in xrange(0, 10)]
+    results = ParallelizerTestObject.parallel(
+        devices).doReturnTheThing().pGet(1)
+    self.assertTrue(isinstance(results, list))
+    self.assertEquals(10, len(results))
+    self.assertTrue(all(results))
+
+  def testAllRaise(self):
+    devices = [ParallelizerTestObject(Exception('thing %d' % i))
+               for i in xrange(0, 10)]
+    p = ParallelizerTestObject.parallel(devices).doRaiseTheThing()
+    with self.assertRaises(Exception):
+      p.pGet(1)
+
+  def testOneFailOthersComplete(self):
+    parallel_device_count = 10
+    exception_index = 7
+    exception_msg = 'thing %d' % exception_index
+
+    # Create the files before the try block so the finally clause can always
+    # clean them up.
+    completion_files = [tempfile.NamedTemporaryFile(delete=False)
+                        for _ in xrange(0, parallel_device_count)]
+    try:
+      devices = [
+          ParallelizerTestObject(
+              i if i != exception_index else Exception(exception_msg),
+              completion_files[i].name)
+          for i in xrange(0, parallel_device_count)]
+      for f in completion_files:
+        f.close()
+      p = ParallelizerTestObject.parallel(devices)
+      with self.assertRaises(Exception) as e:
+        p.doRaiseIfExceptionElseSleepFor(2).pGet(3)
+      self.assertTrue(exception_msg in str(e.exception))
+      for i in xrange(0, parallel_device_count):
+        with open(completion_files[i].name) as f:
+          if i == exception_index:
+            self.assertEquals('', f.read())
+          else:
+            self.assertEquals('complete', f.read())
+    finally:
+      for f in completion_files:
+        os.remove(f.name)
+
+  def testReusable(self):
+    devices = [ParallelizerTestObject(True) for _ in xrange(0, 10)]
+    p = ParallelizerTestObject.parallel(devices)
+    results = p.doReturn(True).pGet(1)
+    self.assertTrue(all(results))
+    results = p.doReturn(True).pGet(1)
+    self.assertTrue(all(results))
+    with self.assertRaises(Exception):
+      results = p.doRaise(Exception('reusableTest')).pGet(1)
+
+  def testContained(self):
+    devices = [ParallelizerTestObject(i) for i in xrange(0, 10)]
+    results = (ParallelizerTestObject.parallel(devices).helper
+        .doReturnStringThing().pGet(1))
+    self.assertTrue(isinstance(results, list))
+    self.assertEquals(10, len(results))
+    for i in xrange(0, 10):
+      self.assertEquals(str(i), results[i])
+
+  def testGetItem(self):
+    devices = [ParallelizerTestObject(range(i, i+10)) for i in xrange(0, 10)]
+    results = ParallelizerTestObject.parallel(devices)[9].pGet(1)
+    self.assertEquals(range(9, 19), results)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/utils/proguard.py b/build/android/pylib/utils/proguard.py
new file mode 100644
index 0000000..34ad5c3
--- /dev/null
+++ b/build/android/pylib/utils/proguard.py
@@ -0,0 +1,148 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import tempfile
+
+from pylib import constants
+from pylib import cmd_helper
+
+
+_PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
+_PROGUARD_SUPERCLASS_RE = re.compile(r'\s*?  Superclass:\s*([\S]+)$')
+_PROGUARD_SECTION_RE = re.compile(
+    r'^(?:Interfaces|Constant Pool|Fields|Methods|Class file attributes) '
+    r'\(count = \d+\):$')
+_PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
+_PROGUARD_ANNOTATION_RE = re.compile(r'\s*?- Annotation \[L(\S*);\]:$')
+_PROGUARD_ANNOTATION_CONST_RE = (
+    re.compile(r'\s*?- Constant element value.*$'))
+_PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'\s*?- \S+? \[(.*)\]$')
+
+_PROGUARD_PATH_SDK = os.path.join(
+    constants.ANDROID_SDK_ROOT, 'tools', 'proguard', 'lib', 'proguard.jar')
+_PROGUARD_PATH_BUILT = (
+    os.path.join(os.environ['ANDROID_BUILD_TOP'], 'external', 'proguard',
+                 'lib', 'proguard.jar')
+    if 'ANDROID_BUILD_TOP' in os.environ else None)
+_PROGUARD_PATH = (
+    _PROGUARD_PATH_SDK if os.path.exists(_PROGUARD_PATH_SDK)
+    else _PROGUARD_PATH_BUILT)
+
+
+def Dump(jar_path):
+  """Dumps class and method information from a JAR into a dict via proguard.
+
+  Args:
+    jar_path: An absolute path to the JAR file to dump.
+  Returns:
+    A dict in the following format:
+      {
+        'classes': [
+          {
+            'class': '',
+            'superclass': '',
+            'annotations': {},
+            'methods': [
+              {
+                'method': '',
+                'annotations': {},
+              },
+              ...
+            ],
+          },
+          ...
+        ],
+      }
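+
+  A usage sketch (the JAR path is hypothetical):
+    dump = Dump('/absolute/path/to/tests.jar')
+    for class_info in dump['classes']:
+      print class_info['class'], sorted(class_info['annotations'])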
+  """
+
+  with tempfile.NamedTemporaryFile() as proguard_output:
+    cmd_helper.RunCmd(['java', '-jar',
+                       _PROGUARD_PATH,
+                       '-injars', jar_path,
+                       '-dontshrink',
+                       '-dontoptimize',
+                       '-dontobfuscate',
+                       '-dontpreverify',
+                       '-dump', proguard_output.name])
+
+
+    results = {
+      'classes': [],
+    }
+
+    annotation = None
+    annotation_has_value = False
+    class_result = None
+    method_result = None
+
+    for line in proguard_output:
+      line = line.strip('\r\n')
+
+      m = _PROGUARD_CLASS_RE.match(line)
+      if m:
+        class_result = {
+          'class': m.group(1).replace('/', '.'),
+          'superclass': '',
+          'annotations': {},
+          'methods': [],
+        }
+        results['classes'].append(class_result)
+        annotation = None
+        annotation_has_value = False
+        method_result = None
+        continue
+
+      if not class_result:
+        continue
+
+      m = _PROGUARD_SUPERCLASS_RE.match(line)
+      if m:
+        class_result['superclass'] = m.group(1).replace('/', '.')
+        continue
+
+      m = _PROGUARD_SECTION_RE.match(line)
+      if m:
+        annotation = None
+        annotation_has_value = False
+        method_result = None
+        continue
+
+      m = _PROGUARD_METHOD_RE.match(line)
+      if m:
+        method_result = {
+          'method': m.group(1),
+          'annotations': {},
+        }
+        class_result['methods'].append(method_result)
+        annotation = None
+        annotation_has_value = False
+        continue
+
+      m = _PROGUARD_ANNOTATION_RE.match(line)
+      if m:
+        # Ignore the annotation package.
+        annotation = m.group(1).split('/')[-1]
+        if method_result:
+          method_result['annotations'][annotation] = None
+        else:
+          class_result['annotations'][annotation] = None
+        continue
+
+      if annotation:
+        if not annotation_has_value:
+          m = _PROGUARD_ANNOTATION_CONST_RE.match(line)
+          annotation_has_value = bool(m)
+        else:
+          m = _PROGUARD_ANNOTATION_VALUE_RE.match(line)
+          if m:
+            if method_result:
+              method_result['annotations'][annotation] = m.group(1)
+            else:
+              class_result['annotations'][annotation] = m.group(1)
+          annotation_has_value = False
+
+  return results
+
diff --git a/build/android/pylib/utils/repo_utils.py b/build/android/pylib/utils/repo_utils.py
new file mode 100644
index 0000000..e0c7d2c
--- /dev/null
+++ b/build/android/pylib/utils/repo_utils.py
@@ -0,0 +1,16 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import cmd_helper
+
+
+def GetGitHeadSHA1(in_directory):
+  """Returns the git hash tag for the given directory.
+
+  Args:
+    in_directory: The directory where git is to be run.
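+
+  Example (hypothetical output):
+    GetGitHeadSHA1('/path/to/checkout')
+    => '0123456789abcdef0123456789abcdef01234567'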
+  """
+  command_line = ['git', 'log', '-1', '--pretty=format:%H']
+  output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
+  return output[0:40]
diff --git a/build/android/pylib/utils/reraiser_thread.py b/build/android/pylib/utils/reraiser_thread.py
new file mode 100644
index 0000000..0ec16b1
--- /dev/null
+++ b/build/android/pylib/utils/reraiser_thread.py
@@ -0,0 +1,158 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Thread and ThreadGroup that reraise exceptions on the main thread."""
+# pylint: disable=W0212
+
+import logging
+import sys
+import threading
+import traceback
+
+from pylib.utils import watchdog_timer
+
+
+class TimeoutError(Exception):
+  """Module-specific timeout exception."""
+  pass
+
+
+def LogThreadStack(thread):
+  """Log the stack for the given thread.
+
+  Args:
+    thread: a threading.Thread instance.
+  """
+  stack = sys._current_frames()[thread.ident]
+  logging.critical('*' * 80)
+  logging.critical('Stack dump for thread %r', thread.name)
+  logging.critical('*' * 80)
+  for filename, lineno, name, line in traceback.extract_stack(stack):
+    logging.critical('File: "%s", line %d, in %s', filename, lineno, name)
+    if line:
+      logging.critical('  %s', line.strip())
+  logging.critical('*' * 80)
+
+
+class ReraiserThread(threading.Thread):
+  """Thread class that can reraise exceptions."""
+
+  def __init__(self, func, args=None, kwargs=None, name=None):
+    """Initialize thread.
+
+    Args:
+      func: callable to call on a new thread.
+      args: list of positional arguments for callable, defaults to empty.
+      kwargs: dictionary of keyword arguments for callable, defaults to empty.
+      name: thread name, defaults to Thread-N.
+    """
+    super(ReraiserThread, self).__init__(name=name)
+    if not args:
+      args = []
+    if not kwargs:
+      kwargs = {}
+    self.daemon = True
+    self._func = func
+    self._args = args
+    self._kwargs = kwargs
+    self._ret = None
+    self._exc_info = None
+
+  def ReraiseIfException(self):
+    """Reraise exception if an exception was raised in the thread."""
+    if self._exc_info:
+      raise self._exc_info[0], self._exc_info[1], self._exc_info[2]
+
+  def GetReturnValue(self):
+    """Reraise exception if present, otherwise get the return value."""
+    self.ReraiseIfException()
+    return self._ret
+
+  #override
+  def run(self):
+    """Overrides Thread.run() to add support for reraising exceptions."""
+    try:
+      self._ret = self._func(*self._args, **self._kwargs)
+    except: # pylint: disable=W0702
+      self._exc_info = sys.exc_info()
+
+
+class ReraiserThreadGroup(object):
+  """A group of ReraiserThread objects."""
+
+  def __init__(self, threads=None):
+    """Initialize thread group.
+
+    Args:
+      threads: a list of ReraiserThread objects; defaults to empty.
+    """
+    if not threads:
+      threads = []
+    self._threads = threads
+
+  def Add(self, thread):
+    """Add a thread to the group.
+
+    Args:
+      thread: a ReraiserThread object.
+    """
+    self._threads.append(thread)
+
+  def StartAll(self):
+    """Start all threads."""
+    for thread in self._threads:
+      thread.start()
+
+  def _JoinAll(self, watcher=None):
+    """Join all threads without stack dumps.
+
+    Reraises exceptions raised by the child threads and supports breaking
+    immediately on exceptions raised on the main thread.
+
+    Args:
+      watcher: Watchdog object providing timeout, by default waits forever.
+    """
+    if watcher is None:
+      watcher = watchdog_timer.WatchdogTimer(None)
+    alive_threads = self._threads[:]
+    while alive_threads:
+      for thread in alive_threads[:]:
+        if watcher.IsTimedOut():
+          raise TimeoutError('Timed out waiting for %d of %d threads.' %
+                             (len(alive_threads), len(self._threads)))
+        # Allow the main thread to periodically check for interrupts.
+        thread.join(0.1)
+        if not thread.isAlive():
+          alive_threads.remove(thread)
+    # All threads are allowed to complete before reraising exceptions.
+    for thread in self._threads:
+      thread.ReraiseIfException()
+
+  def JoinAll(self, watcher=None):
+    """Join all threads.
+
+    Reraises exceptions raised by the child threads and supports breaking
+    immediately on exceptions raised on the main thread. Unfinished threads'
+    stacks will be logged on watchdog timeout.
+
+    Args:
+      watcher: Watchdog object providing timeout, by default waits forever.
+    """
+    try:
+      self._JoinAll(watcher)
+    except TimeoutError:
+      for thread in (t for t in self._threads if t.isAlive()):
+        LogThreadStack(thread)
+      raise
+
+  def GetAllReturnValues(self, watcher=None):
+    """Get all return values, joining all threads if necessary.
+
+    Args:
+      watcher: same as in |JoinAll|. Only used if threads are alive.
+    """
+    if any([t.isAlive() for t in self._threads]):
+      self.JoinAll(watcher)
+    return [t.GetReturnValue() for t in self._threads]
+
diff --git a/build/android/pylib/utils/reraiser_thread_unittest.py b/build/android/pylib/utils/reraiser_thread_unittest.py
new file mode 100644
index 0000000..2392d0e
--- /dev/null
+++ b/build/android/pylib/utils/reraiser_thread_unittest.py
@@ -0,0 +1,96 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for reraiser_thread.py."""
+
+import threading
+import unittest
+
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+
+class TestException(Exception):
+  pass
+
+
+class TestReraiserThread(unittest.TestCase):
+  """Tests for reraiser_thread.ReraiserThread."""
+  def testNominal(self):
+    result = [None, None]
+
+    def f(a, b=None):
+      result[0] = a
+      result[1] = b
+
+    thread = reraiser_thread.ReraiserThread(f, [1], {'b': 2})
+    thread.start()
+    thread.join()
+    self.assertEqual(result[0], 1)
+    self.assertEqual(result[1], 2)
+
+  def testRaise(self):
+    def f():
+      raise TestException
+
+    thread = reraiser_thread.ReraiserThread(f)
+    thread.start()
+    thread.join()
+    with self.assertRaises(TestException):
+      thread.ReraiseIfException()
+
+
+class TestReraiserThreadGroup(unittest.TestCase):
+  """Tests for reraiser_thread.ReraiserThreadGroup."""
+  def testInit(self):
+    ran = [False] * 5
+    def f(i):
+      ran[i] = True
+
+    group = reraiser_thread.ReraiserThreadGroup(
+      [reraiser_thread.ReraiserThread(f, args=[i]) for i in range(5)])
+    group.StartAll()
+    group.JoinAll()
+    for v in ran:
+      self.assertTrue(v)
+
+  def testAdd(self):
+    ran = [False] * 5
+    def f(i):
+      ran[i] = True
+
+    group = reraiser_thread.ReraiserThreadGroup()
+    for i in xrange(5):
+      group.Add(reraiser_thread.ReraiserThread(f, args=[i]))
+    group.StartAll()
+    group.JoinAll()
+    for v in ran:
+      self.assertTrue(v)
+
+  def testJoinRaise(self):
+    def f():
+      raise TestException
+    group = reraiser_thread.ReraiserThreadGroup(
+      [reraiser_thread.ReraiserThread(f) for _ in xrange(5)])
+    group.StartAll()
+    with self.assertRaises(TestException):
+      group.JoinAll()
+
+  def testJoinTimeout(self):
+    def f():
+      pass
+    event = threading.Event()
+    def g():
+      event.wait()
+    group = reraiser_thread.ReraiserThreadGroup(
+        [reraiser_thread.ReraiserThread(g),
+         reraiser_thread.ReraiserThread(f)])
+    group.StartAll()
+    with self.assertRaises(reraiser_thread.TimeoutError):
+      group.JoinAll(watchdog_timer.WatchdogTimer(0.01))
+    event.set()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/utils/run_tests_helper.py b/build/android/pylib/utils/run_tests_helper.py
new file mode 100644
index 0000000..43f654d
--- /dev/null
+++ b/build/android/pylib/utils/run_tests_helper.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions common to native, java and host-driven test runners."""
+
+import logging
+import sys
+import time
+
+
+class CustomFormatter(logging.Formatter):
+  """Custom log formatter."""
+
+  #override
+  def __init__(self, fmt='%(threadName)-4s  %(message)s'):
+    # Can't use super() because in older Python versions logging.Formatter does
+    # not inherit from object.
+    logging.Formatter.__init__(self, fmt=fmt)
+    self._creation_time = time.time()
+
+  #override
+  def format(self, record):
+    # Can't use super() because in older Python versions logging.Formatter does
+    # not inherit from object.
+    msg = logging.Formatter.format(self, record)
+    if 'MainThread' in msg[:19]:
+      msg = msg.replace('MainThread', 'Main', 1)
+    timediff = time.time() - self._creation_time
+    return '%s %8.3fs %s' % (record.levelname[0], timediff, msg)
+
+
+def SetLogLevel(verbose_count):
+  """Sets log level as |verbose_count|."""
+  log_level = logging.WARNING  # Default.
+  if verbose_count == 1:
+    log_level = logging.INFO
+  elif verbose_count >= 2:
+    log_level = logging.DEBUG
+  logger = logging.getLogger()
+  logger.setLevel(log_level)
+  custom_handler = logging.StreamHandler(sys.stdout)
+  custom_handler.setFormatter(CustomFormatter())
+  logger.addHandler(custom_handler)
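+# Illustrative usage sketch (assumed, not part of the original change): a
+# test runner would typically map repeated -v flags to a verbosity count.
+#
+#   from pylib.utils import run_tests_helper
+#
+#   run_tests_helper.SetLogLevel(verbose_count=2)  # -vv maps to DEBUG
+#   logging.info('now visible, prefixed with level, elapsed time and thread')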
diff --git a/build/android/pylib/utils/test_environment.py b/build/android/pylib/utils/test_environment.py
new file mode 100644
index 0000000..e78eb5c
--- /dev/null
+++ b/build/android/pylib/utils/test_environment.py
@@ -0,0 +1,47 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import psutil
+import signal
+
+from pylib.device import device_errors
+from pylib.device import device_utils
+
+
+def _KillWebServers():
+  for s in [signal.SIGTERM, signal.SIGINT, signal.SIGQUIT, signal.SIGKILL]:
+    signalled = []
+    for server in ['lighttpd', 'webpagereplay']:
+      for p in psutil.process_iter():
+        try:
+          if server not in ' '.join(p.cmdline):
+            continue
+          logging.info('Killing %s %s %s', s, server, p.pid)
+          p.send_signal(s)
+          signalled.append(p)
+        except Exception as e:
+          logging.warning('Failed killing %s %s %s', server, p.pid, e)
+    for p in signalled:
+      try:
+        p.wait(1)
+      except Exception as e:
+        logging.warning('Failed waiting for %s to die. %s', p.pid, e)
+
+
+def CleanupLeftoverProcesses():
+  """Clean up the test environment, restarting fresh adb and HTTP daemons."""
+  _KillWebServers()
+  device_utils.RestartServer()
+
+  def cleanup_device(d):
+    d.old_interface.RestartAdbdOnDevice()
+    try:
+      d.EnableRoot()
+    except device_errors.CommandFailedError as e:
+      logging.error(str(e))
+    d.WaitUntilFullyBooted()
+
+  device_utils.DeviceUtils.parallel().pMap(cleanup_device)
+
diff --git a/build/android/pylib/utils/time_profile.py b/build/android/pylib/utils/time_profile.py
new file mode 100644
index 0000000..45da7ff
--- /dev/null
+++ b/build/android/pylib/utils/time_profile.py
@@ -0,0 +1,26 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+
+class TimeProfile(object):
+  """Class for simple profiling of action, with logging of cost."""
+
+  def __init__(self, description):
+    self._starttime = None
+    self._description = description
+    self.Start()
+
+  def Start(self):
+    self._starttime = time.time()
+
+  def Stop(self):
+    """Stop profiling and dump a log."""
+    if self._starttime:
+      stoptime = time.time()
+      logging.info('%fsec to perform %s',
+                   stoptime - self._starttime, self._description)
+      self._starttime = None
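+# Illustrative usage sketch (assumed, not part of the original change):
+#
+#   timer = TimeProfile('installing the test APK')  # starts timing
+#   ...                                             # the work being measured
+#   timer.Stop()  # logs "<elapsed>sec to perform installing the test APK"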
diff --git a/build/android/pylib/utils/timeout_retry.py b/build/android/pylib/utils/timeout_retry.py
new file mode 100644
index 0000000..61f7c70
--- /dev/null
+++ b/build/android/pylib/utils/timeout_retry.py
@@ -0,0 +1,167 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A utility to run functions with timeouts and retries."""
+# pylint: disable=W0702
+
+import logging
+import threading
+import time
+import traceback
+
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+
+class TimeoutRetryThread(reraiser_thread.ReraiserThread):
+  def __init__(self, func, timeout, name):
+    super(TimeoutRetryThread, self).__init__(func, name=name)
+    self._watcher = watchdog_timer.WatchdogTimer(timeout)
+    self._expired = False
+
+  def GetWatcher(self):
+    """Returns the watchdog keeping track of this thread's time."""
+    return self._watcher
+
+  def GetElapsedTime(self):
+    return self._watcher.GetElapsed()
+
+  def GetRemainingTime(self, required=0, msg=None):
+    """Get the remaining time before the thread times out.
+
+    Useful to send as the |timeout| parameter of async IO operations.
+
+    Args:
+      required: minimum amount of time that will be required to complete, e.g.,
+        some sleep or IO operation.
+      msg: error message to show if timing out.
+
+    Returns:
+      The number of seconds remaining before the thread times out, or None
+      if the thread never times out.
+
+    Raises:
+      reraiser_thread.TimeoutError if the remaining time is less than the
+        required time.
+    """
+    remaining = self._watcher.GetRemaining()
+    if remaining is not None and remaining < required:
+      if msg is None:
+        msg = 'Timeout expired'
+      if remaining > 0:
+        msg += (', wait of %.1f secs required but only %.1f secs left'
+                % (required, remaining))
+      self._expired = True
+      raise reraiser_thread.TimeoutError(msg)
+    return remaining
+
+  def LogTimeoutException(self):
+    """Log the exception that terminated this thread."""
+    if not self._expired:
+      return
+    logging.critical('*' * 80)
+    logging.critical('%s on thread %r', self._exc_info[0].__name__, self.name)
+    logging.critical('*' * 80)
+    fmt_exc = ''.join(traceback.format_exception(*self._exc_info))
+    for line in fmt_exc.splitlines():
+      logging.critical(line.rstrip())
+    logging.critical('*' * 80)
+
+
+def CurrentTimeoutThread():
+  """Get the current thread if it is a TimeoutRetryThread.
+
+  Returns:
+    The current thread if it is a TimeoutRetryThread, otherwise None.
+  """
+  current_thread = threading.current_thread()
+  if isinstance(current_thread, TimeoutRetryThread):
+    return current_thread
+  else:
+    return None
+
+
+def WaitFor(condition, wait_period=5, max_tries=None):
+  """Wait for a condition to become true.
+
+  Repeatedly calls the function condition(), with no arguments, until it
+  returns a true value.
+
+  If called within a TimeoutRetryThread, it cooperates nicely with it.
+
+  Args:
+    condition: function with the condition to check
+    wait_period: number of seconds to wait before retrying to check the
+      condition
+    max_tries: maximum number of checks to make; by default, checks forever
+      or until the enclosing TimeoutRetryThread expires.
+
+  Returns:
+    The true value returned by the condition, or None if the condition was
+    not met after max_tries.
+
+  Raises:
+    reraiser_thread.TimeoutError if the current thread is a TimeoutRetryThread
+      and the timeout expires.
+  """
+  condition_name = condition.__name__
+  timeout_thread = CurrentTimeoutThread()
+  while max_tries is None or max_tries > 0:
+    result = condition()
+    if max_tries is not None:
+      max_tries -= 1
+    msg = ['condition', repr(condition_name), 'met' if result else 'not met']
+    if timeout_thread:
+      msg.append('(%.1fs)' % timeout_thread.GetElapsedTime())
+    logging.info(' '.join(msg))
+    if result:
+      return result
+    if timeout_thread:
+      timeout_thread.GetRemainingTime(wait_period,
+          msg='Timed out waiting for %r' % condition_name)
+    time.sleep(wait_period)
+  return None
+
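+# Illustrative WaitFor sketch (assumed, not part of the original change):
+# poll a condition once per second, at most ten times. |device| and its
+# GetProp call are hypothetical stand-ins, not APIs defined here.
+#
+#   def device_is_booted():
+#     return device.GetProp('sys.boot_completed') == '1'
+#   WaitFor(device_is_booted, wait_period=1, max_tries=10)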
+
+def Run(func, timeout, retries, args=None, kwargs=None):
+  """Runs the passed function in a separate thread with timeouts and retries.
+
+  Args:
+    func: the function to be wrapped.
+    timeout: the timeout in seconds for each try.
+    retries: the number of retries.
+    args: list of positional args to pass to |func|.
+    kwargs: dictionary of keyword args to pass to |func|.
+
+  Returns:
+    The return value of func(*args, **kwargs).
+  """
+  if not args:
+    args = []
+  if not kwargs:
+    kwargs = {}
+
+  # The return value is stored in a one-element list because Python 2
+  # closures can read, but not rebind, names in the enclosing scope;
+  # mutating the list contents works around that restriction.
+  ret = [None]
+  def RunOnTimeoutThread():
+    ret[0] = func(*args, **kwargs)
+
+  num_try = 1
+  while True:
+    child_thread = TimeoutRetryThread(
+      RunOnTimeoutThread, timeout,
+      name='TimeoutThread-%d-for-%s' % (num_try,
+                                        threading.current_thread().name))
+    try:
+      thread_group = reraiser_thread.ReraiserThreadGroup([child_thread])
+      thread_group.StartAll()
+      thread_group.JoinAll(child_thread.GetWatcher())
+      return ret[0]
+    except:
+      child_thread.LogTimeoutException()
+      if num_try > retries:
+        raise
+      num_try += 1
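+# Illustrative Run sketch (assumed, not part of the original change): give a
+# flaky operation three retries of 60 seconds each. |flaky_op| is a
+# hypothetical callable; inside it, CurrentTimeoutThread().GetRemainingTime()
+# can be used to bound any blocking I/O.
+#
+#   result = Run(flaky_op, timeout=60, retries=3)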
diff --git a/build/android/pylib/utils/timeout_retry_unittest.py b/build/android/pylib/utils/timeout_retry_unittest.py
new file mode 100644
index 0000000..dc36c42
--- /dev/null
+++ b/build/android/pylib/utils/timeout_retry_unittest.py
@@ -0,0 +1,52 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for timeout_and_retry.py."""
+
+import unittest
+
+from pylib.utils import reraiser_thread
+from pylib.utils import timeout_retry
+
+
+class TestException(Exception):
+  pass
+
+
+def _NeverEnding(tries):
+  tries[0] += 1
+  while True:
+    pass
+
+
+def _CountTries(tries):
+  tries[0] += 1
+  raise TestException
+
+
+class TestRun(unittest.TestCase):
+  """Tests for timeout_retry.Run."""
+
+  def testRun(self):
+    self.assertTrue(timeout_retry.Run(
+        lambda x: x, 30, 3, [True], {}))
+
+  def testTimeout(self):
+    tries = [0]
+    self.assertRaises(reraiser_thread.TimeoutError,
+        timeout_retry.Run, lambda: _NeverEnding(tries), 0, 3)
+    self.assertEqual(tries[0], 4)
+
+  def testRetries(self):
+    tries = [0]
+    self.assertRaises(TestException,
+        timeout_retry.Run, lambda: _CountTries(tries), 30, 3)
+    self.assertEqual(tries[0], 4)
+
+  def testReturnValue(self):
+    self.assertTrue(timeout_retry.Run(lambda: True, 30, 3))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/utils/watchdog_timer.py b/build/android/pylib/utils/watchdog_timer.py
new file mode 100644
index 0000000..2f4c464
--- /dev/null
+++ b/build/android/pylib/utils/watchdog_timer.py
@@ -0,0 +1,47 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""WatchdogTimer timeout objects."""
+
+import time
+
+
+class WatchdogTimer(object):
+  """A resetable timeout-based watchdog.
+
+  This object is threadsafe.
+  """
+
+  def __init__(self, timeout):
+    """Initializes the watchdog.
+
+    Args:
+      timeout: The timeout in seconds. If None, the watchdog never times out.
+    """
+    self._start_time = time.time()
+    self._timeout = timeout
+
+  def Reset(self):
+    """Resets the timeout countdown."""
+    self._start_time = time.time()
+
+  def GetElapsed(self):
+    """Returns the elapsed time of the watchdog."""
+    return time.time() - self._start_time
+
+  def GetRemaining(self):
+    """Returns the remaining time of the watchdog, or None if no timeout."""
+    # Compare against None rather than truthiness so that a timeout of 0
+    # expires immediately instead of being treated as "no timeout".
+    if self._timeout is None:
+      return None
+    return self._timeout - self.GetElapsed()
+
+  def IsTimedOut(self):
+    """Whether the watchdog has timed out.
+
+    Returns:
+      True if the watchdog has timed out, False otherwise.
+    """
+    remaining = self.GetRemaining()
+    return remaining is not None and remaining < 0
diff --git a/build/android/pylib/utils/xvfb.py b/build/android/pylib/utils/xvfb.py
new file mode 100644
index 0000000..cb9d50e
--- /dev/null
+++ b/build/android/pylib/utils/xvfb.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=W0702
+
+import os
+import signal
+import subprocess
+import sys
+import time
+
+
+def _IsLinux():
+  """Return True if on Linux; else False."""
+  return sys.platform.startswith('linux')
+
+
+class Xvfb(object):
+  """Class to start and stop Xvfb if relevant.  Nop if not Linux."""
+
+  def __init__(self):
+    self._pid = 0
+
+  def Start(self):
+    """Start Xvfb and set an appropriate DISPLAY environment.  Linux only.
+
+    Copied from tools/code_coverage/coverage_posix.py
+    """
+    if not _IsLinux():
+      return
+    proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
+                             '-ac'],
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    self._pid = proc.pid
+    if not self._pid:
+      raise Exception('Could not start Xvfb')
+    os.environ['DISPLAY'] = ':9'
+
+    # Now confirm, giving a chance for it to start if needed.
+    for _ in range(10):
+      proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
+      _, retcode = os.waitpid(proc.pid, 0)
+      if retcode == 0:
+        break
+      time.sleep(0.25)
+    if retcode != 0:
+      raise Exception('Could not confirm Xvfb happiness')
+
+  def Stop(self):
+    """Stop Xvfb if needed.  Linux only."""
+    if self._pid:
+      try:
+        os.kill(self._pid, signal.SIGKILL)
+      except:
+        pass
+      del os.environ['DISPLAY']
+      self._pid = 0
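+# Illustrative usage sketch (assumed, not part of the original change):
+#
+#   xvfb = Xvfb()
+#   xvfb.Start()  # no-op off Linux; otherwise starts Xvfb and sets DISPLAY=:9
+#   try:
+#     ...         # run tests that need an X display
+#   finally:
+#     xvfb.Stop()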
diff --git a/build/android/pylib/utils/zip_utils.py b/build/android/pylib/utils/zip_utils.py
new file mode 100644
index 0000000..d799463
--- /dev/null
+++ b/build/android/pylib/utils/zip_utils.py
@@ -0,0 +1,31 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import zipfile
+
+
+def WriteToZipFile(zip_file, path, arc_path):
+  """Recursively write |path| to |zip_file| as |arc_path|.
+
+  zip_file: An open instance of zipfile.ZipFile.
+  path: An absolute path to the file or directory to be zipped.
+  arc_path: A relative path within the zip file to which the file or directory
+    located at |path| should be written.
+  """
+  if os.path.isdir(path):
+    for dir_path, _, file_names in os.walk(path):
+      dir_arc_path = os.path.join(arc_path, os.path.relpath(dir_path, path))
+      logging.debug('dir:  %s -> %s', dir_path, dir_arc_path)
+      zip_file.write(dir_path, dir_arc_path, zipfile.ZIP_STORED)
+      for f in file_names:
+        file_path = os.path.join(dir_path, f)
+        file_arc_path = os.path.join(dir_arc_path, f)
+        logging.debug('file: %s -> %s', file_path, file_arc_path)
+        zip_file.write(file_path, file_arc_path, zipfile.ZIP_DEFLATED)
+  else:
+    logging.debug('file: %s -> %s', path, arc_path)
+    zip_file.write(path, arc_path, zipfile.ZIP_DEFLATED)
+
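+# Illustrative usage sketch (assumed, not part of the original change):
+# archive a build output directory under the zip-relative path 'out/'.
+#
+#   with zipfile.ZipFile('/tmp/archive.zip', 'w') as zip_file:
+#     WriteToZipFile(zip_file, '/path/to/out', 'out')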
diff --git a/build/android/pylib/valgrind_tools.py b/build/android/pylib/valgrind_tools.py
new file mode 100644
index 0000000..99719d0
--- /dev/null
+++ b/build/android/pylib/valgrind_tools.py
@@ -0,0 +1,304 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Classes in this file define additional actions that need to be taken to run a
+test under some kind of runtime error detection tool.
+
+The interface is intended to be used as follows.
+
+1. For tests that simply run a native process (i.e. no activity is spawned):
+
+Call tool.CopyFiles(device).
+Prepend test command line with tool.GetTestWrapper().
+
+2. For tests that spawn an activity:
+
+Call tool.CopyFiles(device).
+Call tool.SetupEnvironment().
+Run the test as usual.
+Call tool.CleanUpEnvironment().
+"""
+# pylint: disable=R0201
+
+import glob
+import logging
+import os.path
+import subprocess
+import sys
+
+from pylib.constants import DIR_SOURCE_ROOT
+from pylib.device import device_errors
+
+
+def SetChromeTimeoutScale(device, scale):
+  """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale."""
+  path = '/data/local/tmp/chrome_timeout_scale'
+  if not scale or scale == 1.0:
+    # Delete if scale is None/0.0/1.0 since the default timeout scale is 1.0
+    device.RunShellCommand('rm %s' % path)
+  else:
+    device.WriteFile(path, '%f' % scale, as_root=True)
+
+
+class BaseTool(object):
+  """A tool that does nothing."""
+
+  def __init__(self):
+    """Does nothing."""
+    pass
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ''
+
+  def GetUtilWrapper(self):
+    """Returns the wrapper name for the utilities.
+
+    Returns:
+      A string that is to be prepended to the command line of utility
+      processes (forwarder, etc.).
+    """
+    return ''
+
+  @classmethod
+  def CopyFiles(cls, device):
+    """Copies tool-specific files to the device, create directories, etc."""
+    pass
+
+  def SetupEnvironment(self):
+    """Sets up the system environment for a test.
+
+    This is a good place to set system properties.
+    """
+    pass
+
+  def CleanUpEnvironment(self):
+    """Cleans up environment."""
+    pass
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 1.0
+
+  def NeedsDebugInfo(self):
+    """Whether this tool requires debug info.
+
+    Returns:
+      True if this tool cannot work with stripped binaries.
+    """
+    return False
+
+
+class AddressSanitizerTool(BaseTool):
+  """AddressSanitizer tool."""
+
+  WRAPPER_NAME = '/system/bin/asanwrapper'
+  # Disable the memcmp overlap check. There are blobs (GL drivers) on some
+  # Android devices that use memcmp on overlapping regions; there is nothing
+  # we can do about that.
+  EXTRA_OPTIONS = 'strict_memcmp=0,use_sigaltstack=1'
+
+  def __init__(self, device):
+    super(AddressSanitizerTool, self).__init__()
+    self._device = device
+    # Configure AndroidCommands to run utils (such as md5sum_bin) under ASan.
+    # This is required because ASan is a compiler-based tool, and md5sum
+    # includes instrumented code from base.
+    device.old_interface.SetUtilWrapper(self.GetUtilWrapper())
+
+  @classmethod
+  def CopyFiles(cls, device):
+    """Copies ASan tools to the device."""
+    libs = glob.glob(os.path.join(DIR_SOURCE_ROOT,
+                                  'third_party/llvm-build/Release+Asserts/',
+                                  'lib/clang/*/lib/linux/',
+                                  'libclang_rt.asan-arm-android.so'))
+    assert len(libs) == 1
+    subprocess.call(
+        [os.path.join(
+             DIR_SOURCE_ROOT,
+             'tools/android/asan/third_party/asan_device_setup.sh'),
+         '--device', str(device),
+         '--lib', libs[0],
+         '--extra-options', AddressSanitizerTool.EXTRA_OPTIONS])
+    device.WaitUntilFullyBooted()
+
+  def GetTestWrapper(self):
+    return AddressSanitizerTool.WRAPPER_NAME
+
+  def GetUtilWrapper(self):
+    """Returns the wrapper for utilities, such as forwarder.
+
+    AddressSanitizer wrapper must be added to all instrumented binaries,
+    including forwarder and the like. This can be removed if such binaries
+    were built without instrumentation. """
+    return self.GetTestWrapper()
+
+  def SetupEnvironment(self):
+    try:
+      self._device.EnableRoot()
+    except device_errors.CommandFailedError as e:
+      # Try to set the timeout scale anyway.
+      # TODO(jbudorick) Handle this exception appropriately after interface
+      #                 conversions are finished.
+      logging.error(str(e))
+    SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
+
+  def CleanUpEnvironment(self):
+    SetChromeTimeoutScale(self._device, None)
+
+  def GetTimeoutScale(self):
+    # Very slow startup.
+    return 20.0
+
+
+class ValgrindTool(BaseTool):
+  """Base abstract class for Valgrind tools."""
+
+  VG_DIR = '/data/local/tmp/valgrind'
+  VGLOGS_DIR = '/data/local/tmp/vglogs'
+
+  def __init__(self, device):
+    super(ValgrindTool, self).__init__()
+    self._device = device
+    # exactly 31 chars, SystemProperties::PROP_NAME_MAX
+    self._wrap_properties = ['wrap.com.google.android.apps.ch',
+                             'wrap.org.chromium.native_test']
+
+  @classmethod
+  def CopyFiles(cls, device):
+    """Copies Valgrind tools to the device."""
+    device.RunShellCommand(
+        'rm -r %s; mkdir %s' % (ValgrindTool.VG_DIR, ValgrindTool.VG_DIR))
+    device.RunShellCommand(
+        'rm -r %s; mkdir %s' % (ValgrindTool.VGLOGS_DIR,
+                                ValgrindTool.VGLOGS_DIR))
+    files = cls.GetFilesForTool()
+    device.PushChangedFiles(
+        [(os.path.join(DIR_SOURCE_ROOT, f),
+          os.path.join(ValgrindTool.VG_DIR, os.path.basename(f)))
+         for f in files])
+
+  def SetupEnvironment(self):
+    """Sets up device environment."""
+    self._device.RunShellCommand('chmod 777 /data/local/tmp')
+    self._device.RunShellCommand('setenforce 0')
+    for prop in self._wrap_properties:
+      self._device.RunShellCommand(
+          'setprop %s "logwrapper %s"' % (prop, self.GetTestWrapper()))
+    SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
+
+  def CleanUpEnvironment(self):
+    """Cleans up device environment."""
+    for prop in self._wrap_properties:
+      self._device.RunShellCommand('setprop %s ""' % (prop,))
+    SetChromeTimeoutScale(self._device, None)
+
+  @staticmethod
+  def GetFilesForTool():
+    """Returns a list of file names for the tool."""
+    raise NotImplementedError()
+
+  def NeedsDebugInfo(self):
+    """Whether this tool requires debug info.
+
+    Returns:
+      True if this tool cannot work with stripped binaries.
+    """
+    return True
+
+
+class MemcheckTool(ValgrindTool):
+  """Memcheck tool."""
+
+  def __init__(self, device):
+    super(MemcheckTool, self).__init__(device)
+
+  @staticmethod
+  def GetFilesForTool():
+    """Returns a list of file names for the tool."""
+    return ['tools/valgrind/android/vg-chrome-wrapper.sh',
+            'tools/valgrind/memcheck/suppressions.txt',
+            'tools/valgrind/memcheck/suppressions_android.txt']
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper.sh'
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 30.0
+
+
+class TSanTool(ValgrindTool):
+  """ThreadSanitizer tool. See http://code.google.com/p/data-race-test ."""
+
+  def __init__(self, device):
+    super(TSanTool, self).__init__(device)
+
+  @staticmethod
+  def GetFilesForTool():
+    """Returns a list of file names for the tool."""
+    return ['tools/valgrind/android/vg-chrome-wrapper-tsan.sh',
+            'tools/valgrind/tsan/suppressions.txt',
+            'tools/valgrind/tsan/suppressions_android.txt',
+            'tools/valgrind/tsan/ignores.txt']
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper-tsan.sh'
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 30.0
+
+
+TOOL_REGISTRY = {
+    'memcheck': MemcheckTool,
+    'memcheck-renderer': MemcheckTool,
+    'tsan': TSanTool,
+    'tsan-renderer': TSanTool,
+    'asan': AddressSanitizerTool,
+}
+
+
+def CreateTool(tool_name, device):
+  """Creates a tool with the specified tool name.
+
+  Args:
+    tool_name: Name of the tool to create.
+    device: A DeviceUtils instance.
+  Returns:
+    A tool for the specified tool_name.
+  """
+  if not tool_name:
+    return BaseTool()
+
+  ctor = TOOL_REGISTRY.get(tool_name)
+  if ctor:
+    return ctor(device)
+  else:
+    print 'Unknown tool %s, available tools: %s' % (
+        tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+    sys.exit(1)
+
+def PushFilesForTool(tool_name, device):
+  """Pushes the files required for |tool_name| to |device|.
+
+  Args:
+    tool_name: Name of the tool to create.
+    device: A DeviceUtils instance.
+  """
+  if not tool_name:
+    return
+
+  clazz = TOOL_REGISTRY.get(tool_name)
+  if clazz:
+    clazz.CopyFiles(device)
+  else:
+    print 'Unknown tool %s, available tools: %s' % (
+        tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+    sys.exit(1)
+
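+# Illustrative usage sketch (assumed, not part of the original change):
+# |test_binary| is a hypothetical on-device path.
+#
+#   PushFilesForTool('asan', device)
+#   tool = CreateTool('asan', device)
+#   tool.SetupEnvironment()
+#   cmd = '%s %s' % (tool.GetTestWrapper(), test_binary)
+#   ...  # run |cmd|, scaling timeouts by tool.GetTimeoutScale()
+#   tool.CleanUpEnvironment()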
diff --git a/build/android/rezip.gyp b/build/android/rezip.gyp
new file mode 100644
index 0000000..1115177
--- /dev/null
+++ b/build/android/rezip.gyp
@@ -0,0 +1,45 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Build the rezip build tool.
+{
+  'targets': [
+    {
+      # GN: //build/android/rezip:rezip
+      'target_name': 'rezip_apk_jar',
+      'type': 'none',
+      'variables': {
+        'java_in_dir': 'rezip',
+        'compile_stamp': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)/compile.stamp',
+        'javac_jar_path': '<(PRODUCT_DIR)/lib.java/rezip_apk.jar',
+      },
+      'actions': [
+        {
+          'action_name': 'javac_<(_target_name)',
+          'message': 'Compiling <(_target_name) java sources',
+          'variables': {
+            'java_sources': ['>!@(find >(java_in_dir) -name "*.java")'],
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/javac.py',
+            '>@(java_sources)',
+          ],
+          'outputs': [
+            '<(compile_stamp)',
+            '<(javac_jar_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/javac.py',
+            '--classpath=',
+            '--classes-dir=<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+            '--jar-path=<(javac_jar_path)',
+            '--stamp=<(compile_stamp)',
+            '>@(java_sources)',
+          ]
+        },
+      ],
+    }
+  ],
+}
diff --git a/build/android/rezip/BUILD.gn b/build/android/rezip/BUILD.gn
new file mode 100644
index 0000000..8b8f78e
--- /dev/null
+++ b/build/android/rezip/BUILD.gn
@@ -0,0 +1,11 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+# GYP: //build/android/rezip.gyp:rezip_apk_jar
+java_library("rezip") {
+  jar_path = "$root_build_dir/lib.java/rezip_apk.jar"
+  DEPRECATED_java_in_dir = "."
+}
diff --git a/build/android/rezip/RezipApk.java b/build/android/rezip/RezipApk.java
new file mode 100644
index 0000000..43d7544
--- /dev/null
+++ b/build/android/rezip/RezipApk.java
@@ -0,0 +1,448 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+import java.util.jar.JarOutputStream;
+import java.util.regex.Pattern;
+import java.util.zip.CRC32;
+
+/**
+ * Command line tool used to build APKs which support loading the native code library
+ * directly from the APK file. To construct the APK we rename the native library by
+ * adding the prefix "crazy." to the filename. This is done to prevent the Android
+ * Package Manager from extracting the library. The native code must be page aligned
+ * and uncompressed. The page alignment is implemented by adding a zero filled file
+ * in front of the the native code library. This tool is designed so that running
+ * SignApk and/or zipalign on the resulting APK does not break the page alignment.
+ * This is achieved by outputing the filenames in the same canonical order used
+ * by SignApk and adding the same alignment fields added by zipalign.
+ */
+class RezipApk {
+    // Alignment to use for non-compressed files (must match zipalign).
+    private static final int ALIGNMENT = 4;
+
+    // Alignment to use for non-compressed *.so files
+    private static final int LIBRARY_ALIGNMENT = 4096;
+
+    // Files matching this pattern are not copied to the output when adding alignment.
+    // When reordering and verifying the APK they are copied to the end of the file.
+    private static Pattern sMetaFilePattern =
+            Pattern.compile("^(META-INF/((.*)[.](SF|RSA|DSA)|com/android/otacert))|("
+                    + Pattern.quote(JarFile.MANIFEST_NAME) + ")$");
+
+    // Pattern for matching a shared library in the APK
+    private static Pattern sLibraryPattern = Pattern.compile("^lib/[^/]*/lib.*[.]so$");
+    // Pattern for matching the crazy linker in the APK
+    private static Pattern sCrazyLinkerPattern =
+            Pattern.compile("^lib/[^/]*/libchromium_android_linker.so$");
+    // Pattern for matching a crazy loaded shared library in the APK
+    private static Pattern sCrazyLibraryPattern = Pattern.compile("^lib/[^/]*/crazy.lib.*[.]so$");
+
+    private static boolean isLibraryFilename(String filename) {
+        return sLibraryPattern.matcher(filename).matches()
+                && !sCrazyLinkerPattern.matcher(filename).matches();
+    }
+
+    private static boolean isCrazyLibraryFilename(String filename) {
+        return sCrazyLibraryPattern.matcher(filename).matches();
+    }
+
+    private static String renameLibraryForCrazyLinker(String filename) {
+        int lastSlash = filename.lastIndexOf('/');
+        // We rename the library, so that the Android Package Manager
+        // no longer extracts the library.
+        return filename.substring(0, lastSlash + 1) + "crazy." + filename.substring(lastSlash + 1);
+    }
+
+    /**
+     * Wraps another output stream, counting the number of bytes written.
+     */
+    private static class CountingOutputStream extends OutputStream {
+        private long mCount = 0;
+        private OutputStream mOut;
+
+        public CountingOutputStream(OutputStream out) {
+            this.mOut = out;
+        }
+
+        /** Returns the number of bytes written. */
+        public long getCount() {
+            return mCount;
+        }
+
+        @Override public void write(byte[] b, int off, int len) throws IOException {
+            mOut.write(b, off, len);
+            mCount += len;
+        }
+
+        @Override public void write(int b) throws IOException {
+            mOut.write(b);
+            mCount++;
+        }
+
+        @Override public void close() throws IOException {
+            mOut.close();
+        }
+
+        @Override public void flush() throws IOException {
+            mOut.flush();
+        }
+    }
+
+    private static String outputName(JarEntry entry, boolean rename) {
+        String inName = entry.getName();
+        if (rename && entry.getSize() > 0 && isLibraryFilename(inName)) {
+            return renameLibraryForCrazyLinker(inName);
+        }
+        return inName;
+    }
+
+    /**
+     * Comparator used to sort jar entries from the input file.
+     * Sorting is done based on the output filename (which may be renamed).
+     * Filenames are in natural string order, except that filenames matching
+     * the meta-file pattern are always after other files. This is so the manifest
+     * and signature are at the end of the file after any alignment file.
+     */
+    private static class EntryComparator implements Comparator<JarEntry> {
+        private boolean mRename;
+
+        public EntryComparator(boolean rename) {
+            mRename = rename;
+        }
+
+        @Override
+        public int compare(JarEntry j1, JarEntry j2) {
+            String o1 = outputName(j1, mRename);
+            String o2 = outputName(j2, mRename);
+            boolean o1Matches = sMetaFilePattern.matcher(o1).matches();
+            boolean o2Matches = sMetaFilePattern.matcher(o2).matches();
+            if (o1Matches != o2Matches) {
+                return o1Matches ? 1 : -1;
+            } else {
+                return o1.compareTo(o2);
+            }
+        }
+    }
+
+    // Build an ordered list of jar entries. The jar entries from the input are
+    // sorted based on the output filenames (which may be renamed). If |omitMetaFiles|
+    // is true do not include the jar entries for the META-INF files.
+    // Entries are ordered in the deterministic order used by SignApk.
+    private static List<JarEntry> getOutputFileOrderEntries(
+            JarFile jar, boolean omitMetaFiles, boolean rename) {
+        List<JarEntry> entries = new ArrayList<JarEntry>();
+        for (Enumeration<JarEntry> e = jar.entries(); e.hasMoreElements(); ) {
+            JarEntry entry = e.nextElement();
+            if (entry.isDirectory()) {
+                continue;
+            }
+            if (omitMetaFiles && sMetaFilePattern.matcher(entry.getName()).matches()) {
+                continue;
+            }
+            entries.add(entry);
+        }
+
+        // We sort the input entries by name. When present META-INF files
+        // are sorted to the end.
+        Collections.sort(entries, new EntryComparator(rename));
+        return entries;
+    }
+
+    /**
+     * Adds a zero filled alignment file at this point in the zip file.
+     * The added file will be placed before |name| and after |prevName|.
+     * The size of the alignment file is such that the location of the
+     * file |name| will be on a LIBRARY_ALIGNMENT boundary.
+     *
+     * Note this arrangement is devised so that running SignApk and/or zipalign on the resulting
+     * file will not alter the alignment.
+     *
+     * @param offset number of bytes into the output file at this point.
+     * @param timestamp time in millis since the epoch to include in the header.
+     * @param name the name of the library filename.
+     * @param prevName the name of the previous file in the archive (or null).
+     * @param out jar output stream to write the alignment file to.
+     *
+     * @throws IOException if the output file can not be written.
+     */
+    private static void addAlignmentFile(
+            long offset, long timestamp, String name, String prevName,
+            JarOutputStream out) throws IOException {
+
+        // Compute the start and alignment of the library, as if it was next.
+        int headerSize = JarFile.LOCHDR + name.length();
+        long libOffset = offset + headerSize;
+        int libNeeded = LIBRARY_ALIGNMENT - (int) (libOffset % LIBRARY_ALIGNMENT);
+        if (libNeeded == LIBRARY_ALIGNMENT) {
+            // Already aligned; no need to add an alignment file.
+            return;
+        }
+
+        // Check that there is not another file between the library and the
+        // alignment file.
+        String alignName = name.substring(0, name.length() - 2) + "align";
+        if (prevName != null && prevName.compareTo(alignName) >= 0) {
+            throw new UnsupportedOperationException(
+                "Unable to insert alignment file, because there is "
+                + "another file in front of the file to be aligned. "
+                + "Other file: " + prevName + " Alignment file: " + alignName
+                + " file: " + name);
+        }
+
+        // Compute the size of the alignment file header.
+        headerSize = JarFile.LOCHDR + alignName.length();
+        // We are going to add an alignment file of type STORED. This file
+        // will itself induce a zipalign alignment adjustment.
+        int extraNeeded =
+                (ALIGNMENT - (int) ((offset + headerSize) % ALIGNMENT)) % ALIGNMENT;
+        headerSize += extraNeeded;
+
+        if (libNeeded < headerSize + 1) {
+            // The header was bigger than the alignment that we need, add another page.
+            libNeeded += LIBRARY_ALIGNMENT;
+        }
+        // Compute the size of the alignment file.
+        libNeeded -= headerSize;
+
+        // Build the header for the alignment file.
+        byte[] zeroBuffer = new byte[libNeeded];
+        JarEntry alignEntry = new JarEntry(alignName);
+        alignEntry.setMethod(JarEntry.STORED);
+        alignEntry.setSize(libNeeded);
+        alignEntry.setTime(timestamp);
+        CRC32 crc = new CRC32();
+        crc.update(zeroBuffer);
+        alignEntry.setCrc(crc.getValue());
+
+        if (extraNeeded != 0) {
+            alignEntry.setExtra(new byte[extraNeeded]);
+        }
+
+        // Output the alignment file.
+        out.putNextEntry(alignEntry);
+        out.write(zeroBuffer);
+        out.closeEntry();
+        out.flush();
+    }
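+    // Worked example (illustrative numbers, not from the original change):
+    // suppose offset = 10000 and name.length() = 20 when the library entry
+    // is about to be written. Then headerSize = LOCHDR (30) + 20 = 50,
+    // libOffset = 10050, and libNeeded = 4096 - (10050 % 4096) = 2238 bytes
+    // of padding are required so the library data starts on a 4096-byte
+    // boundary; the alignment file's own header (and any extra field) is
+    // then subtracted from that padding as computed above.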
+
+    // Make a JarEntry for the output file which corresponds to the input
+    // file. The output file will be called |name|. The output file will always
+    // be uncompressed (STORED). If the input is not STORED it is necessary to inflate
+    // it to compute the CRC and size of the output entry.
+    private static JarEntry makeStoredEntry(String name, JarEntry inEntry, JarFile in)
+            throws IOException {
+        JarEntry outEntry = new JarEntry(name);
+        outEntry.setMethod(JarEntry.STORED);
+
+        if (inEntry.getMethod() == JarEntry.STORED) {
+            outEntry.setCrc(inEntry.getCrc());
+            outEntry.setSize(inEntry.getSize());
+        } else {
+            // We are inflating the file. We need to compute the CRC and size.
+            byte[] buffer = new byte[4096];
+            CRC32 crc = new CRC32();
+            int size = 0;
+            int num;
+            InputStream data = in.getInputStream(inEntry);
+            while ((num = data.read(buffer)) > 0) {
+                crc.update(buffer, 0, num);
+                size += num;
+            }
+            data.close();
+            outEntry.setCrc(crc.getValue());
+            outEntry.setSize(size);
+        }
+        return outEntry;
+    }
+
+    /**
+     * Copy the contents of the input APK file to the output APK file. If |rename| is
+     * true then non-empty libraries (*.so) in the input will be renamed by prefixing
+     * "crazy.". This is done to prevent the Android Package Manager extracting the
+     * library. Note the crazy linker itself is not renamed, for bootstrapping reasons.
+     * Empty libraries are not renamed (they are in the APK to work around a bug where
+     * the Android Package Manager fails to delete old versions when upgrading).
+     * There must be exactly one "crazy" library in the output stream. The "crazy"
+     * library will be uncompressed and page aligned in the output stream. Page
+     * alignment is implemented by adding a zero filled file, regular alignment is
+     * implemented by adding a zero filled extra field to the zip file header. If
+     * |addAlignment| is true a page alignment file is added, otherwise the "crazy"
+     * library must already be page aligned. Care is taken so that the output is generated
+     * in the same way as SignApk. This is important so that running SignApk and
+     * zipalign on the output does not break the page alignment. The archive may not
+     * contain a "*.apk" as SignApk has special nested signing logic that we do not
+     * support.
+     *
+     * @param in The input APK File.
+     * @param out The output APK stream.
+     * @param countOut Counting output stream (to measure the current offset).
+     * @param addAlignment Whether to add the alignment file or just check.
+     * @param rename Whether to rename libraries to be "crazy".
+     *
+     * @throws IOException if the output file can not be written.
+     */
+    private static void rezip(
+            JarFile in, JarOutputStream out, CountingOutputStream countOut,
+            boolean addAlignment, boolean rename) throws IOException {
+
+        List<JarEntry> entries = getOutputFileOrderEntries(in, addAlignment, rename);
+        long timestamp = System.currentTimeMillis();
+        byte[] buffer = new byte[4096];
+        boolean firstEntry = true;
+        String prevName = null;
+        int numCrazy = 0;
+        for (JarEntry inEntry : entries) {
+            // Rename files, if specified.
+            String name = outputName(inEntry, rename);
+            if (name.endsWith(".apk")) {
+                throw new UnsupportedOperationException(
+                        "Nested APKs are not supported: " + name);
+            }
+
+            // Build the header.
+            JarEntry outEntry = null;
+            boolean isCrazy = isCrazyLibraryFilename(name);
+            if (isCrazy) {
+                // "crazy" libraries are alway output uncompressed (STORED).
+                outEntry = makeStoredEntry(name, inEntry, in);
+                numCrazy++;
+                if (numCrazy > 1) {
+                    throw new UnsupportedOperationException(
+                            "Found more than one library\n"
+                            + "Multiple libraries are not supported for APKs that use "
+                            + "'load_library_from_zip'.\n"
+                            + "See crbug/388223.\n"
+                            + "Note, check that your build is clean.\n"
+                            + "An unclean build can incorrectly incorporate old "
+                            + "libraries in the APK.");
+                }
+            } else if (inEntry.getMethod() == JarEntry.STORED) {
+                // Preserve the STORED method of the input entry.
+                outEntry = new JarEntry(inEntry);
+                outEntry.setExtra(null);
+            } else {
+                // Create a new entry so that the compressed len is recomputed.
+                outEntry = new JarEntry(name);
+            }
+            outEntry.setTime(timestamp);
+
+            // Compute and add alignment
+            long offset = countOut.getCount();
+            if (firstEntry) {
+                // The first entry in a jar file has an extra field of
+                // four bytes that you can't get rid of; any extra
+                // data you specify in the JarEntry is appended to
+                // these forced four bytes.  This is JAR_MAGIC in
+                // JarOutputStream; the bytes are 0xfeca0000.
+                firstEntry = false;
+                offset += 4;
+            }
+            if (outEntry.getMethod() == JarEntry.STORED) {
+                if (isCrazy) {
+                    if (addAlignment) {
+                        addAlignmentFile(offset, timestamp, name, prevName, out);
+                    }
+                    // We check that we did indeed get to a page boundary.
+                    offset = countOut.getCount() + JarFile.LOCHDR + name.length();
+                    if ((offset % LIBRARY_ALIGNMENT) != 0) {
+                        throw new AssertionError(
+                                "Library was not page aligned when verifying page alignment. "
+                                + "Library name: " + name + " Expected alignment: "
+                                + LIBRARY_ALIGNMENT + "Offset: " + offset + " Error: "
+                                + (offset % LIBRARY_ALIGNMENT));
+                    }
+                } else {
+                    // This is equivalent to zipalign.
+                    offset += JarFile.LOCHDR + name.length();
+                    int needed = (ALIGNMENT - (int) (offset % ALIGNMENT)) % ALIGNMENT;
+                    if (needed != 0) {
+                        outEntry.setExtra(new byte[needed]);
+                    }
+                }
+            }
+            out.putNextEntry(outEntry);
+
+            // Copy the data from the input to the output
+            int num;
+            InputStream data = in.getInputStream(inEntry);
+            while ((num = data.read(buffer)) > 0) {
+                out.write(buffer, 0, num);
+            }
+            data.close();
+            out.closeEntry();
+            out.flush();
+            prevName = name;
+        }
+        if (numCrazy == 0) {
+            throw new AssertionError("There was no crazy library in the archive");
+        }
+    }
+
+    private static void usage() {
+        System.err.println("Usage: prealignapk (addalignment|reorder) input.apk output.apk");
+        System.err.println("\"crazy\" libraries are always inflated in the output");
+        System.err.println(
+                "  renamealign  - rename libraries with \"crazy.\" prefix and add alignment file");
+        System.err.println("  align        - add alignment file");
+        System.err.println("  reorder      - re-creates canonical ordering and checks alignment");
+        System.exit(2);
+    }
+
+    public static void main(String[] args) throws IOException {
+        if (args.length != 3) usage();
+
+        boolean addAlignment = false;
+        boolean rename = false;
+        if (args[0].equals("renamealign")) {
+            // Normal case. Before signing we rename the library and add an alignment file.
+            addAlignment = true;
+            rename = true;
+        } else if (args[0].equals("align")) {
+            // LGPL compliance case. Before signing, we add an alignment file to a
+            // reconstructed APK which already contains the "crazy" library.
+            addAlignment = true;
+            rename = false;
+        } else if (args[0].equals("reorder")) {
+            // Normal case. After jarsigning we write the file in the canonical order and check.
+            addAlignment = false;
+        } else {
+            usage();
+        }
+
+        String inputFilename = args[1];
+        String outputFilename = args[2];
+
+        JarFile inputJar = null;
+        FileOutputStream outputFile = null;
+
+        try {
+            inputJar = new JarFile(new File(inputFilename), true);
+            outputFile = new FileOutputStream(outputFilename);
+
+            CountingOutputStream outCount = new CountingOutputStream(outputFile);
+            JarOutputStream outputJar = new JarOutputStream(outCount);
+
+            // Match the compression level used by SignApk.
+            outputJar.setLevel(9);
+
+            rezip(inputJar, outputJar, outCount, addAlignment, rename);
+            outputJar.close();
+        } finally {
+            if (inputJar != null) inputJar.close();
+            if (outputFile != null) outputFile.close();
+        }
+    }
+}
diff --git a/build/android/screenshot.py b/build/android/screenshot.py
new file mode 100755
index 0000000..097739f
--- /dev/null
+++ b/build/android/screenshot.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Takes a screenshot or a screen video capture from an Android device."""
+
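+# Example invocations (illustrative):
+#   build/android/screenshot.py shot.png             # single screenshot
+#   build/android/screenshot.py --video clip.mp4     # screen video capture
+#   build/android/screenshot.py -d SERIAL -f out.png # pick a specific device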
+import logging
+import optparse
+import os
+import sys
+
+from pylib import screenshot
+from pylib.device import device_errors
+from pylib.device import device_utils
+
+def _PrintMessage(heading, eol='\n'):
+  sys.stdout.write('%s%s' % (heading, eol))
+  sys.stdout.flush()
+
+
+def _CaptureScreenshot(device, host_file):
+  host_file = device.TakeScreenshot(host_file)
+  _PrintMessage('Screenshot written to %s' % os.path.abspath(host_file))
+
+
+def _CaptureVideo(device, host_file, options):
+  size = tuple(map(int, options.size.split('x'))) if options.size else None
+  recorder = screenshot.VideoRecorder(device,
+                                      megabits_per_second=options.bitrate,
+                                      size=size,
+                                      rotate=options.rotate)
+  try:
+    recorder.Start()
+    _PrintMessage('Recording. Press Enter to stop...', eol='')
+    raw_input()
+  finally:
+    recorder.Stop()
+  host_file = recorder.Pull(host_file)
+  _PrintMessage('Video written to %s' % os.path.abspath(host_file))
+
+
+def main():
+  # Parse options.
+  parser = optparse.OptionParser(description=__doc__,
+                                 usage='screenshot.py [options] [filename]')
+  parser.add_option('-d', '--device', metavar='ANDROID_DEVICE', help='Serial '
+                    'number of Android device to use.', default=None)
+  parser.add_option('-f', '--file', help='Save result to file instead of '
+                    'generating a timestamped file name.', metavar='FILE')
+  parser.add_option('-v', '--verbose', help='Verbose logging.',
+                    action='store_true')
+  video_options = optparse.OptionGroup(parser, 'Video capture')
+  video_options.add_option('--video', help='Enable video capturing. Requires '
+                           'Android KitKat or later', action='store_true')
+  video_options.add_option('-b', '--bitrate', help='Bitrate in megabits/s, '
+                           'from 0.1 to 100 mbps, %default mbps by default.',
+                           default=4, type='float')
+  video_options.add_option('-r', '--rotate', help='Rotate video by 90 degrees.',
+                           default=False, action='store_true')
+  video_options.add_option('-s', '--size', metavar='WIDTHxHEIGHT',
+                           help='Frame size to use instead of the device '
+                           'screen size.', default=None)
+  parser.add_option_group(video_options)
+
+  (options, args) = parser.parse_args()
+
+  if len(args) > 1:
+    parser.error('Too many positional arguments.')
+  host_file = args[0] if args else options.file
+
+  if options.verbose:
+    logging.getLogger().setLevel(logging.DEBUG)
+
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  if options.device:
+    device = next((d for d in devices if d == options.device), None)
+    if not device:
+      raise device_errors.DeviceUnreachableError(options.device)
+  else:
+    if len(devices) > 1:
+      parser.error('Multiple devices are attached. '
+                   'Please specify device serial number with --device.')
+    elif len(devices) == 1:
+      device = devices[0]
+    else:
+      raise device_errors.NoDevicesError()
+
+  if options.video:
+    _CaptureVideo(device, host_file, options)
+  else:
+    _CaptureScreenshot(device, host_file)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/setup.gyp b/build/android/setup.gyp
new file mode 100644
index 0000000..0e1c2c4
--- /dev/null
+++ b/build/android/setup.gyp
@@ -0,0 +1,111 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'conditions': [
+    ['component == "shared_library"', {
+      'targets': [
+        {
+          # These libraries from the Android NDK are required to be packaged with
+          # any APK that is built with them. build/java_apk.gypi expects any
+          # libraries that should be packaged with the apk to be in
+          # <(SHARED_LIB_DIR)
+          'target_name': 'copy_system_libraries',
+          'type': 'none',
+          'copies': [
+            {
+              'destination': '<(SHARED_LIB_DIR)/',
+              'files': [
+                '<(android_libcpp_libs_dir)/libc++_shared.so',
+              ],
+            },
+          ],
+        },
+      ],
+    }],
+  ],
+  'targets': [
+    {
+      'target_name': 'get_build_device_configurations',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'get configurations',
+          'inputs': [
+            'gyp/util/build_device.py',
+            'gyp/get_device_configuration.py',
+          ],
+          'outputs': [
+            '<(build_device_config_path)',
+            '<(build_device_config_path).fake',
+          ],
+          'action': [
+            'python', 'gyp/get_device_configuration.py',
+            '--output=<(build_device_config_path)',
+          ],
+        }
+      ],
+    },
+    {
+      # Target for creating common output build directories. Creating output
+      # dirs beforehand ensures that build scripts can assume these folders
+      # exist, and avoids race conditions that would result from build
+      # scripts trying to create these directories.
+      # The build/java.gypi target depends on this target.
+      'target_name': 'build_output_dirs',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'create_java_output_dirs',
+          'variables' : {
+            'output_dirs' : [
+              '<(PRODUCT_DIR)/apks',
+              '<(PRODUCT_DIR)/lib.java',
+              '<(PRODUCT_DIR)/test.lib.java',
+            ]
+          },
+          'inputs' : [],
+          # By not specifying any outputs, we ensure that this command isn't
+          # re-run when the output directories are touched (i.e. apks are
+          # written to them).
+          'outputs': [''],
+          'action': [
+            'mkdir',
+            '-p',
+            '<@(output_dirs)',
+          ],
+        },
+      ],
+    }, # build_output_dirs
+    {
+      'target_name': 'sun_tools_java',
+      'type': 'none',
+      'variables': {
+        'found_jar_path': '<(PRODUCT_DIR)/sun_tools_java/tools.jar',
+        'jar_path': '<(found_jar_path)',
+      },
+      'includes': [
+        '../../build/host_prebuilt_jar.gypi',
+      ],
+      'actions': [
+        {
+          'action_name': 'find_sun_tools_jar',
+          'variables' : {
+          },
+          'inputs' : [
+            'gyp/find_sun_tools_jar.py',
+            'gyp/util/build_utils.py',
+          ],
+          'outputs': [
+            '<(found_jar_path)',
+          ],
+          'action': [
+            'python', 'gyp/find_sun_tools_jar.py',
+            '--output', '<(found_jar_path)',
+          ],
+        },
+      ],
+    }, # sun_tools_java
+  ]
+}
+
diff --git a/build/android/strip_native_libraries.gypi b/build/android/strip_native_libraries.gypi
new file mode 100644
index 0000000..bdffcfd
--- /dev/null
+++ b/build/android/strip_native_libraries.gypi
@@ -0,0 +1,54 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that strips
+# native libraries.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'actions': [
+#      {
+#        'action_name': 'strip_native_libraries',
+#        'variables': {
+#          'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#          'input_paths': 'files to be added to the list of inputs',
+#          'stamp': 'file to touch when the action is complete',
+#          'stripped_libraries_dir': 'directory to store stripped libraries',
+#        },
+#        'includes': [ '../../build/android/strip_native_libraries.gypi' ],
+#      },
+#    ],
+#  },
+#
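+# The '--libraries=@FileArg(<file>:libraries)' argument below tells the strip
+# script to read its library list from the 'libraries' key of the JSON file
+# produced by write_ordered_libraries, e.g. (illustrative contents only):
+#   {"libraries": ["libbase.so", "libcontent.so"]}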
+
+{
+  'message': 'Stripping libraries for <(_target_name)',
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/strip_library_for_device.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'conditions': [
+    ['component == "shared_library"', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/strip_library_for_device.py',
+    '--android-strip=<(android_strip)',
+    '--android-strip-arg=--strip-unneeded',
+    '--stripped-libraries-dir=<(stripped_libraries_dir)',
+    '--libraries-dir=<(SHARED_LIB_DIR),<(PRODUCT_DIR)',
+    '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+    '--stamp=<(stamp)',
+  ],
+}
diff --git a/build/android/symbolize.py b/build/android/symbolize.py
new file mode 100755
index 0000000..56d3b19
--- /dev/null
+++ b/build/android/symbolize.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Symbolizes stack traces generated by Chromium for Android.
+
+Sample usage:
+  adb logcat chromium:V | symbolize.py
+"""
+
+import os
+import re
+import sys
+
+from pylib import constants
+
+# Uses symbol.py from third_party/android_platform, not Python's.
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                            'third_party/android_platform/development/scripts'))
+import symbol
+
+# Sample output from base/debug/stack_trace_android.cc
+#00 0x693cd34f /path/to/some/libfoo.so+0x0007434f
+TRACE_LINE = re.compile(r'(?P<frame>\#[0-9]+ 0x[0-9a-f]{8}) '
+                        r'(?P<lib>[^+]+)\+0x(?P<addr>[0-9a-f]{8})')
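+# For the sample line above, the named groups come out as (illustrative
+# values only):
+#   frame = '#00 0x693cd34f'
+#   lib   = '/path/to/some/libfoo.so'
+#   addr  = '0007434f'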
+
+class Symbolizer(object):
+  def __init__(self, output):
+    self._output = output
+
+  def write(self, data):
+    while True:
+      match = re.search(TRACE_LINE, data)
+      if not match:
+        self._output.write(data)
+        break
+
+      frame = match.group('frame')
+      lib = match.group('lib')
+      addr = match.group('addr')
+
+      # TODO(scherkus): Doing a single lookup per line is pretty slow,
+      # especially with larger libraries. Consider caching strategies such as:
+      # 1) Have Python load the libraries and do symbol lookups instead of
+      #    calling out to addr2line each time.
+      # 2) Have Python keep multiple addr2line instances open as subprocesses,
+      #    piping addresses and reading back symbols as we find them.
+      # 3) Read ahead the entire stack trace until we find no more, then batch
+      #    the symbol lookups.
+      #
+      # TODO(scherkus): These results are memoized, which could result in
+      # incorrect lookups when running this script on long-lived instances
+      # (e.g., adb logcat) during incremental development. Consider clearing
+      # the cache when the modification timestamps of the libraries change.
+      sym = symbol.SymbolInformation(lib, addr, False)[0][0]
+
+      if not sym:
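+        # No symbol was found: emit everything up to and including this
+        # frame unchanged, then keep scanning the rest of the line.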
+        post = match.end('addr')
+        self._output.write(data[:post])
+        data = data[post:]
+        continue
+
+      pre = match.start('frame')
+      post = match.end('addr')
+
+      self._output.write(data[:pre])
+      self._output.write(frame)
+      self._output.write(' ')
+      self._output.write(sym)
+
+      data = data[post:]
+
+  def flush(self):
+    self._output.flush()
+
+
+def main():
+  symbolizer = Symbolizer(sys.stdout)
+  for line in sys.stdin:
+    symbolizer.write(line)
+  symbolizer.flush()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/symbolize_test.py b/build/android/symbolize_test.py
new file mode 100755
index 0000000..826d852
--- /dev/null
+++ b/build/android/symbolize_test.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittest for symbolize.py.
+
+This test uses test libraries generated by the Android g++ toolchain.
+
+Should things break, you can recreate the libraries and get the updated
+addresses and demangled names by running the following:
+  cd tests/symbolize/
+  make
+  nm -gC *.so
+"""
+
+import StringIO
+import unittest
+
+import symbolize
+
+LIB_A_PATH = '/build/android/tests/symbolize/liba.so'
+LIB_B_PATH = '/build/android/tests/symbolize/libb.so'
+
+def RunSymbolizer(text):
+  output = StringIO.StringIO()
+  s = symbolize.Symbolizer(output)
+  s.write(text)
+  return output.getvalue()
+
+
+class SymbolizerUnittest(unittest.TestCase):
+  def testSingleLineNoMatch(self):
+    # Leading '#' is required.
+    expected = '00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Whitespace should be exactly one space.
+    expected = '#00  0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000  ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Decimal stack frame numbers are required.
+    expected = '#0a 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Hexadecimal addresses are required.
+    expected = '#00 0xghijklmn ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+0xghijklmn\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Addresses must be exactly 8 characters.
+    expected = '#00 0x0000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x000000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    expected = '#00 0x0000000 ' + LIB_A_PATH + '+0x0000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x000000000 ' + LIB_A_PATH + '+0x000000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Addresses must be prefixed with '0x'.
+    expected = '#00 00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Library name is required.
+    expected = '#00 0x00000000\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 +0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Library name must be followed by offset with no spaces around '+'.
+    expected = '#00 0x00000000 ' + LIB_A_PATH + ' +0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+ 0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + ' 0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+  def testSingleLine(self):
+    text = '#00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    expected = '#00 0x00000000 A::Bar(char const*)\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testSingleLineWithSurroundingText(self):
+    text = 'LEFT #00 0x00000000 ' + LIB_A_PATH + '+0x00000254 RIGHT\n'
+    expected = 'LEFT #00 0x00000000 A::Bar(char const*) RIGHT\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testMultipleLinesSameLibrary(self):
+    text = '#00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    text += '#01 0x00000000 ' + LIB_A_PATH + '+0x00000234\n'
+    expected = '#00 0x00000000 A::Bar(char const*)\n'
+    expected += '#01 0x00000000 A::Foo(int)\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testMultipleLinesDifferentLibrary(self):
+    text = '#00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    text += '#01 0x00000000 ' + LIB_B_PATH + '+0x00000234\n'
+    expected = '#00 0x00000000 A::Bar(char const*)\n'
+    expected += '#01 0x00000000 B::Baz(float)\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testMultipleLinesWithSurroundingTextEverywhere(self):
+    text = 'TOP\n'
+    text += 'LEFT #00 0x00000000 ' + LIB_A_PATH + '+0x00000254 RIGHT\n'
+    text += 'LEFT #01 0x00000000 ' + LIB_B_PATH + '+0x00000234 RIGHT\n'
+    text += 'BOTTOM\n'
+    expected = 'TOP\n'
+    expected += 'LEFT #00 0x00000000 A::Bar(char const*) RIGHT\n'
+    expected += 'LEFT #01 0x00000000 B::Baz(float) RIGHT\n'
+    expected += 'BOTTOM\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/test_runner.gypi b/build/android/test_runner.gypi
new file mode 100644
index 0000000..f92b7ce
--- /dev/null
+++ b/build/android/test_runner.gypi
@@ -0,0 +1,81 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a script in the output bin directory which runs the test
+# target using the test runner script in build/android/pylib/test_runner.py.
+#
+# To use this, include this file in a gtest or instrumentation test target.
+# {
+#   'target_name': 'gtest',
+#   'type': 'none',
+#   'variables': {
+#     'test_type': 'gtest',  # string
+#     'test_suite_name': 'gtest_suite',  # string
+#     'isolate_file': 'path/to/gtest.isolate',  # string
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# {
+#   'target_name': 'instrumentation_apk',
+#   'type': 'none',
+#   'variables': {
+#     'test_type': 'instrumentation',  # string
+#     'apk_name': 'TestApk',  # string
+#     'isolate_file': 'path/to/instrumentation_test.isolate',  # string
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
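+# For the gtest example above, this generates
+# <(PRODUCT_DIR)/bin/run_gtest_suite; for the instrumentation example it
+# generates <(PRODUCT_DIR)/bin/run_instrumentation_apk (the script is named
+# after the target, not the APK).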
+
+{
+  'variables': {
+    'variables': {
+      'isolate_file%': '',
+      'support_apk_path%': '',
+    },
+    'test_runner_args': ['--output-directory', '<(PRODUCT_DIR)'],
+    'conditions': [
+      ['test_type == "gtest"', {
+        'test_runner_args': ['--suite', '<(test_suite_name)'],
+        'script_name': 'run_<(test_suite_name)',
+      }],
+      ['test_type == "instrumentation"', {
+        'test_runner_args': ['--test-apk', '<(apk_name)'],
+        'script_name': 'run_<(_target_name)',
+        'conditions': [
+          ['support_apk_path != ""', {
+            'test_runner_args': [
+              '--support-apk',
+              '<(support_apk_path)'
+            ],
+          }],
+        ],
+      }],
+      ['isolate_file != ""', {
+        'test_runner_args': ['--isolate-file-path', '<(isolate_file)']
+      }],
+    ],
+  },
+  'actions': [
+    {
+      'action_name': 'create_test_runner_script_<(script_name)',
+      'message': 'Creating test runner script <(script_name)',
+      'variables': {
+        'script_output_path': '<(PRODUCT_DIR)/bin/<(script_name)',
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/create_test_runner_script.py',
+      ],
+      'outputs': [
+        '<(script_output_path)'
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/create_test_runner_script.py',
+        '--script-output-path=<(script_output_path)',
+        '<(test_type)', '<@(test_runner_args)',
+      ],
+    },
+  ],
+}
\ No newline at end of file
diff --git a/build/android/test_runner.py b/build/android/test_runner.py
new file mode 100755
index 0000000..1fc48ec
--- /dev/null
+++ b/build/android/test_runner.py
@@ -0,0 +1,1067 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs all types of tests from one unified interface."""
+
+import argparse
+import collections
+import logging
+import os
+import shutil
+import signal
+import sys
+import threading
+import unittest
+
+from pylib import constants
+from pylib import forwarder
+from pylib import ports
+from pylib.base import base_test_result
+from pylib.base import environment_factory
+from pylib.base import test_dispatcher
+from pylib.base import test_instance_factory
+from pylib.base import test_run_factory
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.gtest import gtest_config
+# TODO(jbudorick): Remove this once we stop selectively enabling platform mode.
+from pylib.gtest import gtest_test_instance
+from pylib.gtest import setup as gtest_setup
+from pylib.gtest import test_options as gtest_test_options
+from pylib.linker import setup as linker_setup
+from pylib.host_driven import setup as host_driven_setup
+from pylib.instrumentation import setup as instrumentation_setup
+from pylib.instrumentation import test_options as instrumentation_test_options
+from pylib.junit import setup as junit_setup
+from pylib.junit import test_dispatcher as junit_dispatcher
+from pylib.monkey import setup as monkey_setup
+from pylib.monkey import test_options as monkey_test_options
+from pylib.perf import setup as perf_setup
+from pylib.perf import test_options as perf_test_options
+from pylib.perf import test_runner as perf_test_runner
+from pylib.results import json_results
+from pylib.results import report_results
+from pylib.uiautomator import setup as uiautomator_setup
+from pylib.uiautomator import test_options as uiautomator_test_options
+from pylib.utils import apk_helper
+from pylib.utils import base_error
+from pylib.utils import reraiser_thread
+from pylib.utils import run_tests_helper
+
+
+def AddCommonOptions(parser):
+  """Adds all common options to |parser|."""
+
+  group = parser.add_argument_group('Common Options')
+
+  default_build_type = os.environ.get('BUILDTYPE', 'Debug')
+
+  debug_or_release_group = group.add_mutually_exclusive_group()
+  debug_or_release_group.add_argument(
+      '--debug', action='store_const', const='Debug', dest='build_type',
+      default=default_build_type,
+      help=('If set, run test suites under out/Debug. '
+            'Default is env var BUILDTYPE or Debug.'))
+  debug_or_release_group.add_argument(
+      '--release', action='store_const', const='Release', dest='build_type',
+      help=('If set, run test suites under out/Release. '
+            'Default is env var BUILDTYPE or Debug.'))
+
+  group.add_argument('--build-directory', dest='build_directory',
+                     help=('Path to the directory in which build files are'
+                           ' located (should not include build type)'))
+  group.add_argument('--output-directory', dest='output_directory',
+                     help=('Path to the directory in which build files are'
+                           ' located (must include build type). This will take'
+                           ' precedence over --debug, --release and'
+                           ' --build-directory'))
+  group.add_argument('--num_retries', dest='num_retries', type=int, default=2,
+                     help=('Number of retries for a test before '
+                           'giving up (default: %(default)s).'))
+  group.add_argument('-v',
+                     '--verbose',
+                     dest='verbose_count',
+                     default=0,
+                     action='count',
+                     help='Verbose level (multiple times for more)')
+  group.add_argument('--flakiness-dashboard-server',
+                     dest='flakiness_dashboard_server',
+                     help=('Address of the server that is hosting the '
+                           'Chrome for Android flakiness dashboard.'))
+  group.add_argument('--enable-platform-mode', action='store_true',
+                     help=('Run the test scripts in platform mode, which '
+                           'conceptually separates the test runner from the '
+                           '"device" (local or remote, real or emulated) on '
+                           'which the tests are running. [experimental]'))
+  group.add_argument('-e', '--environment', default='local',
+                     choices=constants.VALID_ENVIRONMENTS,
+                     help='Test environment to run in (default: %(default)s).')
+  group.add_argument('--adb-path',
+                     help=('Specify the absolute path of the adb binary that '
+                           'should be used.'))
+  group.add_argument('--json-results-file', dest='json_results_file',
+                     help='If set, will dump results in JSON form '
+                          'to specified file.')
+
+def ProcessCommonOptions(args):
+  """Processes and handles all common options."""
+  run_tests_helper.SetLogLevel(args.verbose_count)
+  constants.SetBuildType(args.build_type)
+  if args.build_directory:
+    constants.SetBuildDirectory(args.build_directory)
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+  if args.adb_path:
+    constants.SetAdbPath(args.adb_path)
+  # Some things such as Forwarder require ADB to be in the environment path.
+  adb_dir = os.path.dirname(constants.GetAdbPath())
+  if adb_dir and adb_dir not in os.environ['PATH'].split(os.pathsep):
+    os.environ['PATH'] = adb_dir + os.pathsep + os.environ['PATH']
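+  # E.g., passing '--adb-path /opt/android-sdk/platform-tools/adb' (path is
+  # illustrative) prepends /opt/android-sdk/platform-tools to PATH here.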
+
+
+def AddRemoteDeviceOptions(parser):
+  group = parser.add_argument_group('Remote Device Options')
+
+  group.add_argument('--trigger',
+                     help=('Only triggers the test if set. Stores test_run_id '
+                           'in the given file path.'))
+  group.add_argument('--collect',
+                     help=('Only collects the test results if set. '
+                           'Gets test_run_id from the given file path.'))
+  group.add_argument('--remote-device', action='append',
+                     help='Device type to run test on.')
+  group.add_argument('--results-path',
+                     help='File path to download results to.')
+  group.add_argument('--api-protocol',
+                     help='HTTP protocol to use. (http or https)')
+  group.add_argument('--api-address',
+                     help='Address to send HTTP requests.')
+  group.add_argument('--api-port',
+                     help='Port to send HTTP requests to.')
+  group.add_argument('--runner-type',
+                     help='Type of test to run as.')
+  group.add_argument('--runner-package',
+                     help='Package name of test.')
+  group.add_argument('--device-type',
+                     choices=constants.VALID_DEVICE_TYPES,
+                     help=('Type of device to run on: iOS or Android.'))
+  group.add_argument('--device-oem', action='append',
+                     help='Device OEM to run on.')
+  group.add_argument('--remote-device-file',
+                     help=('File with JSON to select remote device. '
+                           'Overrides all other flags.'))
+  group.add_argument('--remote-device-timeout', type=int,
+                     help='Number of times to retry finding the remote device.')
+  group.add_argument('--network-config', type=int,
+                     help='Integer that specifies the network environment '
+                          'that the tests will be run in.')
+
+  device_os_group = group.add_mutually_exclusive_group()
+  device_os_group.add_argument('--remote-device-minimum-os',
+                               help='Minimum OS on device.')
+  device_os_group.add_argument('--remote-device-os', action='append',
+                               help='OS to have on the device.')
+
+  api_secret_group = group.add_mutually_exclusive_group()
+  api_secret_group.add_argument('--api-secret', default='',
+                                help='API secret for remote devices.')
+  api_secret_group.add_argument('--api-secret-file', default='',
+                                help='Path to file that contains API secret.')
+
+  api_key_group = group.add_mutually_exclusive_group()
+  api_key_group.add_argument('--api-key', default='',
+                             help='API key for remote devices.')
+  api_key_group.add_argument('--api-key-file', default='',
+                             help='Path to file that contains API key.')
+
+
+def AddDeviceOptions(parser):
+  """Adds device options to |parser|."""
+  group = parser.add_argument_group(title='Device Options')
+  group.add_argument('--tool',
+                     dest='tool',
+                     help=('Run the test under a tool '
+                           '(use --tool help to list them)'))
+  group.add_argument('-d', '--device', dest='test_device',
+                     help=('Target device for the test suite '
+                           'to run on.'))
+
+
+def AddGTestOptions(parser):
+  """Adds gtest options to |parser|."""
+
+  gtest_suites = list(gtest_config.STABLE_TEST_SUITES
+                      + gtest_config.EXPERIMENTAL_TEST_SUITES)
+
+  group = parser.add_argument_group('GTest Options')
+  group.add_argument('-s', '--suite', dest='suite_name',
+                     nargs='+', metavar='SUITE_NAME', required=True,
+                     help=('Executable name of the test suite to run. '
+                           'Available suites include (but are not limited to): '
+                           '%s' % ', '.join('"%s"' % s for s in gtest_suites)))
+  group.add_argument('--gtest_also_run_disabled_tests',
+                     '--gtest-also-run-disabled-tests',
+                     dest='run_disabled', action='store_true',
+                     help='Also run disabled tests if applicable.')
+  group.add_argument('-a', '--test-arguments', dest='test_arguments',
+                     default='',
+                     help='Additional arguments to pass to the test.')
+  group.add_argument('-t', dest='timeout', type=int, default=60,
+                     help='Timeout to wait for each test '
+                          '(default: %(default)s).')
+  group.add_argument('--isolate_file_path',
+                     '--isolate-file-path',
+                     dest='isolate_file_path',
+                     help='.isolate file path to override the default '
+                          'path')
+  group.add_argument('--app-data-file', action='append', dest='app_data_files',
+                     help='A file path relative to the app data directory '
+                          'that should be saved to the host.')
+  group.add_argument('--app-data-file-dir',
+                     help='Host directory to which app data files will be'
+                          ' saved. Used with --app-data-file.')
+  group.add_argument('--delete-stale-data', dest='delete_stale_data',
+                     action='store_true',
+                     help='Delete stale test data on the device.')
+
+  filter_group = group.add_mutually_exclusive_group()
+  filter_group.add_argument('-f', '--gtest_filter', '--gtest-filter',
+                            dest='test_filter',
+                            help='googletest-style filter string.')
+  filter_group.add_argument('--gtest-filter-file', dest='test_filter_file',
+                            help='Path to file that contains googletest-style '
+                                  'filter strings. (Lines will be joined with '
+                                  '":" to create a single filter string.)')
+
+  AddDeviceOptions(parser)
+  AddCommonOptions(parser)
+  AddRemoteDeviceOptions(parser)
+
+
+def AddLinkerTestOptions(parser):
+  group = parser.add_argument_group('Linker Test Options')
+  group.add_argument('-f', '--gtest-filter', dest='test_filter',
+                     help='googletest-style filter string.')
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+
+
+def AddJavaTestOptions(argument_group):
+  """Adds the Java test options to |option_parser|."""
+
+  argument_group.add_argument(
+      '-f', '--test-filter', dest='test_filter',
+      help=('Test filter (if not fully qualified, will run all matches).'))
+  argument_group.add_argument(
+      '-A', '--annotation', dest='annotation_str',
+      help=('Comma-separated list of annotations. Run only tests with any of '
+            'the given annotations. An annotation can be either a key or a '
+            'key-values pair. A test that has no annotation is considered '
+            '"SmallTest".'))
+  argument_group.add_argument(
+      '-E', '--exclude-annotation', dest='exclude_annotation_str',
+      help=('Comma-separated list of annotations. Exclude tests with these '
+            'annotations.'))
+  argument_group.add_argument(
+      '--screenshot', dest='screenshot_failures', action='store_true',
+      help='Capture screenshots of test failures')
+  argument_group.add_argument(
+      '--save-perf-json', action='store_true',
+      help='Saves the JSON file for each UI Perf test.')
+  argument_group.add_argument(
+      '--official-build', action='store_true', help='Run official build tests.')
+  argument_group.add_argument(
+      '--test_data', '--test-data', action='append', default=[],
+      help=('Each instance defines a directory of test data that should be '
+            'copied to the target(s) before running the tests. The argument '
+            'should be of the form <target>:<source>, where <target> is '
+            'relative to the device data directory, and <source> is relative '
+            'to the chromium build directory.'))
+  argument_group.add_argument(
+      '--disable-dalvik-asserts', dest='set_asserts', action='store_false',
+      default=True, help='Removes the dalvik.vm.enableassertions property')
+
+
+def ProcessJavaTestOptions(args):
+  """Processes options/arguments and populates |options| with defaults."""
+
+  # TODO(jbudorick): Handle most of this function in argparse.
+  if args.annotation_str:
+    args.annotations = args.annotation_str.split(',')
+  elif args.test_filter:
+    args.annotations = []
+  else:
+    args.annotations = ['Smoke', 'SmallTest', 'MediumTest', 'LargeTest',
+                        'EnormousTest', 'IntegrationTest']
+
+  if args.exclude_annotation_str:
+    args.exclude_annotations = args.exclude_annotation_str.split(',')
+  else:
+    args.exclude_annotations = []
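+  # For example, '--annotation Smoke,MediumTest' yields
+  # args.annotations == ['Smoke', 'MediumTest'], while passing only a test
+  # filter leaves the annotation list empty so the filter alone selects tests.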
+
+
+def AddInstrumentationTestOptions(parser):
+  """Adds Instrumentation test options to |parser|."""
+
+  parser.usage = '%(prog)s [options]'
+
+  group = parser.add_argument_group('Instrumentation Test Options')
+  AddJavaTestOptions(group)
+
+  java_or_python_group = group.add_mutually_exclusive_group()
+  java_or_python_group.add_argument(
+      '-j', '--java-only', action='store_false',
+      dest='run_python_tests', default=True, help='Run only the Java tests.')
+  java_or_python_group.add_argument(
+      '-p', '--python-only', action='store_false',
+      dest='run_java_tests', default=True,
+      help='Run only the host-driven tests.')
+
+  group.add_argument('--host-driven-root',
+                     help='Root of the host-driven tests.')
+  group.add_argument('-w', '--wait_debugger', dest='wait_for_debugger',
+                     action='store_true',
+                     help='Wait for debugger.')
+  group.add_argument('--apk-under-test', dest='apk_under_test',
+                     help=('The name of the APK under test.'))
+  group.add_argument('--test-apk', dest='test_apk', required=True,
+                     help=('The name of the apk containing the tests '
+                           '(without the .apk extension; '
+                           'e.g. "ContentShellTest").'))
+  group.add_argument('--support-apk', dest='test_support_apk_path',
+                     help=('The path to an optional support apk to be '
+                           'installed alongside the test apk. The '
+                           'path should be relative to the output '
+                           'directory (--output-directory).'))
+  group.add_argument('--coverage-dir',
+                     help=('Directory in which to place all generated '
+                           'EMMA coverage files.'))
+  group.add_argument('--device-flags', dest='device_flags', default='',
+                     help='The relative filepath to a file containing '
+                          'command-line flags to set on the device')
+  group.add_argument('--device-flags-file', default='',
+                     help='The relative filepath to a file containing '
+                          'command-line flags to set on the device')
+  group.add_argument('--isolate_file_path',
+                     '--isolate-file-path',
+                     dest='isolate_file_path',
+                     help='.isolate file path to override the default '
+                          'path')
+  group.add_argument('--delete-stale-data', dest='delete_stale_data',
+                     action='store_true',
+                     help='Delete stale test data on the device.')
+
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+  AddRemoteDeviceOptions(parser)
+
+
+def ProcessInstrumentationOptions(args):
+  """Processes options/arguments and populate |options| with defaults.
+
+  Args:
+    args: argparse.Namespace object.
+
+  Returns:
+    An InstrumentationOptions named tuple which contains all options relevant to
+    instrumentation tests.
+  """
+
+  ProcessJavaTestOptions(args)
+
+  if not args.host_driven_root:
+    args.run_python_tests = False
+
+  args.test_apk_path = os.path.join(
+      constants.GetOutDirectory(),
+      constants.SDK_BUILD_APKS_DIR,
+      '%s.apk' % args.test_apk)
+  args.test_apk_jar_path = os.path.join(
+      constants.GetOutDirectory(),
+      constants.SDK_BUILD_TEST_JAVALIB_DIR,
+      '%s.jar' % args.test_apk)
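+  # With '--test-apk ContentShellTest' these resolve to paths like
+  # out/Debug/apks/ContentShellTest.apk and the matching test .jar (exact
+  # subdirectories depend on the constants above; illustrative only).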
+
+  args.test_runner = apk_helper.GetInstrumentationName(args.test_apk_path)
+
+  # TODO(jbudorick): Get rid of InstrumentationOptions.
+  return instrumentation_test_options.InstrumentationOptions(
+      args.tool,
+      args.annotations,
+      args.exclude_annotations,
+      args.test_filter,
+      args.test_data,
+      args.save_perf_json,
+      args.screenshot_failures,
+      args.wait_for_debugger,
+      args.coverage_dir,
+      args.test_apk,
+      args.test_apk_path,
+      args.test_apk_jar_path,
+      args.test_runner,
+      args.test_support_apk_path,
+      args.device_flags,
+      args.isolate_file_path,
+      args.set_asserts,
+      args.delete_stale_data
+      )
+
+
+def AddUIAutomatorTestOptions(parser):
+  """Adds UI Automator test options to |parser|."""
+
+  group = parser.add_argument_group('UIAutomator Test Options')
+  AddJavaTestOptions(group)
+  group.add_argument(
+      '--package', required=True, choices=constants.PACKAGE_INFO.keys(),
+      metavar='PACKAGE', help='Package under test.')
+  group.add_argument(
+      '--test-jar', dest='test_jar', required=True,
+      help=('The name of the dexed jar containing the tests (without the '
+            '.dex.jar extension). Alternatively, this can be a full path '
+            'to the jar.'))
+
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+
+
+def ProcessUIAutomatorOptions(args):
+  """Processes UIAutomator options/arguments.
+
+  Args:
+    args: argparse.Namespace object.
+
+  Returns:
+    A UIAutomatorOptions named tuple which contains all options relevant to
+    uiautomator tests.
+  """
+
+  ProcessJavaTestOptions(args)
+
+  if os.path.exists(args.test_jar):
+    # The dexed JAR is fully qualified; assume the info JAR lives alongside it.
+    args.uiautomator_jar = args.test_jar
+  else:
+    args.uiautomator_jar = os.path.join(
+        constants.GetOutDirectory(),
+        constants.SDK_BUILD_JAVALIB_DIR,
+        '%s.dex.jar' % args.test_jar)
+  args.uiautomator_info_jar = (
+      args.uiautomator_jar[:args.uiautomator_jar.find('.dex.jar')] +
+      '_java.jar')
+
+  return uiautomator_test_options.UIAutomatorOptions(
+      args.tool,
+      args.annotations,
+      args.exclude_annotations,
+      args.test_filter,
+      args.test_data,
+      args.save_perf_json,
+      args.screenshot_failures,
+      args.uiautomator_jar,
+      args.uiautomator_info_jar,
+      args.package,
+      args.set_asserts)
+
+
+def AddJUnitTestOptions(parser):
+  """Adds junit test options to |parser|."""
+
+  group = parser.add_argument_group('JUnit Test Options')
+  group.add_argument(
+      '-s', '--test-suite', dest='test_suite', required=True,
+      help=('JUnit test suite to run.'))
+  group.add_argument(
+      '-f', '--test-filter', dest='test_filter',
+      help='Filters tests googletest-style.')
+  group.add_argument(
+      '--package-filter', dest='package_filter',
+      help='Filters tests by package.')
+  group.add_argument(
+      '--runner-filter', dest='runner_filter',
+      help='Filters tests by runner class. Must be fully qualified.')
+  group.add_argument(
+      '--sdk-version', dest='sdk_version', type=int,
+      help='The Android SDK version.')
+  AddCommonOptions(parser)
+
+
+def AddMonkeyTestOptions(parser):
+  """Adds monkey test options to |parser|."""
+
+  group = parser.add_argument_group('Monkey Test Options')
+  group.add_argument(
+      '--package', required=True, choices=constants.PACKAGE_INFO.keys(),
+      metavar='PACKAGE', help='Package under test.')
+  group.add_argument(
+      '--event-count', default=10000, type=int,
+      help='Number of events to generate (default: %(default)s).')
+  group.add_argument(
+      '--category', default='',
+      help='A list of allowed categories.')
+  group.add_argument(
+      '--throttle', default=100, type=int,
+      help='Delay between events (ms) (default: %(default)s). ')
+  group.add_argument(
+      '--seed', type=int,
+      help=('Seed value for pseudo-random generator. Same seed value generates '
+            'the same sequence of events. Seed is randomized by default.'))
+  group.add_argument(
+      '--extra-args', default='',
+      help=('String of other args to pass to the command verbatim.'))
+
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+
+def ProcessMonkeyTestOptions(args):
+  """Processes all monkey test options.
+
+  Args:
+    args: argparse.Namespace object.
+
+  Returns:
+    A MonkeyOptions named tuple which contains all options relevant to
+    monkey tests.
+  """
+  # TODO(jbudorick): Handle this directly in argparse with nargs='+'
+  category = args.category
+  if category:
+    category = args.category.split(',')
+
+  # TODO(jbudorick): Get rid of MonkeyOptions.
+  return monkey_test_options.MonkeyOptions(
+      args.verbose_count,
+      args.package,
+      args.event_count,
+      category,
+      args.throttle,
+      args.seed,
+      args.extra_args)
+
+def AddUirobotTestOptions(parser):
+  """Adds uirobot test options to |option_parser|."""
+  group = parser.add_argument_group('Uirobot Test Options')
+
+  group.add_argument('--app-under-test', required=True,
+                     help='APK to run tests on.')
+  group.add_argument(
+      '--minutes', default=5, type=int,
+      help='Number of minutes to run uirobot test [default: %(default)s].')
+
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+  AddRemoteDeviceOptions(parser)
+
+def AddPerfTestOptions(parser):
+  """Adds perf test options to |parser|."""
+
+  group = parser.add_argument_group('Perf Test Options')
+
+  class SingleStepAction(argparse.Action):
+    def __call__(self, parser, namespace, values, option_string=None):
+      if values and not namespace.single_step:
+        parser.error('single step command provided, '
+                     'but --single-step not specified.')
+      elif namespace.single_step and not values:
+        parser.error('--single-step specified, '
+                     'but no single step command provided.')
+      setattr(namespace, self.dest, values)
+
+  step_group = group.add_mutually_exclusive_group(required=True)
+  # TODO(jbudorick): Revise --single-step to use argparse.REMAINDER.
+  # This requires removing "--" from client calls.
+  step_group.add_argument(
+      '--single-step', action='store_true',
+      help='Execute the given command with retries, but only print the result '
+           'for the "most successful" round.')
+  step_group.add_argument(
+      '--steps',
+      help='JSON file containing the list of commands to run.')
+  step_group.add_argument(
+      '--print-step',
+      help='The name of a previously executed perf step to print.')
+
+  group.add_argument(
+      '--output-json-list',
+      help='Write a simple list of names from --steps into the given file.')
+  group.add_argument(
+      '--collect-chartjson-data',
+      action='store_true',
+      help='Cache the chartjson output from each step for later use.')
+  group.add_argument(
+      '--output-chartjson-data',
+      default='',
+      help='Write out chartjson into the given file.')
+  group.add_argument(
+      '--flaky-steps',
+      help=('A JSON file containing steps that are flaky and whose exit '
+            'codes will be ignored.'))
+  group.add_argument(
+      '--no-timeout', action='store_true',
+      help=('Do not impose a timeout. Each perf step is responsible for '
+            'implementing the timeout logic.'))
+  group.add_argument(
+      '-f', '--test-filter',
+      help=('Test filter (will match against the names listed in --steps).'))
+  group.add_argument(
+      '--dry-run', action='store_true',
+      help='Just print the steps without executing.')
+  # Uses 0.1 degrees C because that's what Android does.
+  group.add_argument(
+      '--max-battery-temp', type=int,
+      help='Only start tests when the battery is at or below the given '
+           'temperature (in units of 0.1 degrees C).')
+  group.add_argument('single_step_command', nargs='*', action=SingleStepAction,
+                     help='If --single-step is specified, the command to run.')
+  group.add_argument('--min-battery-level', type=int,
+                     help='Only start tests when the battery is charged above '
+                          'the given level.')
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+
+
+def ProcessPerfTestOptions(args):
+  """Processes all perf test options.
+
+  Args:
+    args: argparse.Namespace object.
+
+  Returns:
+    A PerfOptions named tuple which contains all options relevant to
+    perf tests.
+  """
+  # TODO(jbudorick): Move single_step handling down into the perf tests.
+  if args.single_step:
+    args.single_step = ' '.join(args.single_step_command)
+  # TODO(jbudorick): Get rid of PerfOptions.
+  return perf_test_options.PerfOptions(
+      args.steps, args.flaky_steps, args.output_json_list,
+      args.print_step, args.no_timeout, args.test_filter,
+      args.dry_run, args.single_step, args.collect_chartjson_data,
+      args.output_chartjson_data, args.max_battery_temp, args.min_battery_level)
+
+
+def AddPythonTestOptions(parser):
+  group = parser.add_argument_group('Python Test Options')
+  group.add_argument(
+      '-s', '--suite', dest='suite_name', metavar='SUITE_NAME',
+      choices=constants.PYTHON_UNIT_TEST_SUITES.keys(),
+      help='Name of the test suite to run.')
+  AddCommonOptions(parser)
+
+
+def _RunGTests(args, devices):
+  """Subcommand of RunTestsCommands which runs gtests."""
+  exit_code = 0
+  for suite_name in args.suite_name:
+    # TODO(jbudorick): Either deprecate multi-suite or move its handling down
+    # into the gtest code.
+    gtest_options = gtest_test_options.GTestOptions(
+        args.tool,
+        args.test_filter,
+        args.run_disabled,
+        args.test_arguments,
+        args.timeout,
+        args.isolate_file_path,
+        suite_name,
+        args.app_data_files,
+        args.app_data_file_dir,
+        args.delete_stale_data)
+    runner_factory, tests = gtest_setup.Setup(gtest_options, devices)
+
+    results, test_exit_code = test_dispatcher.RunTests(
+        tests, runner_factory, devices, shard=True, test_timeout=None,
+        num_retries=args.num_retries)
+
+    if test_exit_code and exit_code != constants.ERROR_EXIT_CODE:
+      exit_code = test_exit_code
+
+    report_results.LogFull(
+        results=results,
+        test_type='Unit test',
+        test_package=suite_name,
+        flakiness_server=args.flakiness_dashboard_server)
+
+    if args.json_results_file:
+      json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunLinkerTests(args, devices):
+  """Subcommand of RunTestsCommands which runs linker tests."""
+  runner_factory, tests = linker_setup.Setup(args, devices)
+
+  results, exit_code = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=True, test_timeout=60,
+      num_retries=args.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Linker test',
+      test_package='ChromiumLinkerTest')
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunInstrumentationTests(args, devices):
+  """Subcommand of RunTestsCommands which runs instrumentation tests."""
+  logging.info('_RunInstrumentationTests(%s, %s)' % (str(args), str(devices)))
+
+  instrumentation_options = ProcessInstrumentationOptions(args)
+
+  if len(devices) > 1 and args.wait_for_debugger:
+    logging.warning('Debugger cannot be sharded; using first available device')
+    devices = devices[:1]
+
+  results = base_test_result.TestRunResults()
+  exit_code = 0
+
+  if args.run_java_tests:
+    runner_factory, tests = instrumentation_setup.Setup(
+        instrumentation_options, devices)
+
+    test_results, exit_code = test_dispatcher.RunTests(
+        tests, runner_factory, devices, shard=True, test_timeout=None,
+        num_retries=args.num_retries)
+
+    results.AddTestRunResults(test_results)
+
+  if args.run_python_tests:
+    runner_factory, tests = host_driven_setup.InstrumentationSetup(
+        args.host_driven_root, args.official_build,
+        instrumentation_options)
+
+    if tests:
+      test_results, test_exit_code = test_dispatcher.RunTests(
+          tests, runner_factory, devices, shard=True, test_timeout=None,
+          num_retries=args.num_retries)
+
+      results.AddTestRunResults(test_results)
+
+      # Only allow exit code escalation
+      if test_exit_code and exit_code != constants.ERROR_EXIT_CODE:
+        exit_code = test_exit_code
+
+  if args.device_flags:
+    args.device_flags = os.path.join(constants.DIR_SOURCE_ROOT,
+                                     args.device_flags)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Instrumentation',
+      test_package=os.path.basename(args.test_apk),
+      annotation=args.annotations,
+      flakiness_server=args.flakiness_dashboard_server)
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunUIAutomatorTests(args, devices):
+  """Subcommand of RunTestsCommands which runs uiautomator tests."""
+  uiautomator_options = ProcessUIAutomatorOptions(args)
+
+  runner_factory, tests = uiautomator_setup.Setup(uiautomator_options)
+
+  results, exit_code = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=True, test_timeout=None,
+      num_retries=args.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='UIAutomator',
+      test_package=os.path.basename(args.test_jar),
+      annotation=args.annotations,
+      flakiness_server=args.flakiness_dashboard_server)
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunJUnitTests(args):
+  """Subcommand of RunTestsCommand which runs junit tests."""
+  runner_factory, tests = junit_setup.Setup(args)
+  results, exit_code = junit_dispatcher.RunTests(tests, runner_factory)
+
+  report_results.LogFull(
+      results=results,
+      test_type='JUnit',
+      test_package=args.test_suite)
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunMonkeyTests(args, devices):
+  """Subcommand of RunTestsCommands which runs monkey tests."""
+  monkey_options = ProcessMonkeyTestOptions(args)
+
+  runner_factory, tests = monkey_setup.Setup(monkey_options)
+
+  results, exit_code = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=False, test_timeout=None,
+      num_retries=args.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Monkey',
+      test_package='Monkey')
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunPerfTests(args):
+  """Subcommand of RunTestsCommands which runs perf tests."""
+  perf_options = ProcessPerfTestOptions(args)
+
+  # Just save a simple json with a list of test names.
+  if perf_options.output_json_list:
+    return perf_test_runner.OutputJsonList(
+        perf_options.steps, perf_options.output_json_list)
+
+  # Just print the results from a single previously executed step.
+  if perf_options.print_step:
+    return perf_test_runner.PrintTestOutput(
+        perf_options.print_step, perf_options.output_chartjson_data)
+
+  runner_factory, tests, devices = perf_setup.Setup(perf_options)
+
+  # shard=False means that each device gets the full list of tests and then
+  # decides its own affinity.
+  # shard=True means each device pops the next available test from a queue,
+  # which increases throughput but provides no affinity.
+  results, _ = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=False, test_timeout=None,
+      num_retries=args.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Perf',
+      test_package='Perf')
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  if perf_options.single_step:
+    return perf_test_runner.PrintTestOutput('single_step')
+
+  perf_test_runner.PrintSummary(tests)
+
+  # Always return 0 in the sharding stage. Individual tests' exit codes
+  # will be returned in the print_step stage.
+  return 0
+
+
+def _RunPythonTests(args):
+  """Subcommand of RunTestsCommand which runs python unit tests."""
+  suite_vars = constants.PYTHON_UNIT_TEST_SUITES[args.suite_name]
+  suite_path = suite_vars['path']
+  suite_test_modules = suite_vars['test_modules']
+
+  sys.path = [suite_path] + sys.path
+  try:
+    suite = unittest.TestSuite()
+    suite.addTests(unittest.defaultTestLoader.loadTestsFromName(m)
+                   for m in suite_test_modules)
+    runner = unittest.TextTestRunner(verbosity=1+args.verbose_count)
+    return 0 if runner.run(suite).wasSuccessful() else 1
+  finally:
+    sys.path = sys.path[1:]
+
+
+def _GetAttachedDevices(test_device=None):
+  """Get all attached devices.
+
+  Args:
+    test_device: Name of a specific device to use.
+
+  Returns:
+    A list of attached devices.
+  """
+  attached_devices = device_utils.DeviceUtils.HealthyDevices()
+  if test_device:
+    test_device = [d for d in attached_devices if d == test_device]
+    if not test_device:
+      raise device_errors.DeviceUnreachableError(
+          'Did not find device %s among attached devices. Attached devices: %s'
+          % (test_device, ', '.join(attached_devices)))
+    return test_device
+
+  else:
+    if not attached_devices:
+      raise device_errors.NoDevicesError()
+    return sorted(attached_devices)
+
+
+def RunTestsCommand(args, parser):
+  """Checks test type and dispatches to the appropriate function.
+
+  Args:
+    args: argparse.Namespace object.
+    parser: argparse.ArgumentParser object.
+
+  Returns:
+    Integer indicating the exit code.
+
+  Raises:
+    Exception: Unknown command name passed in, or an exception from an
+        individual test runner.
+  """
+  command = args.command
+
+  ProcessCommonOptions(args)
+
+  if args.enable_platform_mode:
+    return RunTestsInPlatformMode(args, parser)
+
+  if command in constants.LOCAL_MACHINE_TESTS:
+    devices = []
+  else:
+    devices = _GetAttachedDevices(args.test_device)
+
+  forwarder.Forwarder.RemoveHostLog()
+  if not ports.ResetTestServerPortAllocation():
+    raise Exception('Failed to reset test server port.')
+
+  if command == 'gtest':
+    if args.suite_name[0] in gtest_test_instance.BROWSER_TEST_SUITES:
+      return RunTestsInPlatformMode(args, parser)
+    return _RunGTests(args, devices)
+  elif command == 'linker':
+    return _RunLinkerTests(args, devices)
+  elif command == 'instrumentation':
+    return _RunInstrumentationTests(args, devices)
+  elif command == 'uiautomator':
+    return _RunUIAutomatorTests(args, devices)
+  elif command == 'junit':
+    return _RunJUnitTests(args)
+  elif command == 'monkey':
+    return _RunMonkeyTests(args, devices)
+  elif command == 'perf':
+    return _RunPerfTests(args)
+  elif command == 'python':
+    return _RunPythonTests(args)
+  else:
+    raise Exception('Unknown test type.')
+
+
+_SUPPORTED_IN_PLATFORM_MODE = [
+  # TODO(jbudorick): Add support for more test types.
+  'gtest',
+  'instrumentation',
+  'uirobot',
+]
+
+
+def RunTestsInPlatformMode(args, parser):
+
+  if args.command not in _SUPPORTED_IN_PLATFORM_MODE:
+    parser.error('%s is not yet supported in platform mode' % args.command)
+
+  with environment_factory.CreateEnvironment(args, parser.error) as env:
+    with test_instance_factory.CreateTestInstance(args, parser.error) as test:
+      with test_run_factory.CreateTestRun(
+          args, env, test, parser.error) as test_run:
+        results = test_run.RunTests()
+
+        if args.environment == 'remote_device' and args.trigger:
+          return 0  # Not returning results; only triggering.
+
+        report_results.LogFull(
+            results=results,
+            test_type=test.TestType(),
+            test_package=test_run.TestPackage(),
+            annotation=getattr(args, 'annotations', None),
+            flakiness_server=getattr(args, 'flakiness_dashboard_server', None))
+
+        if args.json_results_file:
+          json_results.GenerateJsonResultsFile(
+              results, args.json_results_file)
+
+  return 0 if results.DidRunPass() else constants.ERROR_EXIT_CODE
+
+
+CommandConfigTuple = collections.namedtuple(
+    'CommandConfigTuple',
+    ['add_options_func', 'help_txt'])
+VALID_COMMANDS = {
+    'gtest': CommandConfigTuple(
+        AddGTestOptions,
+        'googletest-based C++ tests'),
+    'instrumentation': CommandConfigTuple(
+        AddInstrumentationTestOptions,
+        'InstrumentationTestCase-based Java tests'),
+    'uiautomator': CommandConfigTuple(
+        AddUIAutomatorTestOptions,
+        "Tests that run via Android's uiautomator command"),
+    'junit': CommandConfigTuple(
+        AddJUnitTestOptions,
+        'JUnit4-based Java tests'),
+    'monkey': CommandConfigTuple(
+        AddMonkeyTestOptions,
+        "Tests based on Android's monkey"),
+    'perf': CommandConfigTuple(
+        AddPerfTestOptions,
+        'Performance tests'),
+    'python': CommandConfigTuple(
+        AddPythonTestOptions,
+        'Python tests based on unittest.TestCase'),
+    'linker': CommandConfigTuple(
+        AddLinkerTestOptions,
+        'Linker tests'),
+    'uirobot': CommandConfigTuple(
+        AddUirobotTestOptions,
+        'Uirobot test'),
+}
+
+
+def DumpThreadStacks(_signal, _frame):
+  for thread in threading.enumerate():
+    reraiser_thread.LogThreadStack(thread)
+
+
+def main():
+  signal.signal(signal.SIGUSR1, DumpThreadStacks)
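+  # Sending SIGUSR1 (e.g. 'kill -USR1 <pid>') now dumps the stack of every
+  # live thread, which helps when debugging hangs.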
+
+  parser = argparse.ArgumentParser()
+  command_parsers = parser.add_subparsers(title='test types',
+                                          dest='command')
+
+  for test_type, config in sorted(VALID_COMMANDS.iteritems(),
+                                  key=lambda x: x[0]):
+    subparser = command_parsers.add_parser(
+        test_type, usage='%(prog)s [options]', help=config.help_txt)
+    config.add_options_func(subparser)
+
+  args = parser.parse_args()
+
+  try:
+    return RunTestsCommand(args, parser)
+  except base_error.BaseError as e:
+    logging.exception('Error occurred.')
+    if e.is_infra_error:
+      return constants.INFRA_EXIT_CODE
+    return constants.ERROR_EXIT_CODE
+  except: # pylint: disable=W0702
+    logging.exception('Unrecognized error occurred.')
+    return constants.ERROR_EXIT_CODE
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/tests/symbolize/Makefile b/build/android/tests/symbolize/Makefile
new file mode 100644
index 0000000..5178a04
--- /dev/null
+++ b/build/android/tests/symbolize/Makefile
@@ -0,0 +1,11 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+TOOLCHAIN=../../../../third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi-
+CXX=$(TOOLCHAIN)g++
+
+lib%.so: %.cc
+	$(CXX) -nostdlib -g -fPIC -shared $< -o $@
+
+all: liba.so libb.so
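+
+# Since 'lib%.so' is only a pattern rule, 'all' is the first explicit target,
+# so a plain 'make' builds both liba.so and libb.so.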
diff --git a/build/android/tests/symbolize/a.cc b/build/android/tests/symbolize/a.cc
new file mode 100644
index 0000000..f0c7ca4
--- /dev/null
+++ b/build/android/tests/symbolize/a.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class A {
+ public:
+  A();
+  void Foo(int i);
+  void Bar(const char* c);
+};
+
+A::A() {}
+void A::Foo(int i) {}
+void A::Bar(const char* c) {}
diff --git a/build/android/tests/symbolize/b.cc b/build/android/tests/symbolize/b.cc
new file mode 100644
index 0000000..db87520
--- /dev/null
+++ b/build/android/tests/symbolize/b.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class B {
+ public:
+  B();
+  void Baz(float f);
+  void Qux(double d);
+};
+
+B::B() {}
+void B::Baz(float f) {}
+void B::Qux(double d) {}
diff --git a/build/android/tests/symbolize/liba.so b/build/android/tests/symbolize/liba.so
new file mode 100644
index 0000000..79cb739
--- /dev/null
+++ b/build/android/tests/symbolize/liba.so
Binary files differ
diff --git a/build/android/tests/symbolize/libb.so b/build/android/tests/symbolize/libb.so
new file mode 100644
index 0000000..7cf01d4
--- /dev/null
+++ b/build/android/tests/symbolize/libb.so
Binary files differ
diff --git a/build/android/tombstones.py b/build/android/tombstones.py
new file mode 100755
index 0000000..dbfe3f7
--- /dev/null
+++ b/build/android/tombstones.py
@@ -0,0 +1,252 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Finds the most recent tombstone file(s) on all connected devices
+# and prints their stacks.
+#
+# Assumes the tombstone files were created with the current symbols.
+
+import datetime
+import itertools
+import logging
+import multiprocessing
+import os
+import re
+import subprocess
+import sys
+import optparse
+
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import run_tests_helper
+
+
+_TZ_UTC = {'TZ': 'UTC'}
+
+def _ListTombstones(device):
+  """List the tombstone files on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+
+  Yields:
+    Tuples of (tombstone filename, date time of file on device).
+  """
+  try:
+    lines = device.RunShellCommand(
+        ['ls', '-a', '-l', '/data/tombstones'],
+        as_root=True, check_return=True, env=_TZ_UTC, timeout=60)
+    for line in lines:
+      if 'tombstone' in line and 'No such file or directory' not in line:
+        details = line.split()
+        t = datetime.datetime.strptime(details[-3] + ' ' + details[-2],
+                                       '%Y-%m-%d %H:%M')
+        yield details[-1], t
+  except device_errors.CommandFailedError:
+    logging.exception('Could not retrieve tombstones.')
+
+
+def _GetDeviceDateTime(device):
+  """Determine the date time on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+
+  Returns:
+    A datetime instance.
+  """
+  device_now_string = device.RunShellCommand(
+      ['date'], check_return=True, env=_TZ_UTC)
+  return datetime.datetime.strptime(
+      device_now_string[0], '%a %b %d %H:%M:%S %Z %Y')
+
+
+def _GetTombstoneData(device, tombstone_file):
+  """Retrieve the tombstone data from the device
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: the tombstone to retrieve
+
+  Returns:
+    A list of lines
+  """
+  return device.ReadFile(
+      '/data/tombstones/' + tombstone_file, as_root=True).splitlines()
+
+
+def _EraseTombstone(device, tombstone_file):
+  """Deletes a tombstone from the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: the tombstone to delete.
+  """
+  return device.RunShellCommand(
+      ['rm', '/data/tombstones/' + tombstone_file],
+      as_root=True, check_return=True)
+
+
+def _DeviceAbiToArch(device_abi):
+  # The order of this list is significant: the more specific entries (e.g.,
+  # arm64) must be matched before the less specific ones (e.g., arm).
+  arches = ['arm64', 'arm', 'x86_64', 'x86', 'mips']
+  for arch in arches:
+    if arch in device_abi:
+      return arch
+  raise RuntimeError('Unknown device ABI: %s' % device_abi)
+
+def _ResolveSymbols(tombstone_data, include_stack, device_abi):
+  """Run the stack tool for given tombstone input.
+
+  Args:
+    tombstone_data: a list of strings of tombstone data.
+    include_stack: boolean whether to include stack data in output.
+    device_abi: the default ABI of the device which generated the tombstone.
+
+  Yields:
+    A string for each line of resolved stack output.
+  """
+  # Check whether the tombstone data has an ABI listed; if so, use it in
+  # preference to the device's default ABI.
+  for line in tombstone_data:
+    found_abi = re.search('ABI: \'(.+?)\'', line)
+    if found_abi:
+      device_abi = found_abi.group(1)
+  arch = _DeviceAbiToArch(device_abi)
+  if not arch:
+    return
+
+  stack_tool = os.path.join(os.path.dirname(__file__), '..', '..',
+                            'third_party', 'android_platform', 'development',
+                            'scripts', 'stack')
+  proc = subprocess.Popen([stack_tool, '--arch', arch], stdin=subprocess.PIPE,
+                          stdout=subprocess.PIPE)
+  output = proc.communicate(input='\n'.join(tombstone_data))[0]
+  for line in output.split('\n'):
+    if not include_stack and 'Stack Data:' in line:
+      break
+    yield line
+
+
+def _ResolveTombstone(tombstone):
+  lines = []
+  lines += [tombstone['file'] + ' created on ' + str(tombstone['time']) +
+            ', about this long ago: ' +
+            str(tombstone['device_now'] - tombstone['time']) +
+            ' Device: ' + tombstone['serial']]
+  logging.info('\n'.join(lines))
+  logging.info('Resolving...')
+  lines += _ResolveSymbols(tombstone['data'], tombstone['stack'],
+                           tombstone['device_abi'])
+  return lines
+
+
+def _ResolveTombstones(jobs, tombstones):
+  """Resolve a list of tombstones.
+
+  Args:
+    jobs: the number of jobs to use with multiprocess.
+    tombstones: a list of tombstones.
+  """
+  if not tombstones:
+    logging.warning('No tombstones to resolve.')
+    return
+  if len(tombstones) == 1:
+    data = [_ResolveTombstone(tombstones[0])]
+  else:
+    pool = multiprocessing.Pool(processes=jobs)
+    data = pool.map(_ResolveTombstone, tombstones)
+  for tombstone in data:
+    for line in tombstone:
+      logging.info(line)
+
+
+def _GetTombstonesForDevice(device, options):
+  """Returns a list of tombstones on a given device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    options: command-line arguments from optparse.
+  """
+  ret = []
+  all_tombstones = list(_ListTombstones(device))
+  if not all_tombstones:
+    logging.warning('No tombstones.')
+    return ret
+
+  # Sort the tombstones in date order, descending
+  all_tombstones.sort(cmp=lambda a, b: cmp(b[1], a[1]))
+
+  # Only resolve the most recent unless --all-tombstones given.
+  tombstones = all_tombstones if options.all_tombstones else [all_tombstones[0]]
+
+  device_now = _GetDeviceDateTime(device)
+  try:
+    for tombstone_file, tombstone_time in tombstones:
+      ret += [{'serial': str(device),
+               'device_abi': device.product_cpu_abi,
+               'device_now': device_now,
+               'time': tombstone_time,
+               'file': tombstone_file,
+               'stack': options.stack,
+               'data': _GetTombstoneData(device, tombstone_file)}]
+  except device_errors.CommandFailedError:
+    for line in device.RunShellCommand(
+        ['ls', '-a', '-l', '/data/tombstones'],
+        as_root=True, check_return=True, env=_TZ_UTC, timeout=60):
+      logging.info('%s: %s', str(device), line)
+    raise
+
+  # Erase all the tombstones if desired.
+  if options.wipe_tombstones:
+    for tombstone_file, _ in all_tombstones:
+      _EraseTombstone(device, tombstone_file)
+
+  return ret
+
+
+def main():
+  custom_handler = logging.StreamHandler(sys.stdout)
+  custom_handler.setFormatter(run_tests_helper.CustomFormatter())
+  logging.getLogger().addHandler(custom_handler)
+  logging.getLogger().setLevel(logging.INFO)
+
+  parser = optparse.OptionParser()
+  parser.add_option('--device',
+                    help='The serial number of the device. If not specified '
+                         'will use all devices.')
+  parser.add_option('-a', '--all-tombstones', action='store_true',
+                    help="""Resolve symbols for all tombstones, rather than just
+                         the most recent""")
+  parser.add_option('-s', '--stack', action='store_true',
+                    help='Also include symbols for stack data')
+  parser.add_option('-w', '--wipe-tombstones', action='store_true',
+                    help='Erase all tombstones from device after processing')
+  parser.add_option('-j', '--jobs', type='int',
+                    default=4,
+                    help='Number of jobs to use when processing multiple '
+                         'crash stacks.')
+  options, _ = parser.parse_args()
+
+  if options.device:
+    devices = [device_utils.DeviceUtils(options.device)]
+  else:
+    devices = device_utils.DeviceUtils.HealthyDevices()
+
+  # This must be done serially because strptime can hit a race condition if
+  # used for the first time in a multithreaded environment.
+  # http://bugs.python.org/issue7980
+  tombstones = []
+  for device in devices:
+    tombstones += _GetTombstonesForDevice(device, options)
+
+  _ResolveTombstones(options.jobs, tombstones)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
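
For reference, a minimal standalone sketch of the timestamp parsing that
_ListTombstones performs on each 'ls -a -l' output line; the sample line is
hypothetical:

  import datetime

  # Hypothetical output line from 'ls -a -l /data/tombstones'.
  sample = '-rw------- system system 45859 2013-04-01 12:34 tombstone_00'
  details = sample.split()
  # The timestamp occupies the second- and third-to-last fields.
  t = datetime.datetime.strptime(details[-3] + ' ' + details[-2],
                                 '%Y-%m-%d %H:%M')
  print '%s %s' % (details[-1], t)  # tombstone_00 2013-04-01 12:34:00
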
diff --git a/build/android/update_verification.py b/build/android/update_verification.py
new file mode 100755
index 0000000..05d083b
--- /dev/null
+++ b/build/android/update_verification.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs semi-automated update testing on a non-rooted device.
+
+This script will help verify that app data is preserved during an update.
+To use this script first run it with the create_app_data option.
+
+./update_verification.py create_app_data --old-apk <path> --app-data <path>
+
+The script will then install the old apk, prompt you to create some app data
+(bookmarks, etc.), and then save the app data in the path you gave it.
+
+Next, once you have some app data saved, run this script with the test_update
+option.
+
+./update_verification.py test_update --old-apk <path> --new-apk <path>
+--app-data <path>
+
+This will install the old apk, load the saved app data, install the new apk,
+and ask the user to verify that all of the app data was preserved.
+"""
+
+import argparse
+import logging
+import os
+import sys
+import time
+
+from pylib import constants
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import apk_helper
+from pylib.utils import run_tests_helper
+
+def CreateAppData(device, old_apk, app_data, package_name):
+  device.Install(old_apk)
+  raw_input('Set the application state. Once ready, press enter and '
+            'select "Backup my data" on the device.')
+  device.adb.Backup(app_data, packages=[package_name])
+  logging.critical('Application data saved to %s', app_data)
+
+def TestUpdate(device, old_apk, new_apk, app_data, package_name):
+  device.Install(old_apk)
+  device.adb.Restore(app_data)
+  # The restore command is not synchronous.
+  raw_input('Select "Restore my data" on the device. Then press enter to '
+            'continue.')
+  device_path = device.GetApplicationPaths(package_name)
+  if not device_path:
+    raise Exception('Expected package %s to already be installed. '
+                    'Package name might have changed!' % package_name)
+
+  logging.info('Verifying that %s can be overinstalled.', new_apk)
+  device.adb.Install(new_apk, reinstall=True)
+  logging.critical('Successfully updated to the new apk. Please verify that '
+                   'the application data is preserved.')
+
+def main():
+  parser = argparse.ArgumentParser(
+      description="Script to do semi-automated upgrade testing.")
+  parser.add_argument('-v', '--verbose', action='count',
+                      help='Print verbose log information.')
+  command_parsers = parser.add_subparsers(dest='command')
+
+  subparser = command_parsers.add_parser('create_app_data')
+  subparser.add_argument('--old-apk', required=True,
+                         help='Path to apk to update from.')
+  subparser.add_argument('--app-data', required=True,
+                         help='Path where the app data backup should be '
+                              'saved.')
+  subparser.add_argument('--package-name',
+                         help='Chrome apk package name.')
+
+  subparser = command_parsers.add_parser('test_update')
+  subparser.add_argument('--old-apk', required=True,
+                         help='Path to apk to update from.')
+  subparser.add_argument('--new-apk', required=True,
+                         help='Path to apk to update to.')
+  subparser.add_argument('--app-data', required=True,
+                         help='Path to where the app data backup is saved.')
+  subparser.add_argument('--package-name',
+                         help='Chrome apk package name.')
+
+  args = parser.parse_args()
+  run_tests_helper.SetLogLevel(args.verbose)
+
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  if not devices:
+    raise device_errors.NoDevicesError()
+  device = devices[0]
+  logging.info('Using device %s for testing.', str(device))
+
+  package_name = (args.package_name if args.package_name
+                  else apk_helper.GetPackageName(args.old_apk))
+  if args.command == 'create_app_data':
+    CreateAppData(device, args.old_apk, args.app_data, package_name)
+  elif args.command == 'test_update':
+    TestUpdate(
+        device, args.old_apk, args.new_apk, args.app_data, package_name)
+  else:
+    raise Exception('Unknown test command: %s' % args.command)
+
+if __name__ == '__main__':
+  sys.exit(main())
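
The DeviceUtils calls above wrap adb's stock backup and restore commands. A
rough standalone sketch of the same save/load steps using raw adb (the backup
path and package name are hypothetical, and adb is assumed to be on PATH):

  import subprocess

  # Save app data to a backup file; the user confirms the prompt on-device.
  subprocess.check_call(['adb', 'backup', '-f', 'app_data.ab',
                         'com.example.app'])

  # Load the saved app data back. Restore is not synchronous, so wait for
  # the user to confirm completion, mirroring TestUpdate above.
  subprocess.check_call(['adb', 'restore', 'app_data.ab'])
  raw_input('Select "Restore my data" on the device, then press enter.')
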
diff --git a/build/android/write_ordered_libraries.gypi b/build/android/write_ordered_libraries.gypi
new file mode 100644
index 0000000..1b52e71
--- /dev/null
+++ b/build/android/write_ordered_libraries.gypi
@@ -0,0 +1,43 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# generates a json file with the list of dependent libraries needed for a given
+# shared library or executable.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'actions': [
+#      {
+#        'variables': {
+#          'input_libraries': 'shared library or executable to process',
+#          'ordered_libraries_file': 'file to generate'
+#        },
+#        'includes': [ '../../build/android/write_ordered_libraries.gypi' ],
+#      },
+#    ],
+#  },
+#
+
+{
+  'action_name': 'ordered_libraries_<(_target_name)<(subtarget)',
+  'message': 'Writing dependency ordered libraries for <(_target_name)',
+  'variables': {
+    'input_libraries%': [],
+    'subtarget%': '',
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/write_ordered_libraries.py',
+    '<@(input_libraries)',
+  ],
+  'outputs': [
+    '<(ordered_libraries_file)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/write_ordered_libraries.py',
+    '--input-libraries=<(input_libraries)',
+    '--libraries-dir=<(SHARED_LIB_DIR),<(PRODUCT_DIR)',
+    '--readelf=<(android_readelf)',
+    '--output=<(ordered_libraries_file)',
+  ],
+}
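
Concretely, a consuming target might look like the following sketch; every
name and path here is made up for illustration:

  {
    'target_name': 'libfoo_ordered_libraries',
    'type': 'none',
    'actions': [
      {
        'variables': {
          'input_libraries': ['<(SHARED_LIB_DIR)/libfoo.so'],
          'ordered_libraries_file': '<(PRODUCT_DIR)/libfoo.ordered.json',
        },
        'includes': ['../../build/android/write_ordered_libraries.gypi'],
      },
    ],
  },
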
diff --git a/build/android_sdk_extras.json b/build/android_sdk_extras.json
new file mode 100644
index 0000000..25b47c3
--- /dev/null
+++ b/build/android_sdk_extras.json
@@ -0,0 +1,9 @@
+[
+  {
+    "dir_name": "google",
+    "version": "21.0.0",
+    "zip": "google_google_play_services_21.0.0.zip",
+    "package": "google_play_services",
+    "package_id": "extra-google-google_play_services"
+  }
+]
diff --git a/build/apk_browsertest.gypi b/build/apk_browsertest.gypi
new file mode 100644
index 0000000..316f52f
--- /dev/null
+++ b/build/apk_browsertest.gypi
@@ -0,0 +1,43 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build APK-based browser test suites.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'test_suite_name_apk',
+#   'type': 'none',
+#   'variables': {
+#     'test_suite_name': 'test_suite_name',  # string
+#     'java_in_dir': 'path/to/java/dir',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+
+{
+  'dependencies': [
+    '<(DEPTH)/base/base.gyp:base_java',
+    '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands',
+    '<(DEPTH)/build/android/pylib/remote/device/dummy/dummy.gyp:remote_device_dummy_apk',
+    '<(DEPTH)/testing/android/appurify_support.gyp:appurify_support_java',
+    '<(DEPTH)/testing/android/native_test.gyp:native_test_java',
+    '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+  ],
+  'conditions': [
+     ['OS == "android"', {
+       'variables': {
+         # These are used to configure java_apk.gypi included below.
+         'apk_name': '<(test_suite_name)',
+         'intermediate_dir': '<(PRODUCT_DIR)/<(test_suite_name)_apk',
+         'final_apk_path': '<(intermediate_dir)/<(test_suite_name)-debug.apk',
+         'native_lib_target': 'lib<(test_suite_name)',
+         # TODO(yfriedman, cjhopman): Support managed installs for gtests.
+         'gyp_managed_install': 0,
+       },
+       'includes': [ 'java_apk.gypi' ],
+     }],  # OS == "android"
+  ],  # conditions
+}
diff --git a/build/apk_fake_jar.gypi b/build/apk_fake_jar.gypi
new file mode 100644
index 0000000..128b84c
--- /dev/null
+++ b/build/apk_fake_jar.gypi
@@ -0,0 +1,15 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java in a consistent manner.
+
+{
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['>(apk_output_jar_path)'],
+      'library_dexed_jars_paths': ['>(apk_output_jar_path)'],
+    },
+  },
+}
diff --git a/build/apk_test.gypi b/build/apk_test.gypi
new file mode 100644
index 0000000..e0d323f
--- /dev/null
+++ b/build/apk_test.gypi
@@ -0,0 +1,45 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build APK based test suites.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'test_suite_name_apk',
+#   'type': 'none',
+#   'variables': {
+#     'test_suite_name': 'test_suite_name',  # string
+#     'input_jars_paths': ['/path/to/test_suite.jar', ... ],  # list
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+
+{
+  'dependencies': [
+    '<(DEPTH)/base/base.gyp:base_java',
+    '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands',
+    '<(DEPTH)/build/android/pylib/remote/device/dummy/dummy.gyp:remote_device_dummy_apk',
+    '<(DEPTH)/testing/android/appurify_support.gyp:appurify_support_java',
+    '<(DEPTH)/testing/android/on_device_instrumentation.gyp:reporter_java',
+    '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+  ],
+  'conditions': [
+     ['OS == "android"', {
+       'variables': {
+         # These are used to configure java_apk.gypi included below.
+         'test_type': 'gtest',
+         'apk_name': '<(test_suite_name)',
+         'intermediate_dir': '<(PRODUCT_DIR)/<(test_suite_name)_apk',
+         'final_apk_path': '<(intermediate_dir)/<(test_suite_name)-debug.apk',
+         'java_in_dir': '<(DEPTH)/testing/android/native_test/java',
+         'native_lib_target': 'lib<(test_suite_name)',
+         # TODO(yfriedman, cjhopman): Support managed installs for gtests.
+         'gyp_managed_install': 0,
+       },
+       'includes': [ 'java_apk.gypi', 'android/test_runner.gypi' ],
+     }],  # OS == "android"
+  ],  # conditions
+}
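
As a concrete sketch of the form described in the comment above (the suite
name and jar path are hypothetical):

  {
    'target_name': 'foo_unittests_apk',
    'type': 'none',
    'variables': {
      'test_suite_name': 'foo_unittests',
      'input_jars_paths': ['<(PRODUCT_DIR)/lib.java/foo_unittests.jar'],
    },
    'includes': ['../build/apk_test.gypi'],
  },
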
diff --git a/build/apply_locales.py b/build/apply_locales.py
new file mode 100755
index 0000000..6af7280
--- /dev/null
+++ b/build/apply_locales.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO: remove this script when GYP has for loops
+
+import sys
+import optparse
+
+def main(argv):
+
+  parser = optparse.OptionParser()
+  usage = 'usage: %s [options ...] format_string locale_list'
+  parser.set_usage(usage.replace('%s', '%prog'))
+  parser.add_option('-d', dest='dash_to_underscore', action="store_true",
+                    default=False,
+                    help='map "en-US" to "en" and "-" to "_" in locales')
+
+  (options, arglist) = parser.parse_args(argv)
+
+  if len(arglist) < 3:
+    print 'ERROR: need string and list of locales'
+    return 1
+
+  str_template = arglist[1]
+  locales = arglist[2:]
+
+  results = []
+  for locale in locales:
+    # For Cocoa to find the locale at runtime, it needs to use '_' instead
+    # of '-' (http://crbug.com/20441).  Also, 'en-US' should be represented
+    # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
+    if options.dash_to_underscore:
+      if locale == 'en-US':
+        locale = 'en'
+      locale = locale.replace('-', '_')
+    results.append(str_template.replace('ZZLOCALE', locale))
+
+  # Quote each element so filename spaces don't mess up GYP's attempt to parse
+  # it into a list.
+  print ' '.join(["'%s'" % x for x in results])
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
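
A minimal sketch of the substitution this script performs, equivalent to a
hypothetical invocation with -d, the format string 'locale_ZZLOCALE.pak', and
the locales en-US, fr, and pt-BR:

  template = 'locale_ZZLOCALE.pak'
  results = []
  for locale in ['en-US', 'fr', 'pt-BR']:
    if locale == 'en-US':
      locale = 'en'
    locale = locale.replace('-', '_')
    results.append(template.replace('ZZLOCALE', locale))
  print ' '.join(["'%s'" % x for x in results])
  # -> 'locale_en.pak' 'locale_fr.pak' 'locale_pt_BR.pak'
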
diff --git a/build/branding_value.sh b/build/branding_value.sh
new file mode 100755
index 0000000..9fcb550
--- /dev/null
+++ b/build/branding_value.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a wrapper for fetching values from the BRANDING files.  Pass the
+# value of GYP's branding variable followed by the key you want and the right
+# file is checked.
+#
+#  branding_value.sh Chromium COPYRIGHT
+#  branding_value.sh Chromium PRODUCT_FULLNAME
+#
+
+set -e
+
+if [ $# -ne 2 ] ;  then
+  echo "error: expect two arguments, branding and key" >&2
+  exit 1
+fi
+
+BUILD_BRANDING=$1
+THE_KEY=$2
+
+pushd $(dirname "${0}") > /dev/null
+BUILD_DIR=$(pwd)
+popd > /dev/null
+
+TOP="${BUILD_DIR}/.."
+
+case ${BUILD_BRANDING} in
+  Chromium)
+    BRANDING_FILE="${TOP}/chrome/app/theme/chromium/BRANDING"
+    ;;
+  Chrome)
+    BRANDING_FILE="${TOP}/chrome/app/theme/google_chrome/BRANDING"
+    ;;
+  *)
+    echo "error: unknown branding: ${BUILD_BRANDING}" >&2
+    exit 1
+    ;;
+esac
+
+BRANDING_VALUE=$(sed -n -e "s/^${THE_KEY}=\(.*\)\$/\1/p" "${BRANDING_FILE}")
+
+if [ -z "${BRANDING_VALUE}" ] ; then
+  echo "error: failed to find key '${THE_KEY}'" >&2
+  exit 1
+fi
+
+echo "${BRANDING_VALUE}"
diff --git a/build/build-ctags.sh b/build/build-ctags.sh
new file mode 100755
index 0000000..61e017e
--- /dev/null
+++ b/build/build-ctags.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [[ a"`ctags --version | head -1 | grep \"^Exuberant Ctags\"`" == "a" ]]; then
+  cat <<EOF
+  You must be using Exuberant Ctags, not just standard GNU ctags. If you are on
+  Debian or a related flavor of Linux, you may want to try running
+  apt-get install exuberant-ctags.
+EOF
+  exit 1
+fi
+
+CHROME_SRC_DIR="$PWD"
+
+fail() {
+  echo "Failed to create ctags for $1"
+  exit 1
+}
+
+ctags_cmd() {
+  echo "ctags --languages=C++ $1 --exclude=.git -R -f .tmp_tags"
+}
+
+build_dir() {
+  local extraexcludes=""
+  if [[ a"$1" == "a--extra-excludes" ]]; then
+    extraexcludes="--exclude=third_party --exclude=build --exclude=out"
+    shift
+  fi
+
+  cd "$CHROME_SRC_DIR/$1" || fail $1
+  # Redirect error messages so they aren't seen because they are almost always
+  # errors about components that you just happen to have not built (NaCl, for
+  # example).
+  $(ctags_cmd "$extraexcludes") 2> /dev/null || fail $1
+  mv -f .tmp_tags tags
+}
+
+# We always build the top level but leave all submodules as optional.
+build_dir --extra-excludes "" "top level"
+
+# Build any other directories that are listed on the command line.
+for dir in "$@"; do
+  build_dir "$dir"
+done
diff --git a/build/build_config.h b/build/build_config.h
new file mode 100644
index 0000000..d8c3db6
--- /dev/null
+++ b/build/build_config.h
@@ -0,0 +1,168 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file adds defines about the platform we're currently building on.
+//  Operating System:
+//    OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX) /
+//    OS_NACL (NACL_SFI or NACL_NONSFI) / OS_NACL_SFI / OS_NACL_NONSFI
+//  Compiler:
+//    COMPILER_MSVC / COMPILER_GCC
+//  Processor:
+//    ARCH_CPU_X86 / ARCH_CPU_X86_64 / ARCH_CPU_X86_FAMILY (X86 or X86_64)
+//    ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
+
+#ifndef BUILD_BUILD_CONFIG_H_
+#define BUILD_BUILD_CONFIG_H_
+
+// A set of macros to use for platform detection.
+#if defined(__native_client__)
+// __native_client__ must be first, so that other OS_ defines are not set.
+#define OS_NACL 1
+// OS_NACL comes in two sandboxing technology flavors, SFI or Non-SFI.
+// PNaCl toolchain defines __native_client_nonsfi__ macro in Non-SFI build
+// mode, while it does not in SFI build mode.
+#if defined(__native_client_nonsfi__)
+#define OS_NACL_NONSFI
+#else
+#define OS_NACL_SFI
+#endif
+#elif defined(ANDROID)
+#define OS_ANDROID 1
+#elif defined(__APPLE__)
+// Only include TargetConditionals.h after testing for ANDROID, as some Android
+// builds on Mac don't have this header available, and it's not needed unless
+// the target is really Mac/iOS.
+#include <TargetConditionals.h>
+#define OS_MACOSX 1
+#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#define OS_IOS 1
+#endif  // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#elif defined(__linux__)
+#define OS_LINUX 1
+// include a system header to pull in features.h for glibc/uclibc macros.
+#include <unistd.h>
+#if defined(__GLIBC__) && !defined(__UCLIBC__)
+// we really are using glibc, not uClibc pretending to be glibc
+#define LIBC_GLIBC 1
+#endif
+#elif defined(_WIN32)
+#define OS_WIN 1
+#define TOOLKIT_VIEWS 1
+#elif defined(__FreeBSD__)
+#define OS_FREEBSD 1
+#elif defined(__OpenBSD__)
+#define OS_OPENBSD 1
+#elif defined(__sun)
+#define OS_SOLARIS 1
+#elif defined(__QNXNTO__)
+#define OS_QNX 1
+#else
+#error Please add support for your platform in build/build_config.h
+#endif
+
+#if defined(USE_OPENSSL_CERTS) && defined(USE_NSS_CERTS)
+#error Cannot use both OpenSSL and NSS for certificates
+#endif
+
+// For access to standard BSD features, use OS_BSD instead of a
+// more specific macro.
+#if defined(OS_FREEBSD) || defined(OS_OPENBSD)
+#define OS_BSD 1
+#endif
+
+// For access to standard POSIXish features, use OS_POSIX instead of a
+// more specific macro.
+#if defined(OS_MACOSX) || defined(OS_LINUX) || defined(OS_FREEBSD) ||     \
+    defined(OS_OPENBSD) || defined(OS_SOLARIS) || defined(OS_ANDROID) ||  \
+    defined(OS_NACL) || defined(OS_QNX)
+#define OS_POSIX 1
+#endif
+
+// Use tcmalloc
+#if (defined(OS_WIN) || defined(OS_LINUX) || defined(OS_ANDROID)) && \
+    !defined(NO_TCMALLOC)
+#define USE_TCMALLOC 1
+#endif
+
+// Compiler detection.
+#if defined(__GNUC__)
+#define COMPILER_GCC 1
+#elif defined(_MSC_VER)
+#define COMPILER_MSVC 1
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+// Processor architecture detection.  For more info on what's defined, see:
+//   http://msdn.microsoft.com/en-us/library/b0084kay.aspx
+//   http://www.agner.org/optimize/calling_conventions.pdf
+//   or with gcc, run: "echo | gcc -E -dM -"
+#if defined(_M_X64) || defined(__x86_64__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86_64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(_M_IX86) || defined(__i386__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__ARMEL__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARMEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__aarch64__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARM64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__pnacl__)
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__MIPSEL__)
+#if defined(__LP64__)
+#define ARCH_CPU_MIPS64_FAMILY 1
+#define ARCH_CPU_MIPS64EL 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#else
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPSEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#endif
+#else
+#error Please add support for your architecture in build/build_config.h
+#endif
+
+// Type detection for wchar_t.
+#if defined(OS_WIN)
+#define WCHAR_T_IS_UTF16
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+    defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff)
+#define WCHAR_T_IS_UTF32
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+    defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff)
+// On Posix, we'll detect short wchar_t, but projects aren't guaranteed to
+// compile in this mode (in particular, Chrome doesn't). This is intended for
+// other projects using base that manage their own dependencies and make sure
+// short wchar works for them.
+#define WCHAR_T_IS_UTF16
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+#if defined(OS_ANDROID)
+// The compiler thinks std::string::const_iterator and "const char*" are
+// equivalent types.
+#define STD_STRING_ITERATOR_IS_CHAR_POINTER
+// The compiler thinks base::string16::const_iterator and "char16*" are
+// equivalent types.
+#define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER
+#endif
+
+#endif  // BUILD_BUILD_CONFIG_H_
diff --git a/build/check_return_value.py b/build/check_return_value.py
new file mode 100755
index 0000000..c659d1e
--- /dev/null
+++ b/build/check_return_value.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This program wraps an arbitrary command and prints "1" if the command ran
+successfully."""
+
+import os
+import subprocess
+import sys
+
+devnull = open(os.devnull, 'wb')
+if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull):
+  print 1
+else:
+  print 0
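
A plausible use is from a GYP command expansion, where the printed value
becomes a build variable; the tool name below is made up:

  'variables': {
    # 1 if the (hypothetical) frobnicator tool runs successfully, else 0.
    'have_frobnicator%':
        '<!(python <(DEPTH)/build/check_return_value.py frobnicator --version)',
  },
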
diff --git a/build/check_sdk_extras_version.py b/build/check_sdk_extras_version.py
new file mode 100755
index 0000000..9b2f10d
--- /dev/null
+++ b/build/check_sdk_extras_version.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''Checks the status of an Android SDK package.
+
+Verifies the given package has been installed from the Android SDK Manager and
+that its version is at least the minimum version required by the project
+configuration.
+'''
+
+import argparse
+import json
+import os
+import re
+import sys
+
+
+COLORAMA_ROOT = os.path.join(os.path.dirname(__file__),
+                             os.pardir, 'third_party', 'colorama', 'src')
+
+sys.path.append(COLORAMA_ROOT)
+import colorama
+
+
+UPDATE_SCRIPT_PATH = 'build/install-android-sdks.sh'
+
+SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
+                                    'android_sdk_extras.json')
+
+PACKAGE_VERSION_PATTERN = r'^Pkg\.Revision=(?P<version>\d+).*$'
+
+PKG_NOT_FOUND_MSG = ('Error while checking Android SDK extras versions. '
+                     'Could not find the "{package_id}" package in '
+                     '{checked_location}. Please run {script} to download it.')
+UPDATE_NEEDED_MSG = ('Error while checking Android SDK extras versions. '
+                     'Version {minimum_version} or greater is required for the '
+                     'package "{package_id}". Version {actual_version} found. '
+                     'Please run {script} to update it.')
+REQUIRED_VERSION_ERROR_MSG = ('Error while checking Android SDK extras '
+                              'versions. '
+                              'Could not retrieve the required version for '
+                              'package "{package_id}".')
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('--package-id',
+                      help=('id of the package to check for. The list of '
+                            'available packages and their ids can be obtained '
+                            'by running '
+                            'third_party/android_tools/sdk/tools/android list '
+                            'sdk --extended'))
+  parser.add_argument('--package-location',
+                      help='path to the package\'s expected install location.',
+                      metavar='DIR')
+  parser.add_argument('--stamp',
+                      help=('if specified, a stamp file will be created at the '
+                            'provided location.'),
+                      metavar='FILE')
+
+  args = parser.parse_args()
+
+  if not ShouldSkipVersionCheck():
+    minimum_version = GetRequiredMinimumVersion(args.package_id)
+    CheckPackageVersion(args.package_id, args.package_location, minimum_version)
+
+  # Create the stamp file.
+  if args.stamp:
+    with open(args.stamp, 'a'):
+      os.utime(args.stamp, None)
+
+  sys.exit(0)
+
+def ExitError(msg):
+  sys.exit(colorama.Fore.MAGENTA + colorama.Style.BRIGHT + msg +
+           colorama.Style.RESET_ALL)
+
+
+def GetRequiredMinimumVersion(package_id):
+  with open(SDK_EXTRAS_JSON_FILE, 'r') as json_file:
+    packages = json.load(json_file)
+
+  for package in packages:
+    if package['package_id'] == package_id:
+      return int(package['version'].split('.')[0])
+
+  ExitError(REQUIRED_VERSION_ERROR_MSG.format(package_id=package_id))
+
+
+def CheckPackageVersion(pkg_id, location, minimum_version):
+  version_file_path = os.path.join(location, 'source.properties')
+  # Extracts the version of the package described by the property file. We only
+  # care about the major version number here.
+  version_pattern = re.compile(PACKAGE_VERSION_PATTERN, re.MULTILINE)
+
+  if not os.path.isfile(version_file_path):
+    ExitError(PKG_NOT_FOUND_MSG.format(
+      package_id=pkg_id,
+      checked_location=location,
+      script=UPDATE_SCRIPT_PATH))
+
+  with open(version_file_path, 'r') as f:
+    match = version_pattern.search(f.read())
+
+    if not match:
+      ExitError(PKG_NOT_FOUND_MSG.format(
+        package_id=pkg_id,
+        checked_location=location,
+        script=UPDATE_SCRIPT_PATH))
+
+    pkg_version = int(match.group('version'))
+    if pkg_version < minimum_version:
+      ExitError(UPDATE_NEEDED_MSG.format(
+        package_id=pkg_id,
+        minimum_version=minimum_version,
+        actual_version=pkg_version,
+        script=UPDATE_SCRIPT_PATH))
+
+  # Everything looks ok, print nothing.
+
+def ShouldSkipVersionCheck():
+  '''Bots should not run the version check, since they download the SDK
+  extras in a different way.
+  '''
+  return bool(os.environ.get('CHROME_HEADLESS'))
+
+if __name__ == '__main__':
+  main()
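
The core of the check is running PACKAGE_VERSION_PATTERN over the package's
source.properties file. A standalone sketch, with hypothetical file contents:

  import re

  # Hypothetical contents of <package-location>/source.properties.
  sample = ('Extra.NameDisplay=Google Play services\n'
            'Pkg.Revision=21.0.0\n')
  match = re.search(r'^Pkg\.Revision=(?P<version>\d+).*$', sample,
                    re.MULTILINE)
  print int(match.group('version'))  # -> 21
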
diff --git a/build/chrome_settings.gypi b/build/chrome_settings.gypi
new file mode 100644
index 0000000..e9c7535
--- /dev/null
+++ b/build/chrome_settings.gypi
@@ -0,0 +1,30 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains settings for ../chrome/chrome.gyp that other gyp files
+# also use.
+{
+  'variables': {
+    # TODO: remove this helper when we have loops in GYP
+    'apply_locales_cmd': ['python', '<(DEPTH)/build/apply_locales.py'],
+
+    'conditions': [
+      ['OS=="mac"', {
+        'conditions': [
+          ['branding=="Chrome"', {
+            'mac_bundle_id': 'com.google.Chrome',
+            'mac_creator': 'rimZ',
+            # The policy .grd file also needs the bundle id.
+            'grit_defines': ['-D', 'mac_bundle_id=com.google.Chrome'],
+          }, {  # else: branding!="Chrome"
+            'mac_bundle_id': 'org.chromium.Chromium',
+            'mac_creator': 'Cr24',
+            # The policy .grd file also needs the bundle id.
+            'grit_defines': ['-D', 'mac_bundle_id=org.chromium.Chromium'],
+          }],  # branding
+        ],  # conditions
+      }],  # OS=="mac"
+    ],  # conditions
+  },  # variables
+}
diff --git a/build/clobber.py b/build/clobber.py
new file mode 100755
index 0000000..785011a
--- /dev/null
+++ b/build/clobber.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script provides methods for clobbering build directories."""
+
+import argparse
+import os
+import shutil
+import sys
+
+
+def extract_gn_build_commands(build_ninja_file):
+  """Extracts from a build.ninja the commands to run GN.
+
+  The commands to run GN are the gn rule and build.ninja build step at the
+  top of the build.ninja file. We want to keep these when deleting GN builds
+  since we want to preserve the command-line flags to GN.
+
+  On error, returns the empty string."""
+  result = ""
+  with open(build_ninja_file, 'r') as f:
+    # Read until the second blank line. The first thing GN writes to the file
+    # is the "rule gn" and the second is the section for "build build.ninja",
+    # separated by blank lines.
+    num_blank_lines = 0
+    while num_blank_lines < 2:
+      line = f.readline()
+      if len(line) == 0:
+        return ''  # Unexpected EOF.
+      result += line
+      if line[0] == '\n':
+        num_blank_lines = num_blank_lines + 1
+  return result
+
+
+def delete_build_dir(build_dir):
+  # GN writes a build.ninja.d file. Note that not all GN builds have args.gn.
+  build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d')
+  if not os.path.exists(build_ninja_d_file):
+    shutil.rmtree(build_dir)
+    return
+
+  # GN builds aren't automatically regenerated when you sync. To avoid
+  # messing with the GN workflow, erase everything but the args file, and
+  # write a dummy build.ninja file that will automatically rerun GN the next
+  # time Ninja is run.
+  build_ninja_file = os.path.join(build_dir, 'build.ninja')
+  build_commands = extract_gn_build_commands(build_ninja_file)
+
+  try:
+    gn_args_file = os.path.join(build_dir, 'args.gn')
+    with open(gn_args_file, 'r') as f:
+      args_contents = f.read()
+  except IOError:
+    args_contents = ''
+
+  shutil.rmtree(build_dir)
+
+  # Put back the args file (if any).
+  os.mkdir(build_dir)
+  if args_contents != '':
+    with open(gn_args_file, 'w') as f:
+      f.write(args_contents)
+
+  # Write the build.ninja file sufficiently to regenerate itself.
+  with open(os.path.join(build_dir, 'build.ninja'), 'w') as f:
+    if build_commands != '':
+      f.write(build_commands)
+    else:
+      # Couldn't parse the build.ninja file, write a default thing.
+      f.write('''rule gn
+command = gn -q gen //out/%s/
+description = Regenerating ninja files
+
+build build.ninja: gn
+generator = 1
+depfile = build.ninja.d
+''' % (os.path.split(build_dir)[1]))
+
+  # Write a .d file for the build which references a nonexistent file. This
+  # will make Ninja always mark the build as dirty.
+  with open(build_ninja_d_file, 'w') as f:
+    f.write('build.ninja: nonexistent_file.gn\n')
+
+
+def clobber(out_dir):
+  """Clobber contents of build directory.
+
+  Don't delete the directory itself: some checkouts have the build directory
+  mounted."""
+  for f in os.listdir(out_dir):
+    path = os.path.join(out_dir, f)
+    if os.path.isfile(path):
+      os.unlink(path)
+    elif os.path.isdir(path):
+      delete_build_dir(path)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('out_dir', help='The output directory to clobber')
+  args = parser.parse_args()
+  clobber(args.out_dir)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
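
To make extract_gn_build_commands concrete: given the hypothetical build.ninja
content below, it keeps everything up to and including the second blank line,
i.e. the gn rule and the build.ninja build step:

  sample = ('rule gn\n'
            '  command = gn -q gen .\n'
            '\n'
            'build build.ninja: gn\n'
            '  generator = 1\n'
            '\n'
            'build foo.o: cxx foo.cc\n')
  sections = sample.split('\n\n')
  kept = '\n\n'.join(sections[:2]) + '\n\n'
  # 'kept' now equals what extract_gn_build_commands would return.
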
diff --git a/build/common.croc b/build/common.croc
new file mode 100644
index 0000000..fde7a8b
--- /dev/null
+++ b/build/common.croc
@@ -0,0 +1,127 @@
+# -*- python -*-
+# Crocodile config file for Chromium - settings common to all platforms
+#
+# This should be specified before the platform-specific config, for example:
+#       croc -c chrome_common.croc -c linux/chrome_linux.croc
+
+{
+  # List of root directories, applied in order
+  'roots' : [
+    # Sub-paths we specifically care about and want to call out
+    {
+      'root' : '_/src',
+      'altname' : 'CHROMIUM',
+    },
+  ],
+
+  # List of rules, applied in order
+  # Note that any 'include':0 rules here will be overridden by the 'include':1
+  # rules in the platform-specific configs.
+  'rules' : [
+    # Don't scan for executable lines in uninstrumented C++ header files
+    {
+      'regexp' : '.*\\.(h|hpp)$',
+      'add_if_missing' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '',
+      'group' : 'source',
+    },
+    {
+      'regexp' : '.*_(test|unittest|uitest|browsertest)\\.',
+      'group' : 'test',
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.(c|h)$',
+      'language' : 'C',
+    },
+    {
+      'regexp' : '.*\\.(cc|cpp|hpp)$',
+      'language' : 'C++',
+    },
+
+    # Files/paths to include.  Specify these before the excludes, since rules
+    # are in order.
+    {
+      'regexp' : '^CHROMIUM/(base|media|net|printing|remoting|chrome|content|webkit/glue|native_client)/',
+      'include' : 1,
+    },
+    # Don't include subversion or mercurial SCM dirs
+    {
+      'regexp' : '.*/(\\.svn|\\.hg)/',
+      'include' : 0,
+    },
+    # Don't include output dirs
+    {
+      'regexp' : '.*/(Debug|Release|out|xcodebuild)/',
+      'include' : 0,
+    },
+    # Don't include third-party source
+    {
+      'regexp' : '.*/third_party/',
+      'include' : 0,
+    },
+    # We don't run the V8 test suite, so we don't care about V8 coverage.
+    {
+      'regexp' : '.*/v8/',
+      'include' : 0,
+    },
+  ],
+
+  # Paths to add source from
+  'add_files' : [
+    'CHROMIUM'
+  ],
+
+  # Statistics to print
+  'print_stats' : [
+    {
+      'stat' : 'files_executable',
+      'format' : '*RESULT FilesKnown: files_executable= %d files',
+    },
+    {
+      'stat' : 'files_instrumented',
+      'format' : '*RESULT FilesInstrumented: files_instrumented= %d files',
+    },
+    {
+      'stat' : '100.0 * files_instrumented / files_executable',
+      'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g percent',
+    },
+    {
+      'stat' : 'lines_executable',
+      'format' : '*RESULT LinesKnown: lines_known= %d lines',
+    },
+    {
+      'stat' : 'lines_instrumented',
+      'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines',
+    },
+    {
+      'stat' : 'lines_covered',
+      'format' : '*RESULT LinesCoveredSource: lines_covered_source= %d lines',
+      'group' : 'source',
+    },
+    {
+      'stat' : 'lines_covered',
+      'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines',
+      'group' : 'test',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCovered: percent_covered= %g percent',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g percent',
+      'group' : 'source',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g percent',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/common.gypi b/build/common.gypi
new file mode 100644
index 0000000..3a2df58
--- /dev/null
+++ b/build/common.gypi
@@ -0,0 +1,6216 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# IMPORTANT:
+# Please don't directly include this file if you are building via gyp_chromium,
+# since gyp_chromium is automatically forcing its inclusion.
+{
+  # Variables expected to be overridden on the GYP command line (-D) or by
+  # ~/.gyp/include.gypi.
+  'variables': {
+    # Putting a variables dict inside another variables dict looks kind of
+    # weird.  This is done so that 'host_arch', 'chromeos', etc are defined as
+    # variables within the outer variables dict here.  This is necessary
+    # to get these variables defined for the conditions within this variables
+    # dict that operate on these variables.
+    'variables': {
+      'variables': {
+        'variables': {
+          'variables': {
+            # Whether we're building a ChromeOS build.
+            'chromeos%': 0,
+
+            # Whether we're building the cast (chromecast) shell
+            'chromecast%': 0,
+
+            # Whether or not we are using the Aura windowing framework.
+            'use_aura%': 0,
+
+            # Whether or not we are building the Ash shell.
+            'use_ash%': 0,
+
+            # Whether or not we are using CRAS, the ChromeOS Audio Server.
+            'use_cras%': 0,
+
+            # Use a raw surface abstraction.
+            'use_ozone%': 0,
+
+            # Configure the build for small devices. See crbug.com/318413
+            'embedded%': 0,
+
+            'conditions': [
+              # Compute the architecture that we're building on.
+              ['OS=="win" or OS=="ios"', {
+                'host_arch%': 'ia32',
+              }, {
+                'host_arch%': '<!pymod_do_main(detect_host_arch)',
+              }],
+            ],
+          },
+          # Copy conditionally-set variables out one scope.
+          'chromeos%': '<(chromeos)',
+          'chromecast%': '<(chromecast)',
+          'use_aura%': '<(use_aura)',
+          'use_ash%': '<(use_ash)',
+          'use_cras%': '<(use_cras)',
+          'use_ozone%': '<(use_ozone)',
+          'embedded%': '<(embedded)',
+          'host_arch%': '<(host_arch)',
+
+          # Whether we are using Views Toolkit
+          'toolkit_views%': 0,
+
+          # Use the PCI lib to collect GPU information.
+          'use_libpci%': 1,
+
+          # Use OpenSSL instead of NSS as the underlying SSL and crypto
+          # implementation. Certificate verification will in most cases be
+          # handled by the OS. If OpenSSL's struct X509 is used to represent
+          # certificates, use_openssl_certs must be set.
+          'use_openssl%': 1,
+
+          # Use OpenSSL for representing certificates. When targeting Android,
+          # the platform certificate library is used for certificate
+          # verification. On other targets, this flag also enables OpenSSL for
+          # certificate verification, but this configuration is unsupported.
+          'use_openssl_certs%': 0,
+
+          # Disable viewport meta tag by default.
+          'enable_viewport%': 0,
+
+          # Enable HiDPI support.
+          'enable_hidpi%': 0,
+
+          # Enable top chrome material design.
+          'enable_topchrome_md%': 0,
+
+          # Force building against the pre-built sysroot image on Linux. By
+          # default the sysroot image is only used for Official builds or when
+          # cross-compiling to arm or mips.
+          'use_sysroot%': 0,
+
+          # Override buildtype to select the desired build flavor.
+          # Dev - everyday build for development/testing
+          # Official - release build (generally implies additional processing)
+          # TODO(mmoss) Once 'buildtype' is fully supported (e.g. Windows gyp
+          # conversion is done), some of the things which are now controlled by
+          # 'branding', such as symbol generation, will need to be refactored
+          # based on 'buildtype' (i.e. we don't care about saving symbols for
+          # non-Official builds).
+          'buildtype%': 'Dev',
+
+          # Override branding to select the desired branding flavor.
+          'branding%': 'Chromium',
+
+          'conditions': [
+            # Windows and Linux (including Chrome OS) use Aura and Ash.
+            ['OS=="win" or OS=="linux"', {
+              'use_ash%': 1,
+              'use_aura%': 1,
+            }],
+
+            ['chromecast==1 and OS!="android"', {
+              'embedded%': 1,
+              'use_ozone%': 1,
+            }],
+
+            # Ozone uses Aura.
+            ['use_ozone==1', {
+              'use_aura%': 1,
+            }],
+
+            # Whether we're a traditional desktop unix.
+            ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris") and chromeos==0', {
+              'desktop_linux%': 1,
+            }, {
+              'desktop_linux%': 0,
+            }],
+
+            # Embedded implies ozone.
+            ['embedded==1', {
+              'use_ozone%': 1,
+            }],
+
+            ['OS=="android"', {
+              'target_arch%': 'arm',
+            }, {
+              # Default architecture we're building for is the architecture we're
+              # building on, and possibly sub-architecture (for iOS builds).
+              'target_arch%': '<(host_arch)',
+            }],
+          ],
+        },
+        # Copy conditionally-set variables out one scope.
+        'chromeos%': '<(chromeos)',
+        'chromecast%': '<(chromecast)',
+        'desktop_linux%': '<(desktop_linux)',
+        'use_aura%': '<(use_aura)',
+        'use_ash%': '<(use_ash)',
+        'use_cras%': '<(use_cras)',
+        'use_ozone%': '<(use_ozone)',
+        'embedded%': '<(embedded)',
+        'use_libpci%': '<(use_libpci)',
+        'use_openssl%': '<(use_openssl)',
+        'use_openssl_certs%': '<(use_openssl_certs)',
+        'enable_viewport%': '<(enable_viewport)',
+        'enable_hidpi%': '<(enable_hidpi)',
+        'enable_topchrome_md%': '<(enable_topchrome_md)',
+        'buildtype%': '<(buildtype)',
+        'branding%': '<(branding)',
+        'branding_path_component%': '<(branding)',
+        'host_arch%': '<(host_arch)',
+        'target_arch%': '<(target_arch)',
+
+        'target_subarch%': '',
+
+        # The channel to build on Android: stable, beta, dev, canary, or
+        # default. "default" should be used on non-official builds.
+        'android_channel%': 'default',
+
+        # Set ARM architecture version.
+        'arm_version%': 7,
+
+        # Use aurax11 for clipboard implementation. This is true on linux_aura.
+        'use_clipboard_aurax11%': 0,
+
+        # goma settings.
+        # 1 to use goma.
+        # If no gomadir is set, it uses the default gomadir.
+        'use_goma%': 0,
+        'gomadir%': '',
+
+        # The system root for cross-compiles. Default: none.
+        'sysroot%': '',
+        'chroot_cmd%': '',
+
+        # The system libdir used for this ABI.
+        'system_libdir%': 'lib',
+
+        # Default MIPS arch variant. This is set in the conditions block
+        # below for MIPS targets.
+        'mips_arch_variant%': '',
+
+        # MIPS DSP ASE revision. Possible values are:
+        #   0: unavailable
+        #   1: revision 1
+        #   2: revision 2
+        'mips_dsp_rev%': 0,
+
+        'conditions': [
+          ['branding == "Chrome"', {
+            'branding_path_component%': 'google_chrome',
+          }],
+
+          ['branding == "Chromium"', {
+            'branding_path_component%': 'chromium',
+          }],
+
+          # Ash needs Aura.
+          ['use_aura==0', {
+            'use_ash%': 0,
+          }],
+
+          # Set default value of toolkit_views based on OS.
+          ['OS=="mac" or OS=="win" or chromeos==1 or use_aura==1', {
+            'toolkit_views%': 1,
+          }, {
+            'toolkit_views%': 0,
+          }],
+
+          # Embedded builds use aura without ash or views.
+          ['embedded==1', {
+            'use_aura%': 1,
+            'use_ash%': 0,
+            'toolkit_views%': 0,
+          }],
+
+          # Enable HiDPI on Mac OS, Windows and Linux (including Chrome OS).
+          ['OS=="mac" or OS=="win" or OS=="linux"', {
+            'enable_hidpi%': 1,
+          }],
+
+          # Enable Top Chrome Material Design on Chrome OS, Windows, and Linux.
+          ['chromeos==1 or OS=="win" or OS=="linux"', {
+            'enable_topchrome_md%': 1,
+          }],
+
+          # On iOS, use NSS rather than OpenSSL. See http://crbug.com/338886.
+          ['OS=="ios"', {
+            'use_openssl%': 0,
+          }],
+
+          # Enable App Launcher everywhere but mobile.
+          ['OS!="ios" and OS!="android"', {
+            'enable_app_list%': 1,
+          }, {
+            'enable_app_list%': 0,
+          }],
+
+          ['use_aura==1 and OS!="android"', {
+            'use_default_render_theme%': 1,
+          }, {
+            'use_default_render_theme%': 0,
+          }],
+
+          ['use_ozone==1', {
+            'use_ozone_evdev%': 1,
+          }, {
+            'use_ozone_evdev%': 0,
+          }],
+
+          # Set default gomadir.
+          ['OS=="win"', {
+            'gomadir': 'c:\\goma\\goma-win',
+          }, {
+            'gomadir': '<!(/bin/echo -n ${HOME}/goma)',
+          }],
+
+          # Set the default "target_subarch" on iOS. Valid values are "arm32",
+          # "arm64" and "both" (meaning a fat binary).
+          ['OS=="ios"', {
+            'target_subarch%': 'arm64',
+          }],
+
+          # Set arch variants for MIPS platforms.
+          ['target_arch=="mips64el"', {
+            'conditions': [
+              ['OS=="android"', {
+                'mips_arch_variant%': 'r6',
+              }, {
+                'mips_arch_variant%': 'r2',
+              }],
+            ],
+          }],
+
+          ['target_arch=="mipsel"', {
+            'mips_arch_variant%': 'r1',
+          }],
+
+          ['OS=="linux" and target_arch=="arm" and chromeos==0', {
+            # sysroot needs to be an absolute path, otherwise it generates
+            # incorrect results when passed to pkg-config.
+            'sysroot%': '<!(cd <(DEPTH) && pwd -P)/build/linux/debian_wheezy_arm-sysroot',
+          }], # OS=="linux" and target_arch=="arm" and chromeos==0
+
+          ['OS=="linux" and ((branding=="Chrome" and buildtype=="Official" and chromeos==0) or use_sysroot==1)' , {
+            'conditions': [
+              ['target_arch=="x64"', {
+                'sysroot%': '<!(cd <(DEPTH) && pwd -P)/build/linux/debian_wheezy_amd64-sysroot',
+              }],
+              ['target_arch=="ia32"', {
+                'sysroot%': '<!(cd <(DEPTH) && pwd -P)/build/linux/debian_wheezy_i386-sysroot',
+              }],
+          ],
+          }], # OS=="linux" and branding=="Chrome" and buildtype=="Official" and chromeos==0
+
+          ['OS=="linux" and target_arch=="mipsel"', {
+            'sysroot%': '<!(cd <(DEPTH) && pwd -P)/mipsel-sysroot/sysroot',
+          }],
+        ],
+      },
+
+      # Copy conditionally-set variables out one scope.
+      'chromeos%': '<(chromeos)',
+      'chromecast%': '<(chromecast)',
+      'host_arch%': '<(host_arch)',
+      'target_arch%': '<(target_arch)',
+      'target_subarch%': '<(target_subarch)',
+      'mips_arch_variant%': '<(mips_arch_variant)',
+      'mips_dsp_rev%': '<(mips_dsp_rev)',
+      'toolkit_views%': '<(toolkit_views)',
+      'desktop_linux%': '<(desktop_linux)',
+      'use_aura%': '<(use_aura)',
+      'use_ash%': '<(use_ash)',
+      'use_cras%': '<(use_cras)',
+      'use_libpci%': '<(use_libpci)',
+      'use_ozone%': '<(use_ozone)',
+      'use_ozone_evdev%': '<(use_ozone_evdev)',
+      'use_clipboard_aurax11%': '<(use_clipboard_aurax11)',
+      'embedded%': '<(embedded)',
+      'use_openssl%': '<(use_openssl)',
+      'use_openssl_certs%': '<(use_openssl_certs)',
+      'enable_viewport%': '<(enable_viewport)',
+      'enable_hidpi%': '<(enable_hidpi)',
+      'enable_topchrome_md%': '<(enable_topchrome_md)',
+      'android_channel%': '<(android_channel)',
+      'use_goma%': '<(use_goma)',
+      'gomadir%': '<(gomadir)',
+      'enable_app_list%': '<(enable_app_list)',
+      'use_default_render_theme%': '<(use_default_render_theme)',
+      'buildtype%': '<(buildtype)',
+      'branding%': '<(branding)',
+      'branding_path_component%': '<(branding_path_component)',
+      'arm_version%': '<(arm_version)',
+      'sysroot%': '<(sysroot)',
+      'chroot_cmd%': '<(chroot_cmd)',
+      'system_libdir%': '<(system_libdir)',
+
+      # Set to 1 to enable fast builds. Set to 2 for even faster builds
+      # (it disables debug info for fastest compilation - only for use
+      # on compile-only bots).
+      'fastbuild%': 0,
+
+      # Set to 1 to not store any build metadata, e.g. ifdef out all __DATE__
+      # and __TIME__. Set to 0 to reenable the use of these macros in the code
+      # base. See http://crbug.com/314403.
+      'dont_embed_build_metadata%': 1,
+
+      # Set to 1 to force Visual C++ to use legacy debug information format /Z7.
+      # This is useful for parallel compilation tools which can't support /Zi.
+      # Only used on Windows.
+      'win_z7%' : 0,
+
+      # Set to 1 to enable dcheck in Release build.
+      'dcheck_always_on%': 0,
+
+      # Set to 1 to make a build that disables unshipped tracing events.
+      # Note: this setting is ignored if buildtype=="Official".
+      'tracing_like_official_build%': 0,
+
+      # Disable image loader component extension by default.
+      'image_loader_extension%': 0,
+
+      # Set NEON compilation flags.
+      'arm_neon%': 1,
+
+      # Detect NEON support at run-time.
+      'arm_neon_optional%': 0,
+
+      # Use libjpeg-turbo as the JPEG codec used by Chromium.
+      'use_libjpeg_turbo%': 1,
+
+      # Use system libjpeg. Note that the system's libjpeg will be used even if
+      # use_libjpeg_turbo is set.
+      'use_system_libjpeg%': 0,
+
+      # By default, component is set to static_library and it can be overridden
+      # by the GYP command line or by ~/.gyp/include.gypi.
+      'component%': 'static_library',
+
+      # /analyze is off by default on Windows because it is very slow and noisy.
+      # Enable with GYP_DEFINES=win_analyze=1
+      'win_analyze%': 0,
+
+      # Set to select the Title Case versions of strings in GRD files.
+      'use_titlecase_in_grd%': 0,
+
+      # Use translations provided by volunteers at launchpad.net.  This
+      # currently only works on Linux.
+      'use_third_party_translations%': 0,
+
+      # Remoting compilation is enabled by default. Set to 0 to disable.
+      'remoting%': 1,
+
+      # Configuration policy is enabled by default. Set to 0 to disable.
+      'configuration_policy%': 1,
+
+      # Variable safe_browsing is used to control the build time configuration
+      # for the safe browsing feature. Safe browsing can be compiled at 4
+      # different levels: 0 disables it, 1 enables it fully, 2 enables only UI
+      # and reporting features for use with Data Saver on Mobile, and 3 enables
+      # extended mobile protection via an external API.  When 3 is fully
+      # deployed, it will replace 2.
+      'safe_browsing%': 1,
+
+      # Web speech is enabled by default. Set to 0 to disable.
+      'enable_web_speech%': 1,
+
+      # 'Ok Google' hotwording is disabled by default in open source builds. Set
+      # to 1 to enable. (This will download a closed-source NaCl module at
+      # startup.) Chrome-branded builds have this enabled by default.
+      'enable_hotwording%': 0,
+
+      # Notifications are compiled in by default. Set to 0 to disable.
+      'notifications%' : 1,
+
+      # Use dsymutil to generate real .dSYM files on Mac. The default is 0 for
+      # regular builds and 1 for ASan builds.
+      'mac_want_real_dsym%': 'default',
+
+      # If this is set, the clang plugins used on the buildbot will be used.
+      # Run tools/clang/scripts/update.sh to make sure they are compiled.
+      # This causes 'clang_chrome_plugins_flags' to be set.
+      # Has no effect if 'clang' is not set as well.
+      'clang_use_chrome_plugins%': 1,
+
+      # Enable building with ASAN (Clang's -fsanitize=address option).
+      # -fsanitize=address only works with clang, but asan=1 implies clang=1
+      # See https://sites.google.com/a/chromium.org/dev/developers/testing/addresssanitizer
+      'asan%': 0,
+      'asan_blacklist%': '<(PRODUCT_DIR)/../../tools/memory/asan/blacklist.txt',
+      # Enable coverage gathering instrumentation in sanitizer tools. This flag
+      # also controls coverage granularity (1 for function-level coverage, 2
+      # for block-level coverage).
+      'sanitizer_coverage%': 0,
+      # Deprecated, only works if |sanitizer_coverage| isn't set.
+      # TODO(glider): remove this flag.
+      'asan_coverage%': 0,
+      # Enable intra-object-overflow detection in ASan (experimental).
+      'asan_field_padding%': 0,
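+
+      # For example, a sanitizer build is typically configured through
+      # GYP_DEFINES before regenerating the build files (a sketch; the exact
+      # defines depend on which tool you want, and the output directory name
+      # is just a common convention):
+      #   $ GYP_DEFINES='asan=1 sanitizer_coverage=1' gclient runhooks
+      #   $ ninja -C out/Release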
+
+      # Enable Chromium overrides of the default configurations for various
+      # dynamic tools (like ASan).
+      'use_sanitizer_options%': 0,
+
+      # Enable building with SyzyAsan.
+      # See https://code.google.com/p/sawbuck/wiki/SyzyASanHowTo
+      'syzyasan%': 0,
+
+      # Enable crash reporting via Kasko.
+      'kasko%': 0,
+
+      # Enable building with LSan (Clang's -fsanitize=leak option).
+      # -fsanitize=leak only works with clang, but lsan=1 implies clang=1
+      # See https://sites.google.com/a/chromium.org/dev/developers/testing/leaksanitizer
+      'lsan%': 0,
+
+      # Enable building with TSan (Clang's -fsanitize=thread option).
+      # -fsanitize=thread only works with clang, but tsan=1 implies clang=1
+      # See http://clang.llvm.org/docs/ThreadSanitizer.html
+      'tsan%': 0,
+      'tsan_blacklist%': '<(PRODUCT_DIR)/../../tools/memory/tsan_v2/ignores.txt',
+
+      # Enable building with MSan (Clang's -fsanitize=memory option).
+      # MemorySanitizer only works with clang, but msan=1 implies clang=1
+      # See http://clang.llvm.org/docs/MemorySanitizer.html
+      'msan%': 0,
+      'msan_blacklist%': '<(PRODUCT_DIR)/../../tools/msan/blacklist.txt',
+      # Track where uninitialized memory originates from. From fastest to
+      # slowest: 0 - no tracking, 1 - track only the initial allocation site, 2
+      # - track the chain of stores leading from allocation site to use site.
+      'msan_track_origins%': 2,
+
+      # Enable building with UBSan (Clang's -fsanitize=undefined option).
+      # -fsanitize=undefined only works with clang, but ubsan=1 implies clang=1
+      # See http://clang.llvm.org/docs/UsersManual.html
+      'ubsan%': 0,
+      'ubsan_blacklist%': '<(PRODUCT_DIR)/../../tools/ubsan/blacklist.txt',
+      'ubsan_vptr_blacklist%': '<(PRODUCT_DIR)/../../tools/ubsan/vptr_blacklist.txt',
+
+      # Enable building with UBsan's vptr (Clang's -fsanitize=vptr option).
+      # -fsanitize=vptr only works with clang, but ubsan_vptr=1 implies clang=1
+      'ubsan_vptr%': 0,
+
+      # Use dynamic libraries instrumented by one of the sanitizers
+      # instead of the standard system libraries. Set this flag to build the
+      # libraries from source.
+      'use_instrumented_libraries%': 0,
+
+      # Use dynamic libraries instrumented by one of the sanitizers
+      # instead of the standard system libraries. Set this flag to download
+      # prebuilt binaries from GCS.
+      'use_prebuilt_instrumented_libraries%': 0,
+
+      # Use libc++ (third_party/libc++ and third_party/libc++abi) instead of
+      # libstdc++ as the standard library. This is intended for use in
+      # instrumented builds.
+      'use_custom_libcxx%': 0,
+
+      # Use system libc++ instead of the default C++ library, usually libstdc++.
+      # This is intended for iOS builds only.
+      'use_system_libcxx%': 0,
+
+      # Use a modified version of Clang to intercept allocated types and sizes
+      # for allocated objects. clang_type_profiler=1 implies clang=1.
+      # See http://dev.chromium.org/developers/deep-memory-profiler/cpp-object-type-identifier
+      # TODO(dmikurube): Support mac.  See http://crbug.com/123758#c11
+      'clang_type_profiler%': 0,
+
+      # Set to true to instrument the code with function call logger.
+      # See src/third_party/cygprofile/cyg-profile.cc for details.
+      'order_profiling%': 0,
+
+      # Use the provided profiled order file when linking the Chrome image.
+      # This makes Chrome faster by making better use of the CPU cache when
+      # executing code.
+      # This is known as PGO (profile guided optimization).
+      # See https://sites.google.com/a/google.com/chrome-msk/dev/boot-speed-up-effort
+      'order_text_section%' : "",
+
+      # Set to 1 to compile with the -fPIC cflag on Linux. This is a must for
+      # shared libraries on Linux x86-64 and arm, plus ASLR.
+      'linux_fpic%': 1,
+
+      # Whether one-click signin is enabled or not.
+      'enable_one_click_signin%': 0,
+
+      # Whether to back up data before sync.
+      'enable_pre_sync_backup%': 0,
+
+      # Enable Chrome browser extensions
+      'enable_extensions%': 1,
+
+      # Enable Google Now.
+      'enable_google_now%': 1,
+
+      # Enable basic printing support and UI.
+      'enable_basic_printing%': 1,
+
+      # Enable printing with print preview. It does not imply
+      # enable_basic_printing. It's possible to build Chrome with preview only.
+      'enable_print_preview%': 1,
+
+      # Set the version of CLD.
+      #   0: Don't specify the version. This option is for Finch testing.
+      #   1: Use only CLD1.
+      #   2: Use only CLD2.
+      'cld_version%': 2,
+
+      # For CLD2, the size of the tables that should be included in the build.
+      # Only evaluated if cld_version == 2 or if building the CLD2 dynamic data
+      # tool explicitly.
+      # See third_party/cld_2/cld_2.gyp for more information.
+      #   0: Small tables, lower accuracy
+      #   2: Large tables, high accuracy
+      'cld2_table_size%': 2,
+
+      # Enable spell checker.
+      'enable_spellcheck%': 1,
+
+      # Use the operating system spellchecker, e.g. NSSpellChecker on Mac or
+      # SpellCheckerSession on Android.
+      'use_platform_spellchecker%': 0,
+
+      # Webrtc compilation is enabled by default. Set to 0 to disable.
+      'enable_webrtc%': 1,
+
+      # Media router support is enabled by default. Set to 0 to disable.
+      'enable_media_router%': 1,
+
+      # Enables use of the session service, which is enabled by default.
+      # Support for disabling depends on the platform.
+      'enable_session_service%': 1,
+
+      # Enables theme support, which is enabled by default.  Support for
+      # disabling depends on the platform.
+      'enable_themes%': 1,
+
+      # Enables autofill dialog and associated features; disabled by default.
+      'enable_autofill_dialog%' : 0,
+
+      # Defaults Wallet integration in Autofill dialog to use production
+      # servers. Unofficial builds won't have the proper API keys.
+      'enable_prod_wallet_service%': 0,
+
+      # Enables support for background apps.
+      'enable_background%': 1,
+
+      # Enable the task manager by default.
+      'enable_task_manager%': 1,
+
+      # Enables used resource whitelist generation; disabled by default.
+      'enable_resource_whitelist_generation%': 0,
+
+      # Enable FILE support by default.
+      'disable_file_support%': 0,
+
+      # Enable FTP support by default.
+      'disable_ftp_support%': 0,
+
+      # Use native android functions in place of ICU.  Not supported by most
+      # components.
+      'use_icu_alternatives_on_android%': 0,
+
+      # Use of precompiled headers on Windows.
+      #
+      # This variable may be explicitly set to 1 (enabled) or 0
+      # (disabled) in ~/.gyp/include.gypi or via the GYP command line.
+      # This setting will override the default.
+      #
+      # See
+      # http://code.google.com/p/chromium/wiki/WindowsPrecompiledHeaders
+      # for details.
+      'chromium_win_pch%': 0,
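+
+      # For example, either of the following overrides the default above
+      # (a sketch; both mechanisms are the ones described in the comment):
+      #   GYP_DEFINES='chromium_win_pch=0' gclient runhooks
+      # or, in ~/.gyp/include.gypi:
+      #   'variables': { 'chromium_win_pch%': 0 },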
+
+      # Clang stuff.
+      'make_clang_dir%': 'third_party/llvm-build/Release+Asserts',
+      # Set this to true when building with Clang.
+      # See http://code.google.com/p/chromium/wiki/Clang for details.
+      # If this is set, clang is used as both host and target compiler in
+      # cross-compile builds.
+      'clang%': 0,
+
+      # Use experimental lld linker instead of the platform's default linker.
+      'use_lld%': 0,
+
+      # Enable plugin installation by default.
+      'enable_plugin_installation%': 1,
+
+      # Specifies whether to use canvas_skia.cc in place of platform
+      # specific implementations of gfx::Canvas. Affects text drawing in the
+      # Chrome UI.
+      # TODO(asvitkine): Enable this on all platforms and delete this flag.
+      #                  http://crbug.com/105550
+      'use_canvas_skia%': 0,
+
+      # Set to "tsan", "memcheck", or "drmemory" to configure the build to work
+      # with one of those tools.
+      'build_for_tool%': '',
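+
+      # For example (a sketch):
+      #   GYP_DEFINES='build_for_tool=drmemory' gclient runhooks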
+
+      'wix_path%': '<(DEPTH)/third_party/wix',
+
+      # Supervised users are enabled by default.
+      'enable_supervised_users%': 1,
+
+      # Platform sends memory pressure signals natively.
+      'native_memory_pressure_signals%': 0,
+
+      'enable_mdns%' : 0,
+      'enable_service_discovery%': 0,
+      'enable_wifi_bootstrapping%': 0,
+      'enable_hangout_services_extension%': 0,
+
+      # Enable the Syzygy optimization step.
+      'syzygy_optimize%': 0,
+
+      # Enable hole punching for the protected video.
+      'video_hole%': 0,
+
+      # Automatically select platforms under ozone. Turn this off to
+      # build only explicitly selected platforms.
+      'ozone_auto_platforms%': 1,
+
+      # If this is set, clang is used as the host compiler, but not as the
+      # target compiler. Always do this by default.
+      'host_clang%': 1,
+
+      # Variables to control Link-Time Optimization (LTO).
+      # On Android, the variable use_lto enables LTO on code compiled with -Os,
+      # and use_lto_o2 enables LTO on code compiled with -O2. On other
+      # platforms, use_lto enables LTO in all translation units, and use_lto_o2
+      # has no effect.
+      #
+      # On Linux and Android, when using LLVM LTO, the script
+      # build/download_gold_plugin.py must be run to download a linker plugin.
+      # On Mac, LLVM needs to be built from scratch using
+      # tools/clang/scripts/update.py and the absolute path to
+      # third_party/llvm-build/Release+Asserts/lib must be added to
+      # $DYLD_LIBRARY_PATH to pick up the right version of the linker plugin.
+      #
+      # On Android, the variables must *not* be enabled at the same time.
+      # In this case LTO would 'merge' the optimization flags at link-time,
+      # which would lead to all code being optimized with -O2. See crbug.com/407544
+      'use_lto%': 0,
+      'use_lto_o2%': 0,
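+
+      # For example, an LTO build on Linux might be set up as follows (a
+      # sketch of the steps described above; the ninja output directory is
+      # just a common convention):
+      #   $ python build/download_gold_plugin.py
+      #   $ GYP_DEFINES='clang=1 use_lto=1' gclient runhooks
+      #   $ ninja -C out/Release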
+
+      # Allowed level of identical code folding in the gold linker.
+      'gold_icf_level%': 'all',
+
+      # Libxkbcommon usage.
+      'use_xkbcommon%': 0,
+
+      # Control Flow Integrity for virtual calls and casts.
+      # See http://clang.llvm.org/docs/ControlFlowIntegrity.html
+      'cfi_vptr%': 0,
+
+      'cfi_blacklist%': '<(PRODUCT_DIR)/../../tools/cfi/blacklist.txt',
+
+      # Whether the entire browser uses toolkit-views on Mac instead of Cocoa.
+      'mac_views_browser%': 0,
+
+      # By default, use ICU data file (icudtl.dat).
+      'icu_use_data_file_flag%': 1,
+
+      # Turn on JNI generation optimizations by default.
+      'optimize_jni_generation%': 1,
+
+      'conditions': [
+        # A flag for POSIX platforms
+        ['OS=="win"', {
+          'os_posix%': 0,
+        }, {
+          'os_posix%': 1,
+        }],
+
+        # A flag for BSD platforms
+        ['OS=="freebsd" or OS=="openbsd"', {
+          'os_bsd%': 1,
+        }, {
+          'os_bsd%': 0,
+        }],
+
+        # NSS usage.
+        ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris")', {
+          'use_nss_certs%': 1,
+        }, {
+          'use_nss_certs%': 0,
+        }],
+
+        # libudev usage.  This currently only affects the content layer.
+        ['OS=="linux" and embedded==0', {
+          'use_udev%': 1,
+        }, {
+          'use_udev%': 0,
+        }],
+
+        # Flags to use X11 on non-Mac POSIX platforms.
+        ['OS=="win" or OS=="mac" or OS=="ios" or OS=="android" or use_ozone==1', {
+          'use_x11%': 0,
+        }, {
+          'use_x11%': 1,
+        }],
+
+        # Flags to use glib.
+        ['OS=="win" or OS=="mac" or OS=="ios" or OS=="android" or use_ozone==1', {
+          'use_glib%': 0,
+        }, {
+          'use_glib%': 1,
+        }],
+
+        # Flags to use pango and cairo.
+        ['OS=="win" or OS=="mac" or OS=="ios" or OS=="android" or embedded==1', {
+          'use_pango%': 0,
+          'use_cairo%': 0,
+        }, {
+          'use_pango%': 1,
+          'use_cairo%': 1,
+        }],
+
+        # DBus usage.
+        ['OS=="linux" and embedded==0', {
+          'use_dbus%': 1,
+        }, {
+          'use_dbus%': 0,
+        }],
+
+        # We always use skia text rendering in Aura on Windows, since GDI
+        # doesn't agree with our BackingStore.
+        # TODO(beng): remove once skia text rendering is on by default.
+        ['use_aura==1 and OS=="win"', {
+          'enable_skia_text%': 1,
+        }],
+
+        # A flag to enable or disable our compile-time dependency
+        # on gnome-keyring. If that dependency is disabled, no gnome-keyring
+        # support will be available. This option is useful
+        # for Linux distributions and for Aura.
+        ['OS!="linux" or chromeos==1', {
+          'use_gnome_keyring%': 0,
+        }, {
+          'use_gnome_keyring%': 1,
+        }],
+
+        ['OS=="mac" or OS=="ios"', {
+          # Mac and iOS want Title Case strings
+          'use_titlecase_in_grd%': 1,
+        }],
+
+        # Enable loader extensions on Chrome OS.
+        ['chromeos==1', {
+          'image_loader_extension%': 1,
+        }, {
+          'image_loader_extension%': 0,
+        }],
+
+        ['OS=="win" or OS=="mac" or (OS=="linux" and chromeos==0)', {
+          'enable_one_click_signin%': 1,
+          'enable_pre_sync_backup%': 1,
+        }],
+
+        ['OS=="android"', {
+          'enable_extensions%': 0,
+          'enable_google_now%': 0,
+          'cld_version%': 1,
+          'enable_spellcheck%': 0,
+          'enable_themes%': 0,
+          'remoting%': 0,
+          'arm_neon%': 0,
+          'arm_neon_optional%': 1,
+          'native_memory_pressure_signals%': 1,
+          'enable_basic_printing%': 1,
+          'enable_print_preview%': 0,
+          'enable_task_manager%': 0,
+          'video_hole%': 1,
+        }],
+
+        # OS X has a built-in spellchecker that can be utilized.
+        ['OS=="mac"', {
+          'use_platform_spellchecker%': 1,
+        }],
+
+        # Android OS includes support for proprietary codecs regardless of
+        # whether we build Chromium or Google Chrome. We also ship Google
+        # Chrome and Chromecast with proprietary codecs.
+        ['OS=="android" or branding=="Chrome" or chromecast==1', {
+          'proprietary_codecs%': 1,
+        }, {
+          'proprietary_codecs%': 0,
+        }],
+
+        ['OS=="mac" or OS=="ios"', {
+          'native_memory_pressure_signals%': 1,
+        }],
+
+        # Enable autofill dialog when not on iOS.
+        ['OS!="ios"', {
+          'enable_autofill_dialog%': 1,
+        }],
+
+        ['buildtype=="Official"', {
+          'enable_prod_wallet_service%': 1,
+        }],
+
+        ['branding=="Chrome"', {
+          'enable_hotwording%': 1,
+        }],
+
+        ['OS=="android"', {
+          'enable_webrtc%': 1,
+        }],
+
+        ['OS=="ios"', {
+          'disable_ftp_support%': 1,
+          'enable_extensions%': 0,
+          'enable_google_now%': 0,
+          'cld_version%': 2,
+          'cld2_table_size%': 0,
+          'enable_basic_printing%': 0,
+          'enable_print_preview%': 0,
+          'enable_session_service%': 0,
+          'enable_spellcheck%': 0,
+          'enable_themes%': 0,
+          'enable_webrtc%': 0,
+          'notifications%': 0,
+          'remoting%': 0,
+          'safe_browsing%': 0,
+          'enable_supervised_users%': 0,
+          'enable_task_manager%': 0,
+          'use_system_libcxx%': 1,
+        }],
+
+        # Use GPU accelerated cross process image transport by default
+        # on Linux builds with the Aura window manager.
+        ['use_aura==1 and OS=="linux"', {
+          'ui_compositor_image_transport%': 1,
+        }, {
+          'ui_compositor_image_transport%': 0,
+        }],
+
+        # Turn precompiled headers on by default.
+        ['OS=="win" and buildtype!="Official"', {
+          'chromium_win_pch%': 1
+        }],
+
+        ['chromeos==1 or OS=="android" or OS=="ios" or desktop_linux==1', {
+          'enable_plugin_installation%': 0,
+        }, {
+          'enable_plugin_installation%': 1,
+        }],
+
+        # Whether PPAPI is enabled.
+        ['OS=="android" or OS=="ios" or (embedded==1 and chromecast==0)', {
+          'enable_plugins%': 0,
+        }, {
+          'enable_plugins%': 1,
+        }],
+
+        # linux_use_bundled_gold: whether to use the gold linker binary checked
+        # into third_party/binutils.  Force this off via GYP_DEFINES when you
+        # are using a custom toolchain and need to control -B in ldflags.
+        # Do not use 32-bit gold on 32-bit hosts as it runs out of address
+        # space for component=static_library builds.
+        ['(OS=="linux" or OS=="android") and (target_arch=="x64" or target_arch=="arm")', {
+          'linux_use_bundled_gold%': 1,
+        }, {
+          'linux_use_bundled_gold%': 0,
+        }],
+
+        # linux_use_bundled_binutils: whether to use the binary binutils
+        # checked into third_party/binutils.  These are not multi-arch so cannot
+        # be used except on x86 and x86-64 (the only two architectures which
+        # are currently checked in).  Force this off via GYP_DEFINES when you
+        # are using a custom toolchain and need to control -B in cflags.
+        ['OS=="linux" and (target_arch=="x64")', {
+          'linux_use_bundled_binutils%': 1,
+        }, {
+          'linux_use_bundled_binutils%': 0,
+        }],
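+
+        # For example, to force both off when using a custom toolchain
+        # (a sketch of the override described in the comments above):
+        #   GYP_DEFINES='linux_use_bundled_gold=0 linux_use_bundled_binutils=0' gclient runhooks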
+
+        # linux_use_gold_flags: whether to use build flags that rely on gold.
+        # On by default for x64 Linux.
+        ['OS=="linux" and target_arch=="x64"', {
+          'linux_use_gold_flags%': 1,
+        }, {
+          'linux_use_gold_flags%': 0,
+        }],
+
+        # linux_use_debug_fission: whether to use split DWARF debug info
+        # files. This can reduce link time significantly, but is incompatible
+        # with some utilities such as icecc and ccache. Requires gold and
+        # gcc >= 4.8 or clang.
+        # http://gcc.gnu.org/wiki/DebugFission
+        ['OS=="linux" and target_arch=="x64"', {
+          'linux_use_debug_fission%': 1,
+        }, {
+          'linux_use_debug_fission%': 0,
+        }],
+
+        ['OS=="android" or OS=="ios"', {
+          'enable_captive_portal_detection%': 0,
+          'enable_media_router%': 0,
+        }, {
+          'enable_captive_portal_detection%': 1,
+          'enable_media_router%': 1,
+        }],
+
+        # Enable Skia UI text drawing incrementally on different platforms.
+        # http://crbug.com/105550
+        #
+        # On Aura, this allows per-tile painting to be used in the browser
+        # compositor.
+        ['OS!="android" and OS!="ios"', {
+          'use_canvas_skia%': 1,
+        }],
+
+        ['chromeos==1', {
+          'enable_basic_printing%': 0,
+          'enable_print_preview%': 1,
+        }],
+
+        # Do not enable the Settings App on ChromeOS.
+        ['enable_app_list==1 and chromeos==0', {
+          'enable_settings_app%': 1,
+        }, {
+          'enable_settings_app%': 0,
+        }],
+
+        # Whether tests targets should be run, archived or just have the
+        # dependencies verified. All the tests targets have the '_run' suffix,
+        # e.g. base_unittests_run runs the target base_unittests. The test
+        # target always calls tools/swarming_client/isolate.py. See the script's
+        # --help for more information. Meant to be overridden with GYP_DEFINES.
+        # TODO(maruel): Remove the conditions as more configurations are
+        # supported.
+        ['OS!="ios" and OS!="android" and chromeos==0', {
+          'test_isolation_mode%': 'check',
+        }, {
+          'test_isolation_mode%': 'noop',
+        }],
+        # Whether Android build uses OpenMAX DL FFT.
+        ['OS=="android" and ((target_arch=="arm" and arm_version >= 7) or target_arch=="ia32" or target_arch=="x64" or target_arch=="arm64" or target_arch=="mipsel")', {
+          # Currently only supported on Android ARMv7+, ARM64, ia32, x64 and mipsel.
+          # When enabled, this will also enable WebAudio support on
+          # Android for these architectures.  Default is enabled.  Whether
+          # WebAudio is actually available depends on runtime settings
+          # and flags.
+          'use_openmax_dl_fft%': 1,
+        }, {
+          'use_openmax_dl_fft%': 0,
+        }],
+        ['OS=="win" or OS=="linux"', {
+          'enable_mdns%': 1,
+        }],
+
+        # Disable various features by default on embedded.
+        ['embedded==1', {
+          'remoting%': 0,
+          'enable_basic_printing%': 0,
+          'enable_print_preview%': 0,
+        }],
+
+        ['OS=="win" or OS=="mac"', {
+          'enable_wifi_bootstrapping%' : 1,
+        }],
+
+        # Path to sas.dll, which provides the SendSAS function.
+        # http://msdn.microsoft.com/en-us/library/windows/desktop/dd979761(v=vs.85).aspx
+        ['target_arch=="x64"', {
+          'sas_dll_path%': '<(DEPTH)/third_party/platformsdk_win7/files/redist/amd64',
+        }, {
+          'sas_dll_path%': '<(DEPTH)/third_party/platformsdk_win7/files/redist/x86',
+        }],
+
+        ['sysroot!=""', {
+          'pkg-config': '<(chroot_cmd) <(DEPTH)/build/linux/pkg-config-wrapper "<(sysroot)" "<(target_arch)" "<(system_libdir)"',
+        }, {
+          'pkg-config': 'pkg-config'
+        }],
+      ],
+
+      # WebVR support disabled until platform implementations have been added
+      'enable_webvr%': 0,
+
+      # Setting this to '0' will cause V8's startup snapshot to be
+      # embedded in the binary instead of being external files.
+      'v8_use_external_startup_data%': 1,
+
+      # Set this to 1 to enable use of concatenated impulse responses
+      # for the HRTF panner in WebAudio.
+      'use_concatenated_impulse_responses': 1,
+
+      # You can set the variable 'use_official_google_api_keys' to 1
+      # to use the Google-internal file containing official API keys
+      # for Google Chrome even in a developer build.  Setting this
+      # variable explicitly to 1 will cause your build to fail if the
+      # internal file is missing.
+      #
+      # The variable is documented here, but not handled in this file;
+      # see //google_apis/determine_use_official_keys.gypi for the
+      # implementation.
+      #
+      # Set the variable to 0 to not use the internal file, even when
+      # it exists in your checkout.
+      #
+      # Leave it unset in your include.gypi to have the variable
+      # implicitly set to 1 if you have
+      # src/google_apis/internal/google_chrome_api_keys.h in your
+      # checkout, and implicitly set to 0 if not.
+      #
+      # Note that official builds always behave as if the variable
+      # was explicitly set to 1, i.e. they always use official keys,
+      # and will fail to build if the internal file is missing.
+      #
+      # NOTE: You MUST NOT explicitly set the variable to 2 in your
+      # include.gypi or by other means. Due to subtleties of GYP, this
+      # is not the same as leaving the variable unset, even though its
+      # default value in
+      # //google_apis/determine_use_official_keys.gypi is 2.
+
+      # Set these to bake the specified API keys and OAuth client
+      # IDs/secrets into your build.
+      #
+      # If you create a build without values baked in, you can instead
+      # set environment variables to provide the keys at runtime (see
+      # src/google_apis/google_api_keys.h for details).  Features that
+      # require server-side APIs may fail to work if no keys are
+      # provided.
+      #
+      # Note that if you are building an official build or if
+      # use_official_google_api_keys has been set to 1 (explicitly or
+      # implicitly), these values will be ignored and the official
+      # keys will be used instead.
+      'google_api_key%': '',
+      'google_default_client_id%': '',
+      'google_default_client_secret%': '',
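+
+      # For example, in ~/.gyp/include.gypi (all values below are
+      # placeholders, not real credentials):
+      #   'variables': {
+      #     'google_api_key%': 'YOUR_API_KEY',
+      #     'google_default_client_id%': 'YOUR_CLIENT_ID',
+      #     'google_default_client_secret%': 'YOUR_CLIENT_SECRET',
+      #   },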
+      # Native Client is enabled by default.
+      'disable_nacl%': '0',
+
+      # Sets the default version name and code for the Android app; by default
+      # we do a developer build.
+      'android_app_version_name%': 'Developer Build',
+      'android_app_version_code%': 1,
+    },
+
+    # Copy conditionally-set variables out one scope.
+    'branding%': '<(branding)',
+    'branding_path_component%': '<(branding_path_component)',
+    'buildtype%': '<(buildtype)',
+    'target_arch%': '<(target_arch)',
+    'target_subarch%': '<(target_subarch)',
+    'mips_arch_variant%': '<(mips_arch_variant)',
+    'mips_dsp_rev%': '<(mips_dsp_rev)',
+    'host_arch%': '<(host_arch)',
+    'toolkit_views%': '<(toolkit_views)',
+    'ui_compositor_image_transport%': '<(ui_compositor_image_transport)',
+    'use_aura%': '<(use_aura)',
+    'use_ash%': '<(use_ash)',
+    'use_cras%': '<(use_cras)',
+    'use_libpci%': '<(use_libpci)',
+    'use_openssl%': '<(use_openssl)',
+    'use_openssl_certs%': '<(use_openssl_certs)',
+    'use_nss_certs%': '<(use_nss_certs)',
+    'use_udev%': '<(use_udev)',
+    'os_bsd%': '<(os_bsd)',
+    'os_posix%': '<(os_posix)',
+    'use_dbus%': '<(use_dbus)',
+    'use_glib%': '<(use_glib)',
+    'use_pango%': '<(use_pango)',
+    'use_cairo%': '<(use_cairo)',
+    'use_ozone%': '<(use_ozone)',
+    'use_ozone_evdev%': '<(use_ozone_evdev)',
+    'use_xkbcommon%': '<(use_xkbcommon)',
+    'use_clipboard_aurax11%': '<(use_clipboard_aurax11)',
+    'desktop_linux%': '<(desktop_linux)',
+    'use_x11%': '<(use_x11)',
+    'use_gnome_keyring%': '<(use_gnome_keyring)',
+    'linux_fpic%': '<(linux_fpic)',
+    'chromeos%': '<(chromeos)',
+    'chromecast%': '<(chromecast)',
+    'enable_viewport%': '<(enable_viewport)',
+    'enable_hidpi%': '<(enable_hidpi)',
+    'enable_topchrome_md%': '<(enable_topchrome_md)',
+    'image_loader_extension%': '<(image_loader_extension)',
+    'fastbuild%': '<(fastbuild)',
+    'dont_embed_build_metadata%': '<(dont_embed_build_metadata)',
+    'win_z7%': '<(win_z7)',
+    'dcheck_always_on%': '<(dcheck_always_on)',
+    'tracing_like_official_build%': '<(tracing_like_official_build)',
+    'arm_version%': '<(arm_version)',
+    'arm_neon%': '<(arm_neon)',
+    'arm_neon_optional%': '<(arm_neon_optional)',
+    'sysroot%': '<(sysroot)',
+    'pkg-config%': '<(pkg-config)',
+    'chroot_cmd%': '<(chroot_cmd)',
+    'system_libdir%': '<(system_libdir)',
+    'component%': '<(component)',
+    'win_analyze%': '<(win_analyze)',
+    'enable_resource_whitelist_generation%': '<(enable_resource_whitelist_generation)',
+    'use_titlecase_in_grd%': '<(use_titlecase_in_grd)',
+    'use_third_party_translations%': '<(use_third_party_translations)',
+    'remoting%': '<(remoting)',
+    'enable_one_click_signin%': '<(enable_one_click_signin)',
+    'enable_pre_sync_backup%': '<(enable_pre_sync_backup)',
+    'enable_media_router%': '<(enable_media_router)',
+    'enable_webrtc%': '<(enable_webrtc)',
+    'chromium_win_pch%': '<(chromium_win_pch)',
+    'configuration_policy%': '<(configuration_policy)',
+    'safe_browsing%': '<(safe_browsing)',
+    'enable_web_speech%': '<(enable_web_speech)',
+    'enable_hotwording%': '<(enable_hotwording)',
+    'notifications%': '<(notifications)',
+    'clang_use_chrome_plugins%': '<(clang_use_chrome_plugins)',
+    'mac_want_real_dsym%': '<(mac_want_real_dsym)',
+    'asan%': '<(asan)',
+    'asan_blacklist%': '<(asan_blacklist)',
+    'asan_coverage%': '<(asan_coverage)',
+    'sanitizer_coverage%': '<(sanitizer_coverage)',
+    'asan_field_padding%': '<(asan_field_padding)',
+    'use_sanitizer_options%': '<(use_sanitizer_options)',
+    'syzyasan%': '<(syzyasan)',
+    'kasko%': '<(kasko)',
+    'syzygy_optimize%': '<(syzygy_optimize)',
+    'lsan%': '<(lsan)',
+    'msan%': '<(msan)',
+    'msan_blacklist%': '<(msan_blacklist)',
+    'msan_track_origins%': '<(msan_track_origins)',
+    'tsan%': '<(tsan)',
+    'tsan_blacklist%': '<(tsan_blacklist)',
+    'ubsan%': '<(ubsan)',
+    'ubsan_blacklist%': '<(ubsan_blacklist)',
+    'ubsan_vptr_blacklist%': '<(ubsan_vptr_blacklist)',
+    'ubsan_vptr%': '<(ubsan_vptr)',
+    'use_instrumented_libraries%': '<(use_instrumented_libraries)',
+    'use_prebuilt_instrumented_libraries%': '<(use_prebuilt_instrumented_libraries)',
+    'use_custom_libcxx%': '<(use_custom_libcxx)',
+    'use_system_libcxx%': '<(use_system_libcxx)',
+    'clang_type_profiler%': '<(clang_type_profiler)',
+    'order_profiling%': '<(order_profiling)',
+    'order_text_section%': '<(order_text_section)',
+    'enable_extensions%': '<(enable_extensions)',
+    'enable_plugin_installation%': '<(enable_plugin_installation)',
+    'enable_plugins%': '<(enable_plugins)',
+    'enable_session_service%': '<(enable_session_service)',
+    'enable_themes%': '<(enable_themes)',
+    'enable_autofill_dialog%': '<(enable_autofill_dialog)',
+    'enable_prod_wallet_service%': '<(enable_prod_wallet_service)',
+    'enable_background%': '<(enable_background)',
+    'linux_use_bundled_gold%': '<(linux_use_bundled_gold)',
+    'linux_use_bundled_binutils%': '<(linux_use_bundled_binutils)',
+    'linux_use_gold_flags%': '<(linux_use_gold_flags)',
+    'linux_use_debug_fission%': '<(linux_use_debug_fission)',
+    'use_canvas_skia%': '<(use_canvas_skia)',
+    'test_isolation_mode%': '<(test_isolation_mode)',
+    'enable_basic_printing%': '<(enable_basic_printing)',
+    'enable_print_preview%': '<(enable_print_preview)',
+    'enable_spellcheck%': '<(enable_spellcheck)',
+    'use_platform_spellchecker%': '<(use_platform_spellchecker)',
+    'enable_google_now%': '<(enable_google_now)',
+    'cld_version%': '<(cld_version)',
+    'cld2_table_size%': '<(cld2_table_size)',
+    'enable_captive_portal_detection%': '<(enable_captive_portal_detection)',
+    'disable_file_support%': '<(disable_file_support)',
+    'disable_ftp_support%': '<(disable_ftp_support)',
+    'use_icu_alternatives_on_android%': '<(use_icu_alternatives_on_android)',
+    'enable_task_manager%': '<(enable_task_manager)',
+    'sas_dll_path%': '<(sas_dll_path)',
+    'wix_path%': '<(wix_path)',
+    'use_libjpeg_turbo%': '<(use_libjpeg_turbo)',
+    'use_system_libjpeg%': '<(use_system_libjpeg)',
+    'android_channel%': '<(android_channel)',
+    'icu_use_data_file_flag%': '<(icu_use_data_file_flag)',
+    'gyp_managed_install%': 0,
+    'create_standalone_apk%': 1,
+    'enable_app_list%': '<(enable_app_list)',
+    'use_default_render_theme%': '<(use_default_render_theme)',
+    'enable_settings_app%': '<(enable_settings_app)',
+    'google_api_key%': '<(google_api_key)',
+    'google_default_client_id%': '<(google_default_client_id)',
+    'google_default_client_secret%': '<(google_default_client_secret)',
+    'enable_supervised_users%': '<(enable_supervised_users)',
+    'native_memory_pressure_signals%': '<(native_memory_pressure_signals)',
+    'enable_mdns%' : '<(enable_mdns)',
+    'enable_service_discovery%' : '<(enable_service_discovery)',
+    'enable_wifi_bootstrapping%': '<(enable_wifi_bootstrapping)',
+    'enable_hangout_services_extension%' : '<(enable_hangout_services_extension)',
+    'proprietary_codecs%': '<(proprietary_codecs)',
+    'use_goma%': '<(use_goma)',
+    'gomadir%': '<(gomadir)',
+    'use_lto%': '<(use_lto)',
+    'use_lto_o2%': '<(use_lto_o2)',
+    'gold_icf_level%': '<(gold_icf_level)',
+    'video_hole%': '<(video_hole)',
+    'v8_use_external_startup_data%': '<(v8_use_external_startup_data)',
+    'cfi_vptr%': '<(cfi_vptr)',
+    'cfi_blacklist%': '<(cfi_blacklist)',
+    'mac_views_browser%': '<(mac_views_browser)',
+    'android_app_version_name%': '<(android_app_version_name)',
+    'android_app_version_code%': '<(android_app_version_code)',
+    'enable_webvr%': '<(enable_webvr)',
+
+    # Turns on compiler optimizations in V8 in Debug build.
+    'v8_optimized_debug%': 1,
+
+    # Use system protobuf instead of bundled one.
+    'use_system_protobuf%': 0,
+
+    # Use system yasm instead of bundled one.
+    'use_system_yasm%': 0,
+
+    # Use system ICU instead of bundled one.
+    'use_system_icu%' : 0,
+
+    # Default to enabled PIE; this is important for ASLR but we may need to be
+    # able to turn it off for various reasons.
+    'linux_disable_pie%': 0,
+
+    # The release channel that this build targets. This is used to restrict
+    # channel-specific build options, like which installer packages to create.
+    # The default is 'all', which does no channel-specific filtering.
+    'channel%': 'all',
+
+    # Override chromium_mac_pch and set it to 0 to suppress the use of
+    # precompiled headers on the Mac.  Prefix header injection may still be
+    # used, but prefix headers will not be precompiled.  This is useful when
+    # using distcc to distribute a build to compile slaves that don't
+    # share the same compiler executable as the system driving the compilation,
+    # because precompiled headers rely on pointers into a specific compiler
+    # executable's image.  Setting this to 0 is needed to use an experimental
+    # Linux-Mac cross compiler distcc farm.
+    'chromium_mac_pch%': 1,
+
+    # The default value for mac_strip in target_defaults. This cannot be
+    # set there, per the comment about variable% in a target_defaults.
+    'mac_strip_release%': 0,
+
+    # Set to 1 to enable java code coverage. Instruments classes during build
+    # to produce .ec files during runtime.
+    'emma_coverage%': 0,
+
+    # EMMA filter string consisting of a list of inclusion/exclusion patterns
+    # separated with whitespace and/or comma. Only has effect if
+    # 'emma_coverage=1'.
+    'emma_filter%': '',
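+
+    # For example, a hypothetical filter that instruments Chromium classes but
+    # skips tests ('-' prefixes an exclusion pattern):
+    #   'emma_filter%': 'org.chromium.*,-*Test*',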
+
+    # Set to 1 to enable running Android lint on java/class files.
+    'android_lint%': 1,
+
+    # Although base/allocator lets you select a heap library via an
+    # environment variable, the libcmt shim it uses sometimes gets in
+    # the way.  To disable it entirely, and switch to normal msvcrt, add e.g.
+    #  'win_use_allocator_shim': 0,
+    #  'win_release_RuntimeLibrary': 2
+    # to ~/.gyp/include.gypi, run gclient runhooks --force, and do a release build.
+    'win_use_allocator_shim%': 1, # 1 = shim allocator via libcmt; 0 = msvcrt
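+
+    # A complete ~/.gyp/include.gypi sketch of the override described above:
+    #   {
+    #     'variables': {
+    #       'win_use_allocator_shim': 0,
+    #       'win_release_RuntimeLibrary': 2,
+    #     },
+    #   }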
+
+    # TODO(bradnelson): eliminate this when possible.
+    # To allow local gyp files to prevent release.vsprops from being included.
+    # Yes(1) means include release.vsprops.
+    # Once all vsprops settings are migrated into gyp, this can go away.
+    'msvs_use_common_release%': 1,
+
+    # TODO(bradnelson): eliminate this when possible.
+    # To allow local gyp files to override additional linker options for msvs.
+    # Yes(1) means use the common linker options.
+    'msvs_use_common_linker_extras%': 1,
+
+    # TODO(sgk): eliminate this if possible.
+    # It would be nicer to support this via a setting in 'target_defaults'
+    # in chrome/app/locales/locales.gypi overriding the setting in the
+    # 'Debug' configuration in the 'target_defaults' dict below,
+    # but that doesn't work as we'd like.
+    'msvs_debug_link_incremental%': '2',
+
+    # Needed for some of the largest modules.
+    'msvs_debug_link_nonincremental%': '1',
+
+    # Turns on Use Library Dependency Inputs for linking chrome.dll on Windows
+    # to get incremental linking to be faster in debug builds.
+    'incremental_chrome_dll%': '0',
+
+    # Experimental setting to break chrome.dll into multiple pieces based on
+    # process type.
+    'chrome_multiple_dll%': '0',
+
+    # Experimental setting to optimize Chrome's DLLs with PGO.
+    'chrome_pgo_phase%': '0',
+
+    # Whether the VS xtree header has been patched to disable warning 4702. If
+    # it has, then we don't need to disable 4702 (unreachable code warning).
+    # The patch is preapplied to the internal toolchain and hence all bots.
+    'msvs_xtree_patched%': '<!pymod_do_main(win_is_xtree_patched)',
+
+    # Clang stuff.
+    'clang%': '<(clang)',
+    'host_clang%': '<(host_clang)',
+    'make_clang_dir%': '<(make_clang_dir)',
+    'use_lld%': '<(use_lld)',
+
+    # Control which version of clang to use when building for iOS.  If set to
+    # '1', uses the version of clang that ships with Xcode.  If set to '0', uses
+    # the version of clang that ships with the Chromium source.  This variable
+    # is automatically set to '1' in Official builds.
+    'clang_xcode%': 0,
+
+    # These two variables can be set in GYP_DEFINES while running
+    # |gclient runhooks| to let clang run a plugin in every compilation.
+    # Only has an effect if 'clang=1' is in GYP_DEFINES as well.
+    # Example:
+    #     GYP_DEFINES='clang=1 clang_load=/abs/path/to/libPrintFunctionNames.dylib clang_add_plugin=print-fns' gclient runhooks
+
+    'clang_load%': '',
+    'clang_add_plugin%': '',
+
+    # Tell ld64 to write map files describing binary layout. Useful
+    # for looking at what contributes to binary size, e.g. with
+    # https://github.com/nico/bloat
+    'mac_write_linker_maps%': 0,
+
+    # The default type of gtest.
+    'gtest_target_type%': 'executable',
+
+    # Enable sampling based profiler.
+    # See http://google-perftools.googlecode.com/svn/trunk/doc/cpuprofile.html
+    'profiling%': '0',
+    # Profile without optimizing out stack frames when profiling==1.
+    'profiling_full_stack_frames%': '0',
+
+    # And if we want to dump symbols for Breakpad-enabled builds.
+    'linux_dump_symbols%': 0,
+    # And if we want to strip the binary after dumping symbols.
+    'linux_strip_binary%': 0,
+    # If we want stack unwind support for backtrace().
+    'debug_unwind_tables%': 1,
+    'release_unwind_tables%': 1,
+
+    # Override where to find binutils
+    'binutils_version%': 0,
+    'binutils_dir%': '',
+
+    # Enable TCMalloc.
+    # The default of 'use_allocator' is set to 'none' later if OS=='android'.
+    'use_allocator%': 'tcmalloc',
+
+    # Set to 1 to link against libgnome-keyring instead of using dlopen().
+    'linux_link_gnome_keyring%': 0,
+    # Set to 1 to link against gsettings APIs instead of using dlopen().
+    'linux_link_gsettings%': 0,
+
+    # Enable use of OpenMAX DL FFT routines.
+    'use_openmax_dl_fft%': '<(use_openmax_dl_fft)',
+
+    # Enable new NPDevice API.
+    'enable_new_npdevice_api%': 0,
+
+    # .gyp files or targets should set chromium_code to 1 if they build
+    # Chromium-specific code, as opposed to external code.  This variable is
+    # used to control such things as the set of warnings to enable, and
+    # whether warnings are treated as errors.
+    'chromium_code%': 0,
+
+    # Disable fatal linker warnings, similarly to how we make it possible
+    # to disable -Werror (e.g. for different toolchain versions).
+    'disable_fatal_linker_warnings%': 0,
+
+    'release_valgrind_build%': 0,
+
+    # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+    'enable_wexit_time_destructors%': 0,
+
+    # Build libpeerconnection as a static library by default.
+    'libpeer_target_type%': 'static_library',
+
+    # Set to 1 to compile with the OpenGL ES 2.0 conformance tests.
+    'internal_gles2_conform_tests%': 0,
+
+    # Set to 1 to compile with the Khronos GL-CTS conformance tests.
+    'internal_khronos_glcts_tests%': 0,
+
+    # Set to 1 to compile the filter fuzzer.
+    'internal_filter_fuzzer%': 0,
+
+    # NOTE: When these end up in the Mac bundle, we need to replace '-' with '_'
+    # so Cocoa is happy (http://crbug.com/20441).
+    'locales': [
+      'am', 'ar', 'bg', 'bn', 'ca', 'cs', 'da', 'de', 'el', 'en-GB',
+      'en-US', 'es-419', 'es', 'et', 'fa', 'fi', 'fil', 'fr', 'gu', 'he',
+      'hi', 'hr', 'hu', 'id', 'it', 'ja', 'kn', 'ko', 'lt', 'lv',
+      'ml', 'mr', 'ms', 'nb', 'nl', 'pl', 'pt-BR', 'pt-PT', 'ro', 'ru',
+      'sk', 'sl', 'sr', 'sv', 'sw', 'ta', 'te', 'th', 'tr', 'uk',
+      'vi', 'zh-CN', 'zh-TW',
+    ],
+
+    # Pseudo locales are special locales which are used for testing and
+    # debugging. They don't get copied to the final app. For more info,
+    # check out https://www.chromium.org/developers/testing/fake-bidi
+    'pseudo_locales': [
+      'fake-bidi',
+    ],
+
+    'grit_defines': [],
+
+    # If debug_devtools is set to 1, JavaScript files for DevTools are
+    # stored as is and loaded from disk. Otherwise, a concatenated file
+    # is stored in resources.pak. It is still possible to load JS files
+    # from disk by passing --debug-devtools cmdline switch.
+    'debug_devtools%': 0,
+
+    # The Java Bridge is not compiled in by default.
+    'java_bridge%': 0,
+
+    # Code signing for iOS binaries.  The bots need to be able to disable this.
+    'chromium_ios_signing%': 1,
+
+    # This flag is only used when disable_nacl==0 and disables all those
+    # subcomponents which would require the installation of a native_client
+    # untrusted toolchain.
+    'disable_nacl_untrusted%': 0,
+
+    # PNaCl toolchain does not support sanitizers. Disable by default.
+    'enable_nacl_nonsfi_test%': 0,
+
+    # Disable Dart by default.
+    'enable_dart%': 0,
+
+    # Copy out the setting of disable_nacl.
+    'disable_nacl%': '<(disable_nacl)',
+
+    # Portable Native Client is enabled by default.
+    'disable_pnacl%': 0,
+
+    # Whether to build the full debug version for the Debug configuration on
+    # Android. Compared to the full debug version, the default Debug
+    # configuration on Android has no full v8 debug, and has size optimization
+    # and linker gc sections enabled, so that we can build a debug version
+    # with acceptable size and performance.
+    'android_full_debug%': 0,
+
+    # Contains data about the attached devices for gyp_managed_install.
+    'build_device_config_path': '<(PRODUCT_DIR)/build_devices.cfg',
+
+    'sas_dll_exists': '<!pymod_do_main(dir_exists "<(sas_dll_path)")',
+    'wix_exists': '<!pymod_do_main(dir_exists "<(wix_path)")',
+
+    'windows_sdk_path%': 'C:/Program Files (x86)/Windows Kits/8.1',
+    'directx_sdk_default_path': '<(DEPTH)/third_party/directxsdk/files',
+
+    # Whether we are using the rlz library or not.  Platforms like Android send
+    # rlz codes for searches but do not use the library.
+    'enable_rlz_support%': 0,
+    'enable_rlz%': 0,
+
+    # Turns on the i18n support in V8.
+    'v8_enable_i18n_support': 1,
+
+    # Compile d8 for the host toolset.
+    'v8_toolset_for_d8': 'host',
+
+    # Use brlapi from brltty for braille display support.
+    'use_brlapi%': 0,
+
+    # Relative path to icu.gyp from this file.
+    'icu_gyp_path': '../third_party/icu/icu.gyp',
+
+    # IPC fuzzer is disabled by default.
+    'enable_ipc_fuzzer%': 0,
+
+    # Force disable libstdc++ debug mode.
+    'disable_glibcxx_debug%': 0,
+
+    # Set to 1 to compile with MSE support for MPEG2 TS
+    'enable_mpeg2ts_stream_parser%': 0,
+
+    # Support ChromeOS touchpad gestures with ozone.
+    'use_evdev_gestures%': 0,
+
+    # Default ozone platform (if no --ozone-platform flag).
+    'ozone_platform%': "",
+
+    # Ozone platforms to include in the build.
+    'ozone_platform_caca%': 0,
+    'ozone_platform_cast%': 0,
+    'ozone_platform_drm%': 0,
+    'ozone_platform_egltest%': 0,
+    'ozone_platform_gbm%': 0,
+    'ozone_platform_ozonex%': 0,
+    'ozone_platform_test%': 0,
+
+    # Experiment: http://crbug.com/426914
+    'envoy%': 0,
+
+    # Used to set libjpeg_gyp_path. Chrome OS ui/gfx/gfx.gyp uses the IJG path
+    # for robust login screen decoding.
+    'libjpeg_ijg_gyp_path': '<(DEPTH)/third_party/libjpeg/libjpeg.gyp',
+    'libjpeg_turbo_gyp_path': '<(DEPTH)/third_party/libjpeg_turbo/libjpeg.gyp',
+
+    'conditions': [
+      ['buildtype=="Official"', {
+        # Continue to embed build metadata in Official builds, basically the
+        # time it was built.
+        # TODO(maruel): This decision should be revisited because having an
+        # official deterministic build has high value too, but the MSVC toolset
+        # can't generate anything deterministic with WPO enabled, AFAIK.
+        'dont_embed_build_metadata%': 0,
+      }],
+      # Enable the Syzygy optimization step for the official builds.
+      ['OS=="win" and buildtype=="Official" and syzyasan!=1 and clang!=1', {
+        'syzygy_optimize%': 1,
+      }, {
+        'syzygy_optimize%': 0,
+      }],
+      # Get the binutils version so we can enable debug fission where possible.
+      ['os_posix==1 and OS!="mac" and OS!="ios"', {
+        'conditions': [
+          # compiler_version doesn't work with clang
+          # TODO(mithro): Land https://codereview.chromium.org/199793014/ so
+          # compiler_version works with clang.
+          # TODO(glider): set clang to 1 earlier for ASan and TSan builds so
+          # that it takes effect here.
+          ['clang==0 and asan==0 and lsan==0 and tsan==0 and msan==0 and ubsan==0 and ubsan_vptr==0', {
+            'binutils_version%': '<!pymod_do_main(compiler_version target assembler)',
+          }],
+          # On Android we know the binutils version in the toolchain.
+          ['OS=="android"', {
+            'binutils_version%': 222,
+          }],
+          ['host_arch=="x64"', {
+            'binutils_dir%': 'third_party/binutils/Linux_x64/Release/bin',
+          }],
+          ['host_arch=="ia32"', {
+            'binutils_dir%': 'third_party/binutils/Linux_ia32/Release/bin',
+          }],
+          # Our version of binutils in third_party/binutils
+          ['linux_use_bundled_binutils==1', {
+            'binutils_version%': 224,
+          }],
+        ],
+      }, {
+        'binutils_version%': 0,
+      }],
+      # The version of GCC in use, set later in platforms that use GCC and have
+      # not explicitly chosen to build with clang. Currently, this means all
+      # platforms except Windows, Mac and iOS.
+      # TODO(glider): set clang to 1 earlier for ASan and TSan builds so that
+      # it takes effect here.
+      ['os_posix==1 and OS!="mac" and OS!="ios" and clang==0 and asan==0 and lsan==0 and tsan==0 and msan==0 and ubsan_vptr==0', {
+        'conditions': [
+          ['OS=="android"', {
+            'host_gcc_version%': '<!pymod_do_main(compiler_version host compiler)',
+            # We directly set the gcc version since we know what we use.
+            'gcc_version%': 49,
+          }, {
+            'host_gcc_version%': '<!pymod_do_main(compiler_version host compiler)',
+            'gcc_version%': '<!pymod_do_main(compiler_version target compiler)',
+          }],
+        ],
+      }, {
+        'host_gcc_version%': 0,
+        'gcc_version%': 0,
+      }],
+      ['OS=="win" and "<!pymod_do_main(dir_exists <(directx_sdk_default_path))"=="True"', {
+        'directx_sdk_path%': '<(directx_sdk_default_path)',
+      }, {
+        'directx_sdk_path%': '$(DXSDK_DIR)',
+      }],
+      ['OS=="win"', {
+        'windows_driver_kit_path%': '$(WDK_DIR)',
+      }],
+      ['os_posix==1 and OS!="mac" and OS!="ios"', {
+        'conditions': [
+          ['target_arch=="mipsel" or target_arch=="mips64el"', {
+            'werror%': '',
+            'disable_nacl%': 1,
+            'nacl_untrusted_build%': 0,
+            'use_allocator%': 'none',
+          }],
+          # Use a 64-bit linker to avoid running out of address space. The
+          # buildbots should have a 64-bit kernel and a 64-bit libc installed.
+          ['host_arch=="ia32" and target_arch=="ia32"', {
+            # TODO(thestig) This is a horrible way to force the desired
+            # configuration. Our gyp variable scoping is likely wrong and
+            # needs to be cleaned up. The GN configuration should be changed
+            # to match.
+            'binutils_version%': 224,
+            'linux_use_bundled_binutils%': '1',
+            'linux_use_bundled_gold%': '1',
+            'binutils_dir%': 'third_party/binutils/Linux_x64/Release/bin',
+          }],
+          # All Chrome builds have breakpad symbols, but only process the
+          # symbols from official builds.
+          ['(branding=="Chrome" and buildtype=="Official")', {
+            'linux_dump_symbols%': 1,
+
+            # Omit unwind support in official release builds to save space. We
+            # can use breakpad for these builds.
+            'release_unwind_tables%': 0,
+          }],
+        ],
+      }],  # os_posix==1 and OS!="mac" and OS!="ios"
+      ['OS=="ios"', {
+        'disable_nacl%': 1,
+        'enable_background%': 0,
+        'icu_use_data_file_flag%': 1,
+        'enable_web_speech%': 0,
+        'use_system_libxml%': 1,
+        'use_system_sqlite%': 1,
+        'locales==': [
+          'ar', 'ca', 'cs', 'da', 'de', 'el', 'en-GB', 'en-US', 'es', 'es-MX',
+          'fi', 'fr', 'he', 'hi', 'hr', 'hu', 'id', 'it', 'ja', 'ko', 'ms',
+          'nb', 'nl', 'pl', 'pt', 'pt-PT', 'ro', 'ru', 'sk', 'sv', 'th', 'tr',
+          'uk', 'vi', 'zh-CN', 'zh-TW',
+        ],
+
+        # iOS SDK and deployment target support.  The |ios_sdk| value is left
+        # blank so that when it is set in the project files it will be the
+        # "current" iOS SDK.  Forcing a specific SDK even if it is "current"
+        # causes Xcode to spit out a warning for every single project file for
+        # not using the "current" SDK.
+        'ios_sdk%': '',
+        'ios_sdk_path%': '',
+        'ios_deployment_target%': '7.0',
+
+        'conditions': [
+          # ios_product_name is set to the name of the .app bundle as it should
+          # appear on disk.
+          ['branding=="Chrome"', {
+            'ios_product_name%': 'Chrome',
+          }, { # else: branding!="Chrome"
+            'ios_product_name%': 'Chromium',
+          }],
+          ['branding=="Chrome" and buildtype=="Official"', {
+            'ios_breakpad%': 1,
+          }, { # else: branding!="Chrome" or buildtype!="Official"
+            'ios_breakpad%': 0,
+          }],
+        ],
+      }],  # OS=="ios"
+      ['OS=="android"', {
+        # Location of Android NDK.
+        'variables': {
+          'variables': {
+            # Standard libraries can use the relative path to the NDK.
+            'android_ndk_root%': '../../third_party/android_tools/ndk/',
+            # Unfortunately, it is required to use the absolute path to the SDK
+            # because it is passed to ant, which uses a different relative path
+            # than GYP.
+            'android_sdk_root%': '<!(cd <(DEPTH) && pwd -P)/third_party/android_tools/sdk/',
+            # Similarly, gdbserver and the Android toolchain need to use the
+            # absolute path to the NDK because they are used at different levels
+            # in the GYP files.
+            'android_ndk_absolute_root%': '<!(cd <(DEPTH) && pwd -P)/third_party/android_tools/ndk/',
+            'android_host_arch%': '<!(uname -m)',
+            # Android API-level of the SDK used for compilation.
+            'android_sdk_version%': '22',
+            'android_sdk_build_tools_version%': '22.0.1',
+            'host_os%': "<!(uname -s | sed -e 's/Linux/linux/;s/Darwin/mac/')",
+          },
+          # Copy conditionally-set variables out one scope.
+          'android_ndk_root%': '<(android_ndk_root)',
+          'android_ndk_absolute_root%': '<(android_ndk_absolute_root)',
+          'android_sdk_root%': '<(android_sdk_root)',
+          'android_sdk_version%': '<(android_sdk_version)',
+          'android_libcpp_root': '<(android_ndk_root)/sources/cxx-stl/llvm-libc++',
+          'host_os%': '<(host_os)',
+
+          'android_sdk%': '<(android_sdk_root)/platforms/android-<(android_sdk_version)',
+          # Android SDK build tools (e.g. dx, aidl)
+          'android_sdk_tools%': '<(android_sdk_root)/build-tools/<(android_sdk_build_tools_version)',
+
+          # Android API level 16 is JB (Android 4.1), which is the minimum
+          # platform requirement for Chrome on Android; we use it for native
+          # code compilation.
+          'conditions': [
+            ['target_arch == "ia32"', {
+              'android_app_abi%': 'x86',
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-x86/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-16/arch-x86',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/x86-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "x64"', {
+              'android_app_abi%': 'x86_64',
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-x86_64/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-21/arch-x86_64',
+              'android_ndk_lib_dir%': 'usr/lib64',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/x86_64-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch=="arm"', {
+              'conditions': [
+                ['arm_version<7', {
+                  'android_app_abi%': 'armeabi',
+                }, {
+                  'android_app_abi%': 'armeabi-v7a',
+                }],
+              ],
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-arm/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-16/arch-arm',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/arm-linux-androideabi-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "arm64"', {
+              'android_app_abi%': 'arm64-v8a',
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-arm64/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-21/arch-arm64',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/aarch64-linux-android-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "mipsel"', {
+              'android_app_abi%': 'mips',
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-mips/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-16/arch-mips',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/mipsel-linux-android-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "mips64el"', {
+              'android_app_abi%': 'mips64',
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-mips64/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-21/arch-mips64',
+              'android_ndk_lib_dir%': 'usr/lib64',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/mips64el-linux-android-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+          ],
+        },
+        # Copy conditionally-set variables out one scope.
+        'android_app_abi%': '<(android_app_abi)',
+        'android_gdbserver%': '<(android_gdbserver)',
+        'android_ndk_root%': '<(android_ndk_root)',
+        'android_ndk_sysroot%': '<(android_ndk_sysroot)',
+        'android_sdk_root%': '<(android_sdk_root)',
+        'android_sdk_version%': '<(android_sdk_version)',
+        'android_toolchain%': '<(android_toolchain)',
+
+        'android_ndk_include': '<(android_ndk_sysroot)/usr/include',
+        'android_ndk_lib': '<(android_ndk_sysroot)/<(android_ndk_lib_dir)',
+        'android_sdk_tools%': '<(android_sdk_tools)',
+        'android_aapt_path%': '<(android_sdk_tools)/aapt',
+        'android_sdk%': '<(android_sdk)',
+        'android_sdk_jar%': '<(android_sdk)/android.jar',
+
+        'android_libcpp_root': '<(android_libcpp_root)',
+        'android_libcpp_include': '<(android_libcpp_root)/libcxx/include',
+        'android_libcpp_libs_dir%': '<(android_libcpp_root)/libs/<(android_app_abi)',
+        'host_os%': '<(host_os)',
+
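+        # In the next three assignments, '<!(command)' is replaced by the
+        # command's output at gyp time; /bin/echo -n lets the shell expand
+        # the '*-objcopy'-style glob without appending a newline.
+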
+        # Location of the "objcopy" binary, used by both gyp and scripts.
+        'android_objcopy%' : '<!(/bin/echo -n <(android_toolchain)/*-objcopy)',
+
+        # Location of the "strip" binary, used by both gyp and scripts.
+        'android_strip%' : '<!(/bin/echo -n <(android_toolchain)/*-strip)',
+
+        # Location of the "readelf" binary.
+        'android_readelf%' : '<!(/bin/echo -n <(android_toolchain)/*-readelf)',
+
+        # Determines whether we should optimize JNI generation, at the cost of
+        # breaking the build system's assumption that outputs always change
+        # when their inputs have changed.  This is meant purely for developer
+        # builds, to avoid spurious re-linking of native files.
+        'optimize_jni_generation%': '<(optimize_jni_generation)',
+
+        # Use OpenSSL's struct X509 to represent certificates.
+        'use_openssl_certs%': 1,
+
+        'proprietary_codecs%': '<(proprietary_codecs)',
+        'safe_browsing%': 2,
+        'enable_web_speech%': 0,
+        'java_bridge%': 1,
+        'build_ffmpegsumo%': 0,
+        'use_allocator%': 'none',
+
+        # Disable Native Client.
+        'disable_nacl%': 1,
+
+        # Android does not support background apps.
+        'enable_background%': 0,
+
+        # Sessions are stored separately on the Java side.
+        'enable_session_service%': 0,
+
+        'p2p_apis%' : 0,
+
+        'gtest_target_type%': 'shared_library',
+      }],  # OS=="android"
+      ['embedded==1', {
+        'use_system_fontconfig%': 0,
+      }, {
+        'use_system_fontconfig%': 1,
+      }],
+      ['chromecast==1', {
+        'enable_mpeg2ts_stream_parser%': 1,
+        'ffmpeg_branding%': 'ChromeOS',
+        'ozone_platform_ozonex%': 1,
+        'use_custom_freetype%': 0,
+        'use_playready%': 0,
+        'conditions': [
+          ['target_arch=="arm"', {
+            'arm_arch%': '',
+            'arm_tune%': 'cortex-a9',
+            'arm_thumb%': 1,
+            'video_hole%': 1,
+          }],
+        ],
+      }],
+      ['chromecast==1 and OS!="android"', {
+        'ozone_platform_cast%': 1
+      }],
+      ['OS=="linux" and target_arch!="mipsel"', {
+        'clang%': 1,
+      }],  # OS=="mac"
+      ['OS=="mac"', {
+        'conditions': [
+          # All Chrome builds have breakpad symbols, but only process the
+          # symbols from official builds.
+          ['(branding=="Chrome" and buildtype=="Official")', {
+            'mac_strip_release%': 1,
+          }],
+        ],
+      }],  # OS=="mac"
+      ['OS=="mac" or OS=="ios"', {
+        'clang%': 1,
+
+        'variables': {
+          # Mac OS X SDK and deployment target support.  The SDK identifies
+          # the version of the system headers that will be used, and
+          # corresponds to the MAC_OS_X_VERSION_MAX_ALLOWED compile-time
+          # macro.  "Maximum allowed" refers to the operating system version
+          # whose APIs are available in the headers.  The deployment target
+          # identifies the minimum system version that the built products are
+          # expected to function on.  It corresponds to the
+          # MAC_OS_X_VERSION_MIN_REQUIRED compile-time macro.  To ensure these
+          # macros are available, #include <AvailabilityMacros.h>.  Additional
+          # documentation on these macros is available at
+          # http://developer.apple.com/mac/library/technotes/tn2002/tn2064.html#SECTION3
+          # Chrome normally builds with the Mac OS X 10.6 SDK and sets the
+          # deployment target to 10.6.  Other projects, such as O3D, may
+          # override these defaults.
+
+          # Normally, mac_sdk_min is used to find an SDK that Xcode knows
+          # about that is at least the specified version. In official builds,
+          # the SDK must match mac_sdk_min exactly. If the SDK is installed
+          # someplace that Xcode doesn't know about, set mac_sdk_path to the
+          # path to the SDK; when set to a non-empty string, SDK detection
+          # based on mac_sdk_min will be bypassed entirely.
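+          # For example, with mac_sdk_min '10.6' an Xcode that only has the
+          # 10.9 SDK installed is still accepted, while an official build
+          # requires exactly the 10.6 SDK.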
+          'conditions': [
+            ['OS=="ios"', {
+              'mac_sdk_min%': '10.8',
+            }, {  # else OS!="ios"
+              'mac_sdk_min%': '10.6',
+            }],
+          ],
+          'mac_sdk_path%': '',
+
+          'mac_deployment_target%': '10.6',
+        },
+
+        'mac_sdk_min': '<(mac_sdk_min)',
+        'mac_sdk_path': '<(mac_sdk_path)',
+        'mac_deployment_target': '<(mac_deployment_target)',
+
+        # Compile in Breakpad support by default so that it can be
+        # tested, even if it is not enabled by default at runtime.
+        'mac_breakpad_compiled_in%': 1,
+        'conditions': [
+          # mac_product_name is set to the name of the .app bundle as it should
+          # appear on disk.  This duplicates data from
+          # chrome/app/theme/chromium/BRANDING and
+          # chrome/app/theme/google_chrome/BRANDING, but is necessary to get
+          # these names into the build system.
+          ['branding=="Chrome"', {
+            'mac_product_name%': 'Google Chrome',
+          }, { # else: branding!="Chrome"
+            'mac_product_name%': 'Chromium',
+          }],
+          # Official mac builds require a specific OS X SDK, but iOS and
+          # non-official mac builds do not.
+          ['branding=="Chrome" and buildtype=="Official" and OS=="mac"', {
+            'mac_sdk%': '<!(python <(DEPTH)/build/mac/find_sdk.py --verify <(mac_sdk_min) --sdk_path=<(mac_sdk_path))',
+          }, {
+            'mac_sdk%': '<!(python <(DEPTH)/build/mac/find_sdk.py <(mac_sdk_min))',
+          }],
+          ['branding=="Chrome" and buildtype=="Official"', {
+            # Enable uploading crash dumps.
+            'mac_breakpad_uploads%': 1,
+            # Enable dumping symbols at build time for use by Mac Breakpad.
+            'mac_breakpad%': 1,
+            # Enable Keystone auto-update support.
+            'mac_keystone%': 1,
+          }, { # else: branding!="Chrome" or buildtype!="Official"
+            'mac_breakpad_uploads%': 0,
+            'mac_breakpad%': 0,
+            'mac_keystone%': 0,
+          }],
+        ],
+      }],  # OS=="mac" or OS=="ios"
+      ['OS=="win"', {
+        'conditions': [
+          # This is the architecture convention used in WinSDK paths.
+          ['target_arch=="ia32"', {
+            'winsdk_arch%': 'x86',
+          },{
+            'winsdk_arch%': '<(target_arch)',
+          }],
+          ['component=="shared_library" or MSVS_VERSION == "2015"', {
+            # TODO(scottmg): The allocator shimming doesn't work on the 2015 CRT
+            # and we are hoping to be able to remove it if an additional feature
+            # lands in the 2015 CRT API. For now, don't shim and revisit once
+            # VS2015 is RTM: http://crbug.com/481611.
+            'win_use_allocator_shim%': 0,
+          }],
+          ['component=="static_library"', {
+            # Turn on the multiple-DLL build by default on Windows when in
+            # static_library mode.
+            'chrome_multiple_dll%': 1,
+          }],
+          ['asan==1 or syzyasan==1', {
+            'win_use_allocator_shim%': 0,
+          }],
+          ['syzyasan==1', {
+            'kasko%': 1,
+          }],
+          ['component=="shared_library" and "<(GENERATOR)"=="ninja"', {
+            # Only enabled by default for ninja because it's buggy in VS.
+            # Not enabled for component=static_library because some targets
+            # are too large and the toolchain fails due to the size of the
+            # .obj files.
+            'incremental_chrome_dll%': 1,
+          }],
+          # Don't do incremental linking for large modules on 32-bit or when
+          # component=static_library as the toolchain fails due to the size of
+          # the .ilk files.
+          ['MSVS_OS_BITS==32 or component=="static_library"', {
+            'msvs_large_module_debug_link_mode%': '1',  # No
+          },{
+            'msvs_large_module_debug_link_mode%': '2',  # Yes
+          }],
+        ],
+        'nacl_win64_defines': [
+          # This flag is used to minimize dependencies when building
+          # Native Client loader for 64-bit Windows.
+          'NACL_WIN64',
+        ],
+        # Need to include allocator target, but exclude tcmalloc files.
+        'use_allocator%': 'winheap',
+      }],
+
+      ['os_posix==1 and chromeos==0 and OS!="android" and OS!="ios" and embedded==0', {
+        'use_cups%': 1,
+      }, {
+        'use_cups%': 0,
+      }],
+
+      ['enable_plugins==1 and (OS=="linux" or OS=="mac" or OS=="win") and chromecast==0', {
+        'enable_pepper_cdms%': 1,
+      }, {
+        'enable_pepper_cdms%': 0,
+      }],
+
+      ['OS=="android" or chromecast==1', {
+        'enable_browser_cdms%': 1,
+      }, {
+        'enable_browser_cdms%': 0,
+      }],
+
+      # Native Client glibc toolchain is enabled
+      # by default except on arm, mips and mips64.
+      ['target_arch=="arm" or target_arch=="mipsel" or target_arch=="mips64el"', {
+        'disable_glibc%': 1,
+      }, {
+        'disable_glibc%': 0,
+      }],
+
+      # Set the relative path from this file to the GYP file of the JPEG
+      # library used by Chromium.
+      ['use_system_libjpeg==1 or use_libjpeg_turbo==0', {
+        # Configuration for using the system libjpeg is here.
+        'libjpeg_gyp_path': '<(libjpeg_ijg_gyp_path)',
+      }, {
+        'libjpeg_gyp_path': '<(libjpeg_turbo_gyp_path)',
+      }],
+
+      # Options controlling the use of GConf (the classic GNOME configuration
+      # system) and GIO, which contains GSettings (the new GNOME config system).
+      ['chromeos==1 or embedded==1', {
+        'use_gconf%': 0,
+        'use_gio%': 0,
+      }, {
+        'use_gconf%': 1,
+        'use_gio%': 1,
+      }],
+
+      # Set up -D and -E flags passed into grit.
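+      # (Roughly: '-D name' adds a define consulted by <if expr="..."> blocks
+      # in the .grd files, and '-E name=value' sets an environment variable
+      # for grit.)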
+      ['branding=="Chrome"', {
+        # TODO(mmoss) The .grd files look for _google_chrome, but for
+        # consistency they should look for google_chrome_build like C++.
+        'grit_defines': ['-D', '_google_chrome',
+                         '-E', 'CHROMIUM_BUILD=google_chrome'],
+      }, {
+        'grit_defines': ['-D', '_chromium',
+                         '-E', 'CHROMIUM_BUILD=chromium'],
+      }],
+      ['chromeos==1', {
+        'grit_defines': ['-D', 'chromeos', '-D', 'scale_factors=2x'],
+      }],
+      ['desktop_linux==1', {
+        'grit_defines': ['-D', 'desktop_linux'],
+      }],
+      ['toolkit_views==1', {
+        'grit_defines': ['-D', 'toolkit_views'],
+      }],
+      ['use_aura==1', {
+        'grit_defines': ['-D', 'use_aura'],
+      }],
+      ['use_ash==1', {
+        'grit_defines': ['-D', 'use_ash'],
+      }],
+      ['use_nss_certs==1', {
+        'grit_defines': ['-D', 'use_nss_certs'],
+      }],
+      ['use_ozone==1', {
+        'grit_defines': ['-D', 'use_ozone'],
+      }],
+      ['image_loader_extension==1', {
+        'grit_defines': ['-D', 'image_loader_extension'],
+      }],
+      ['remoting==1', {
+        'grit_defines': ['-D', 'remoting'],
+      }],
+      ['use_titlecase_in_grd==1', {
+        'grit_defines': ['-D', 'use_titlecase'],
+      }],
+      ['use_third_party_translations==1', {
+        'grit_defines': ['-D', 'use_third_party_translations'],
+        'locales': [
+          'ast', 'bs', 'ca@valencia', 'en-AU', 'eo', 'eu', 'gl', 'hy', 'ia',
+          'ka', 'ku', 'kw', 'ms', 'ug'
+        ],
+      }],
+      ['OS=="android"', {
+        'grit_defines': [
+          '-t', 'android',
+          '-E', 'ANDROID_JAVA_TAGGED_ONLY=true',
+          '--no-output-all-resource-defines',
+        ],
+      }],
+      ['OS=="mac" or OS=="ios"', {
+        'grit_defines': ['-D', 'scale_factors=2x'],
+      }],
+      ['OS == "ios"', {
+        'variables': {
+          'enable_coverage%': 0,
+        },
+        'grit_defines': [
+          '-t', 'ios',
+          '--no-output-all-resource-defines',
+        ],
+        # iOS uses a whitelist to filter resources.
+        'grit_whitelist%': '<(DEPTH)/build/ios/grit_whitelist.txt',
+
+        # Enable host builds when generating with ninja-ios.
+        'conditions': [
+          ['"<(GENERATOR)"=="ninja"', {
+            'host_os%': "mac",
+          }],
+
+          # Use the version of clang shipped with Xcode when building official
+          # version of Chrome for iOS.
+          #
+          # TODO(eugenebut): Remove enable_coverage check once
+          # libclang_rt.profile_ios.a is bundled with Chromium's clang.
+          # http://crbug.com/450379
+          #
+          # TODO(sdefresne): Remove xcodebuild version check once clang ToT
+          # supports "nullable" and related. https://crbug.com/499448
+          ['buildtype=="Official" or enable_coverage or '
+            '<!(xcodebuild -version|awk \'/Xcode/{print ($2 >= 7.0)}\')==1', {
+            'clang_xcode%': 1,
+          }],
+        ],
+      }],
+      ['enable_extensions==1', {
+        'grit_defines': ['-D', 'enable_extensions'],
+      }],
+      ['enable_plugins!=0', {
+        'grit_defines': ['-D', 'enable_plugins'],
+      }],
+      ['enable_basic_printing==1 or enable_print_preview==1', {
+        'grit_defines': ['-D', 'enable_printing'],
+      }],
+      ['enable_print_preview==1', {
+        'grit_defines': ['-D', 'enable_print_preview'],
+      }],
+      ['enable_themes==1', {
+        'grit_defines': ['-D', 'enable_themes'],
+      }],
+      ['enable_app_list==1', {
+        'grit_defines': ['-D', 'enable_app_list'],
+      }],
+      ['enable_settings_app==1', {
+        'grit_defines': ['-D', 'enable_settings_app'],
+      }],
+      ['enable_google_now==1', {
+        'grit_defines': ['-D', 'enable_google_now'],
+      }],
+      ['use_concatenated_impulse_responses==1', {
+        'grit_defines': ['-D', 'use_concatenated_impulse_responses'],
+      }],
+      ['enable_media_router==1', {
+        'grit_defines': ['-D', 'enable_media_router'],
+      }],
+      ['enable_webrtc==1', {
+        'grit_defines': ['-D', 'enable_webrtc'],
+      }],
+      ['enable_hangout_services_extension==1', {
+        'grit_defines': ['-D', 'enable_hangout_services_extension'],
+      }],
+      ['enable_task_manager==1', {
+        'grit_defines': ['-D', 'enable_task_manager'],
+      }],
+      ['notifications==1', {
+        'grit_defines': ['-D', 'enable_notifications'],
+      }],
+      ['enable_wifi_bootstrapping==1', {
+        'grit_defines': ['-D', 'enable_wifi_bootstrapping'],
+      }],
+      ['mac_views_browser==1', {
+        'grit_defines': ['-D', 'mac_views_browser'],
+      }],
+      ['enable_resource_whitelist_generation==1 and OS!="win"', {
+        'grit_rc_header_format': ['-h', '#define {textual_id} _Pragma("whitelisted_resource_{numeric_id}") {numeric_id}'],
+      }],
+      ['enable_resource_whitelist_generation==1 and OS=="win"', {
+        'grit_rc_header_format': ['-h', '#define {textual_id} __pragma(message("whitelisted_resource_{numeric_id}")) {numeric_id}'],
+      }],
+      ['enable_mdns==1 or OS=="mac"', {
+        'grit_defines': ['-D', 'enable_service_discovery'],
+        'enable_service_discovery%': 1
+      }],
+      ['clang_use_chrome_plugins==1', {
+        'variables': {
+          'conditions': [
+            ['OS!="win"', {
+              'variables': {
+                'conditions': [
+                  ['OS=="mac" or OS=="ios"', {
+                    'clang_lib_path%': '<!(cd <(DEPTH) && pwd -P)/third_party/llvm-build/Release+Asserts/lib/libFindBadConstructs.dylib',
+                  }, { # OS != "mac" or OS != "ios"
+                    'clang_lib_path%': '<!(cd <(DEPTH) && pwd -P)/third_party/llvm-build/Release+Asserts/lib/libFindBadConstructs.so',
+                  }],
+                ],
+              },
+              'clang_dynlib_flags%': '-Xclang -load -Xclang <(clang_lib_path) ',
+            }, { # OS == "win"
+              # On Windows, the plugin is built directly into clang, so there's
+              # no need to load it dynamically.
+              'clang_dynlib_flags%': '',
+            }],
+            # https://crbug.com/441916
+            ['OS=="android" or OS=="linux" or OS=="mac"', {
+              'clang_plugin_args%': '-Xclang -plugin-arg-find-bad-constructs -Xclang check-templates ',
+            }, { # OS != "linux"
+              'clang_plugin_args%': ''
+            }],
+          ],
+        },
+        # If you change these, also change build/config/clang/BUILD.gn.
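+        # (GYP parses this file with Python syntax, so the two adjacent
+        # string literals below are concatenated; clang_dynlib_flags ends
+        # with a trailing space to keep the joined flags separated.)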
+        'clang_chrome_plugins_flags%':
+          '<(clang_dynlib_flags)'
+          '-Xclang -add-plugin -Xclang find-bad-constructs <(clang_plugin_args)',
+      }],
+      ['asan==1 or msan==1 or lsan==1 or tsan==1', {
+        'clang%': 1,
+        'use_allocator%': 'none',
+        'use_sanitizer_options%': 1,
+      }],
+
+      ['OS=="linux" and asan==0 and msan==0 and lsan==0 and tsan==0', {
+        # The PNaCl toolchain's Non-SFI build is only supported on Linux,
+        # and it does not support sanitizers.
+        'enable_nacl_nonsfi_test%': 1,
+      }],
+      ['asan==1 and OS=="linux" and chromeos==0', {
+        'use_custom_libcxx%': 1,
+      }],
+      ['ubsan==1', {
+        'clang%': 1,
+      }],
+      ['ubsan_vptr==1', {
+        'clang%': 1,
+      }],
+      ['asan==1 and OS=="mac"', {
+        'mac_strip_release': 1,
+      }],
+      ['tsan==1', {
+        'use_custom_libcxx%': 1,
+      }],
+      ['msan==1', {
+        # Use a just-built, MSan-instrumented libc++ instead of the system-wide
+        # libstdc++. This is required to avoid false positive reports whenever
+        # the C++ standard library is used.
+        'use_custom_libcxx%': 1,
+        # Running the V8-generated code on an ARM simulator is a powerful hack
+        # that allows the tool to see the memory accesses from JITted code.
+        # Without this flag, JS code causes false positive reports from MSan.
+        'v8_target_arch': 'arm64',
+      }],
+
+      ['OS=="linux" and clang_type_profiler==1', {
+        'clang%': 1,
+        'clang_use_chrome_plugins%': 0,
+        'conditions': [
+          ['host_arch=="x64"', {
+            'make_clang_dir%': 'third_party/llvm-allocated-type/Linux_x64',
+          }],
+          ['host_arch=="ia32"', {
+            # 32-bit Clang is unsupported.  It may not build.  Put your 32-bit
+            # Clang in this directory at your own risk if needed for some
+            # purpose (e.g. to compare 32-bit and 64-bit behavior like memory
+            # usage).  Any failure by this compiler should not close the tree.
+            'make_clang_dir%': 'third_party/llvm-allocated-type/Linux_ia32',
+          }],
+        ],
+      }],
+
+      # On valgrind bots, override the optimizer settings so we don't inline too
+      # much and make the stacks harder to figure out.
+      #
+      # TODO(rnk): Kill off variables that no one else uses and just implement
+      # them under a build_for_tool== condition.
+      ['build_for_tool=="memcheck" or build_for_tool=="tsan"', {
+        # gcc flags
+        'mac_debug_optimization': '1',
+        'mac_release_optimization': '1',
+        'release_optimize': '1',
+        'no_gc_sections': 1,
+        'debug_extra_cflags': '-g -fno-inline -fno-omit-frame-pointer '
+                              '-fno-builtin -fno-optimize-sibling-calls',
+        'release_extra_cflags': '-g -fno-inline -fno-omit-frame-pointer '
+                                '-fno-builtin -fno-optimize-sibling-calls',
+
+        # MSVS flags for TSan on Pin and Windows.
+        'win_debug_RuntimeChecks': '0',
+        'win_debug_disable_iterator_debugging': '1',
+        'win_debug_Optimization': '1',
+        'win_debug_InlineFunctionExpansion': '0',
+        'win_release_InlineFunctionExpansion': '0',
+        'win_release_OmitFramePointers': '0',
+
+        'use_allocator': 'tcmalloc',
+        'release_valgrind_build': 1,
+        'werror': '',
+        'component': 'static_library',
+        'use_system_zlib': 0,
+      }],
+
+      # Build tweaks for DrMemory.
+      # TODO(rnk): Combine with tsan config to share the builder.
+      # http://crbug.com/108155
+      ['build_for_tool=="drmemory"', {
+        # These runtime checks force initialization of stack vars which blocks
+        # DrMemory's uninit detection.
+        'win_debug_RuntimeChecks': '0',
+        # Iterator debugging is slow.
+        'win_debug_disable_iterator_debugging': '1',
+        # Try to disable optimizations that mess up stacks in a release build.
+        # DrM-i#1054 (https://github.com/DynamoRIO/drmemory/issues/1054)
+        # /O2 and /Ob0 (disable inline) cannot be used together because of a
+        # compiler bug, so we use /Ob1 instead.
+        'win_release_InlineFunctionExpansion': '1',
+        'win_release_OmitFramePointers': '0',
+        # Ditto for debug, to support bumping win_debug_Optimization.
+        'win_debug_InlineFunctionExpansion': 0,
+        'win_debug_OmitFramePointers': 0,
+        # Keep the code under #ifndef NVALGRIND.
+        'release_valgrind_build': 1,
+      }],
+
+      # RLZ library is used on Win, Mac, iOS and ChromeOS.
+      ['OS=="win" or OS=="mac" or OS=="ios" or chromeos==1', {
+        'enable_rlz_support%': 1,
+        'conditions': [
+          # RLZ is enabled for "Chrome" builds.
+          ['branding=="Chrome"', {
+            'enable_rlz%': 1,
+          }],
+        ],
+      }],
+
+      # Set default compiler flags depending on ARM version.
+      ['arm_version==6', {
+        'arm_arch%': 'armv6',
+        'arm_tune%': '',
+        'arm_fpu%': 'vfp',
+        'arm_float_abi%': 'softfp',
+        'arm_thumb%': 0,
+      }],
+      ['arm_version==7', {
+        'arm_arch%': 'armv7-a',
+        'arm_tune%': 'generic-armv7-a',
+        'conditions': [
+          ['arm_neon==1', {
+            'arm_fpu%': 'neon',
+          }, {
+            'arm_fpu%': 'vfpv3-d16',
+          }],
+          ['OS=="android"', {
+            'arm_float_abi%': 'softfp',
+          }, {
+            'arm_float_abi%': 'hard',
+          }],
+        ],
+        'arm_thumb%': 1,
+      }],
+
+      # Set default compiler flags for MIPS floating-point support.
+      ['target_arch=="mipsel"', {
+        'mips_float_abi%': 'hard',
+      }],
+      ['target_arch=="mipsel" and mips_arch_variant=="r2"', {
+        'mips_fpu_mode%': 'fp32',
+      }],
+
+      # Enable brlapi by default for chromeos.
+      [ 'chromeos==1', {
+        'use_brlapi%': 1,
+      }],
+
+      ['use_ozone==1 and ozone_auto_platforms==1', {
+        # Use test as the default platform.
+        'ozone_platform%': 'test',
+
+        # Build all platforms whose deps are in install-build-deps.sh.
+        # Only these platforms will be compile tested by buildbots.
+        'ozone_platform_drm%': 1,
+        'ozone_platform_test%': 1,
+        'ozone_platform_egltest%': 1,
+      }],
+
+      ['desktop_linux==1 and use_aura==1 and use_x11==1', {
+        'use_clipboard_aurax11%': 1,
+      }],
+
+      ['OS=="win" and use_goma==1', {
+        # goma doesn't support pch yet.
+        'chromium_win_pch': 0,
+        # goma doesn't support PDB yet, so win_z7=1 or fastbuild=1.
+        'conditions': [
+          ['win_z7==0 and fastbuild==0', {
+            'fastbuild': 1,
+          }],
+        ],
+      }],
+
+      ['OS=="win" and (clang==1 or asan==1)', {
+        'chromium_win_pch': 0,
+      }],
+
+      ['host_clang==1', {
+        'host_cc': '<(make_clang_dir)/bin/clang',
+        'host_cxx': '<(make_clang_dir)/bin/clang++',
+      }, {
+        'host_cc': '<!(which gcc)',
+        'host_cxx': '<!(which g++)',
+      }],
+
+      # The seccomp-bpf sandbox is only supported on five architectures
+      # currently.
+      # Do not disable seccomp_bpf anywhere without talking to
+      # security@chromium.org!
+      ['((OS=="linux" or OS=="android") and '
+           '(target_arch=="ia32" or target_arch=="x64" or '
+             'target_arch=="arm" or target_arch=="mipsel" or '
+             'target_arch=="arm64"))', {
+         'use_seccomp_bpf%': 1,
+      }, {
+         'use_seccomp_bpf%': 0,
+      }],
+
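+      # Clang's CFI vtable checking needs whole-program visibility, so
+      # enabling cfi_vptr implies link-time optimization.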
+      ['cfi_vptr==1', {
+        'use_lto%': 1,
+      }],
+
+      ['branding=="Chrome" and buildtype=="Official"', {
+        'enable_hangout_services_extension%': 1,
+      }, {
+        'enable_hangout_services_extension%': 0,
+      }],
+    ],
+
+    # The path to the ANGLE library.
+    'angle_path': '<(DEPTH)/third_party/angle',
+
+    # List of default apps to install in new profiles.  The first list contains
+    # the source files as found in svn.  The second list, used only for linux,
+    # contains the destination location for each of the files.  When a crx
+    # is added or removed from the list, the chrome/browser/resources/
+    # default_apps/external_extensions.json file must also be updated.
+    #
+    # README: GN version of these is in the target //chrome:default_apps
+    # (there's no global variable like in GYP). Be sure to update that target
+    # if you change these lists!
+    'default_apps_list': [
+      'browser/resources/default_apps/external_extensions.json',
+      'browser/resources/default_apps/gmail.crx',
+      'browser/resources/default_apps/search.crx',
+      'browser/resources/default_apps/youtube.crx',
+      'browser/resources/default_apps/drive.crx',
+      'browser/resources/default_apps/docs.crx',
+    ],
+    'default_apps_list_linux_dest': [
+      '<(PRODUCT_DIR)/default_apps/external_extensions.json',
+      '<(PRODUCT_DIR)/default_apps/gmail.crx',
+      '<(PRODUCT_DIR)/default_apps/search.crx',
+      '<(PRODUCT_DIR)/default_apps/youtube.crx',
+      '<(PRODUCT_DIR)/default_apps/drive.crx',
+      '<(PRODUCT_DIR)/default_apps/docs.crx',
+    ],
+
+    # Whether to allow building of the GPU-related isolates.
+    'archive_gpu_tests%': 0,
+
+    # Whether to allow building of chromoting-related isolates.
+    'archive_chromoting_tests%': 0,
+  },
+  'target_defaults': {
+    'variables': {
+      # The condition that operates on chromium_code is in a target_conditions
+      # section, and will not have access to the default fallback value of
+      # chromium_code at the top of this file, or to the chromium_code
+      # variable placed at the root variables scope of .gyp files, because
+      # those variables are not set at target scope.  As a workaround,
+      # if chromium_code is not set at target scope, define it in target scope
+      # to contain whatever value it has during early variable expansion.
+      # That's enough to make it available during target conditional
+      # processing.
+      'chromium_code%': '<(chromium_code)',
+
+      'component%': '<(component)',
+
+      'chromecast%': '<(chromecast)',
+
+      # See http://msdn.microsoft.com/en-us/library/aa652360(VS.71).aspx
+      'win_release_Optimization%': '2', # 2 = /O2
+      'win_debug_Optimization%': '0',   # 0 = /Od
+
+      # See http://msdn.microsoft.com/en-us/library/2kxx5t2c(v=vs.80).aspx
+      # Tri-state: blank is default, 1 on, 0 off
+      'win_release_OmitFramePointers%': '0',
+      # Tri-state: blank is default, 1 on, 0 off
+      'win_debug_OmitFramePointers%': '',
+
+      # See http://msdn.microsoft.com/en-us/library/8wtf2dfz(VS.71).aspx
+      'win_debug_RuntimeChecks%': '3',    # 3 = all checks enabled, 0 = off
+
+      # See http://msdn.microsoft.com/en-us/library/47238hez(VS.71).aspx
+      'win_debug_InlineFunctionExpansion%': '',    # empty = default, 0 = off,
+      'win_release_InlineFunctionExpansion%': '2', # 1 = only __inline, 2 = max
+
+      # VS inserts quite a lot of extra checks to algorithms like
+      # std::partial_sort in Debug build which make them O(N^2)
+      # instead of O(N*logN). This is particularly slow under memory
+      # tools like ThreadSanitizer so we want it to be disablable.
+      # See http://msdn.microsoft.com/en-us/library/aa985982(v=VS.80).aspx
+      'win_debug_disable_iterator_debugging%': '0',
+
+      # An application manifest fragment to declare compatibility settings for
+      # 'executable' targets. Ignored for other target types.
+      'win_exe_compatibility_manifest%':
+          '<(DEPTH)\\build\\win\\compatibility.manifest',
+
+      'release_extra_cflags%': '',
+      'debug_extra_cflags%': '',
+
+      'release_valgrind_build%': '<(release_valgrind_build)',
+
+      # The non-qualified versions are widely assumed to be *nix-only.
+      'win_release_extra_cflags%': '',
+      'win_debug_extra_cflags%': '',
+
+      # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+      'enable_wexit_time_destructors%': '<(enable_wexit_time_destructors)',
+
+      # Only used by Windows build for now.  Can be used to build into a
+      # different output directory, e.g., a build_dir_prefix of VS2010_ would
+      # output files in src/build/VS2010_{Debug,Release}.
+      'build_dir_prefix%': '',
+
+      # Targets are by default not nacl untrusted code.
+      'nacl_untrusted_build%': 0,
+
+      'pnacl_compile_flags': [
+        # pnacl uses the clang compiler so we need to suppress all the
+        # same warnings as we do for clang.
+        # TODO(sbc): Remove these if/when they are removed from the clang
+        # build.
+        '-Wno-unused-function',
+        '-Wno-char-subscripts',
+        '-Wno-c++11-extensions',
+        '-Wno-unnamed-type-template-args',
+      ],
+
+      # By default, Android targets have their exported JNI symbols stripped,
+      # so we test the manual JNI registration code paths that are required
+      # when using the crazy linker. To allow use of native JNI exports (lazily
+      # resolved by the JVM), targets can enable this variable, which will stop
+      # the stripping from happening. Only targets which do not need to be
+      # compatible with the crazy linker are permitted to set this.
+      'use_native_jni_exports%': 0,
+
+      'conditions': [
+        ['OS=="win" and component=="shared_library"', {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '2', # 2 = /MD (nondebug DLL)
+          'win_debug_RuntimeLibrary%': '3',   # 3 = /MDd (debug DLL)
+        }, {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '0', # 0 = /MT (nondebug static)
+          'win_debug_RuntimeLibrary%': '1',   # 1 = /MTd (debug static)
+        }],
+        ['OS=="ios"', {
+          # See http://gcc.gnu.org/onlinedocs/gcc-4.4.2/gcc/Optimize-Options.html
+          'mac_release_optimization%': 's', # Use -Os unless overridden
+          'mac_debug_optimization%': '0',   # Use -O0 unless overridden
+        }, {
+          # See http://gcc.gnu.org/onlinedocs/gcc-4.4.2/gcc/Optimize-Options.html
+          'mac_release_optimization%': '2', # Use -O2 unless overridden
+          'mac_debug_optimization%': '0',   # Use -O0 unless overridden
+        }],
+        ['OS=="android"', {
+          'host_os%': '<(host_os)',  # See comment above chromium_code.
+        }],
+      ],
+      'clang_warning_flags': [
+        '-Wheader-hygiene',
+
+        # Don't die on dtoa code that uses a char as an array index.
+        # This is required solely for base/third_party/dmg_fp/dtoa.cc.
+        '-Wno-char-subscripts',
+
+        # TODO(thakis): This used to be implied by -Wno-unused-function,
+        # which we no longer use. Check if it makes sense to remove
+        # this as well. http://crbug.com/316352
+        '-Wno-unneeded-internal-declaration',
+
+        # Warns on switches on enums that cover all enum values but
+        # also contain a default: branch. Chrome is full of that.
+        '-Wno-covered-switch-default',
+
+        # Warns when a const char[] is converted to bool.
+        '-Wstring-conversion',
+
+        # C++11-related flags:
+
+        # This warns on using ints as initializers for floats in
+        # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
+        # which happens in several places in chrome code. Not sure if
+        # this is worth fixing.
+        '-Wno-c++11-narrowing',
+
+        # Clang considers the `register` keyword as deprecated, but e.g.
+        # code generated by flex (used in angle) contains that keyword.
+        # http://crbug.com/255186
+        '-Wno-deprecated-register',
+
+        # TODO(hans): Get this cleaned up, http://crbug.com/428099
+        '-Wno-inconsistent-missing-override',
+
+        # TODO(thakis): Enable this, crbug.com/507717
+        '-Wno-shift-negative-value',
+      ],
+    },
+    'includes': [ 'set_clang_warning_flags.gypi', ],
+    'defines': [
+      # Don't use deprecated V8 APIs anywhere.
+      'V8_DEPRECATION_WARNINGS',
+    ],
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ],
+    'conditions': [
+      ['OS=="mac"', {
+        # When compiling Objective C, warns if a method is used whose
+        # availability is newer than the deployment target.
+        'xcode_settings': { 'WARNING_CFLAGS': ['-Wpartial-availability']},
+      }],
+      ['(OS=="mac" or OS=="ios") and asan==1', {
+        'dependencies': [
+          '<(DEPTH)/build/mac/asan.gyp:asan_dynamic_runtime',
+        ],
+      }],
+      ['OS=="win" and asan==1 and component=="shared_library"', {
+        'dependencies': [
+          '<(DEPTH)/build/win/asan.gyp:asan_dynamic_runtime',
+        ],
+      }],
+      ['OS=="linux" and use_allocator!="none" and clang_type_profiler==1', {
+        'cflags_cc!': ['-fno-rtti'],
+        'cflags_cc+': [
+          '-frtti',
+          '-gline-tables-only',
+          '-fintercept-allocation-functions',
+        ],
+        'defines': ['TYPE_PROFILING'],
+        'dependencies': [
+          '<(DEPTH)/base/allocator/allocator.gyp:type_profiler',
+        ],
+      }],
+      ['branding=="Chrome"', {
+        'defines': ['GOOGLE_CHROME_BUILD'],
+      }, {  # else: branding!="Chrome"
+        'defines': ['CHROMIUM_BUILD'],
+      }],
+      ['OS=="mac" and component=="shared_library"', {
+        'xcode_settings': {
+          'DYLIB_INSTALL_NAME_BASE': '@rpath',
+          'LD_RUNPATH_SEARCH_PATHS': [
+            # For unbundled binaries.
+            '@loader_path/.',
+            # For bundled binaries, to get back from Binary.app/Contents/MacOS.
+            '@loader_path/../../..',
+          ],
+        },
+      }],
+      ['clang==1 or host_clang==1', {
+        # This is here so that all files get recompiled after a clang roll and
+        # when turning clang on or off.
+        # (defines are passed via the command line, and build systems rebuild
+        # things when their commandline changes). Nothing should ever read this
+        # define.
+        'defines': ['CR_CLANG_REVISION=<!(python <(DEPTH)/tools/clang/scripts/update.py --print-revision)'],
+      }],
+      ['enable_rlz==1', {
+        'defines': ['ENABLE_RLZ'],
+      }],
+      ['component=="shared_library"', {
+        'defines': ['COMPONENT_BUILD'],
+      }],
+      ['ui_compositor_image_transport==1', {
+        'defines': ['UI_COMPOSITOR_IMAGE_TRANSPORT'],
+      }],
+      ['use_aura==1', {
+        'defines': ['USE_AURA=1'],
+      }],
+      ['use_ash==1', {
+        'defines': ['USE_ASH=1'],
+      }],
+      ['use_pango==1', {
+        'defines': ['USE_PANGO=1'],
+      }],
+      ['use_cairo==1', {
+        'defines': ['USE_CAIRO=1'],
+      }],
+      ['use_cras==1', {
+        'defines': ['USE_CRAS=1'],
+      }],
+      ['use_ozone==1', {
+        'defines': ['USE_OZONE=1'],
+      }],
+      ['use_default_render_theme==1', {
+        'defines': ['USE_DEFAULT_RENDER_THEME=1'],
+      }],
+      ['use_libjpeg_turbo==1', {
+        'defines': ['USE_LIBJPEG_TURBO=1'],
+      }],
+      ['use_x11==1', {
+        'defines': ['USE_X11=1'],
+      }],
+      ['use_clipboard_aurax11==1', {
+        'defines': ['USE_CLIPBOARD_AURAX11=1'],
+      }],
+      ['enable_one_click_signin==1', {
+        'defines': ['ENABLE_ONE_CLICK_SIGNIN'],
+      }],
+      ['enable_pre_sync_backup==1', {
+        'defines': ['ENABLE_PRE_SYNC_BACKUP'],
+      }],
+      ['image_loader_extension==1', {
+        'defines': ['IMAGE_LOADER_EXTENSION=1'],
+      }],
+      ['profiling==1', {
+        'defines': ['ENABLE_PROFILING=1'],
+      }],
+      ['remoting==1', {
+        'defines': ['ENABLE_REMOTING=1'],
+      }],
+      ['enable_webrtc==1', {
+        'defines': ['ENABLE_WEBRTC=1'],
+      }],
+      ['enable_media_router==1', {
+        'defines': ['ENABLE_MEDIA_ROUTER=1'],
+      }],
+      ['proprietary_codecs==1', {
+        'defines': ['USE_PROPRIETARY_CODECS'],
+        'conditions': [
+          ['enable_mpeg2ts_stream_parser==1', {
+            'defines': ['ENABLE_MPEG2TS_STREAM_PARSER'],
+          }],
+        ],
+      }],
+      ['enable_viewport==1', {
+        'defines': ['ENABLE_VIEWPORT'],
+      }],
+      ['enable_pepper_cdms==1', {
+        'defines': ['ENABLE_PEPPER_CDMS'],
+      }],
+      ['enable_browser_cdms==1', {
+        'defines': ['ENABLE_BROWSER_CDMS'],
+      }],
+      ['configuration_policy==1', {
+        'defines': ['ENABLE_CONFIGURATION_POLICY'],
+      }],
+      ['notifications==1', {
+        'defines': ['ENABLE_NOTIFICATIONS'],
+      }],
+      ['enable_hidpi==1', {
+        'defines': ['ENABLE_HIDPI=1'],
+      }],
+      ['enable_topchrome_md==1', {
+        'defines': ['ENABLE_TOPCHROME_MD=1'],
+      }],
+      ['native_memory_pressure_signals==1', {
+        'defines': ['SYSTEM_NATIVELY_SIGNALS_MEMORY_PRESSURE'],
+      }],
+      ['use_udev==1', {
+        'defines': ['USE_UDEV'],
+      }],
+      ['fastbuild!=0', {
+        'xcode_settings': {
+          'GCC_GENERATE_DEBUGGING_SYMBOLS': 'NO',
+        },
+        'conditions': [
+          ['OS=="win" and fastbuild==2', {
+            # Completely disable debug information.
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'GenerateDebugInformation': 'false',
+              },
+              'VCCLCompilerTool': {
+                'DebugInformationFormat': '0',
+              },
+            },
+          }],
+          ['OS=="win" and fastbuild==1', {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                # This tells the linker to generate .pdbs, so that
+                # we can get meaningful stack traces.
+                'GenerateDebugInformation': 'true',
+              },
+              'VCCLCompilerTool': {
+                # No debug info to be generated by compiler.
+                'DebugInformationFormat': '0',
+              },
+            },
+          }],
+          ['(OS=="android" or OS=="linux") and fastbuild==2', {
+            'variables': { 'debug_extra_cflags': '-g0', },
+          }],
+          ['(OS=="android" or OS=="linux") and fastbuild==1', {
+            # TODO(thakis): Change this to -g1 once http://crbug.com/456947 is
+            # fixed.
+            'variables': { 'debug_extra_cflags': '-g0', },
+          }],
+          # Android builds symbols on release by default, disable them.
+          ['OS=="android" and fastbuild==2', {
+            'variables': { 'release_extra_cflags': '-g0', },
+          }],
+          ['OS=="android" and fastbuild==1', {
+            # TODO(thakis): Change this to -g1 once http://crbug.com/456947 is
+            # fixed.
+            'variables': { 'release_extra_cflags': '-g0', },
+          }],
+        ],
+      }],  # fastbuild!=0
+      ['dont_embed_build_metadata==1', {
+        'defines': [
+          'DONT_EMBED_BUILD_METADATA',
+        ],
+      }],  # dont_embed_build_metadata==1
+      ['dcheck_always_on!=0', {
+        'defines': ['DCHECK_ALWAYS_ON=1'],
+      }],  # dcheck_always_on!=0
+      ['tracing_like_official_build!=0', {
+        'defines': ['TRACING_IS_OFFICIAL_BUILD=1'],
+      }],  # tracing_like_official_build!=0
+      ['OS=="win"', {
+        'defines': ['NO_TCMALLOC'],
+        'conditions': [
+          ['win_use_allocator_shim==1', {
+            'defines': ['ALLOCATOR_SHIM'],
+          }],
+        ],
+      }],
+      ['asan==1', {
+        'defines': [
+          'ADDRESS_SANITIZER',
+          'MEMORY_TOOL_REPLACES_ALLOCATOR',
+          'MEMORY_SANITIZER_INITIAL_SIZE',
+        ],
+      }],
+      ['syzyasan==1', {
+        # SyzyAsan needs /PROFILE turned on to produce appropriate pdbs.
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'Profile': 'true',
+          },
+        },
+        'defines': [
+            'SYZYASAN',
+            'MEMORY_TOOL_REPLACES_ALLOCATOR',
+            'MEMORY_SANITIZER_INITIAL_SIZE',
+        ],
+      }],
+      ['kasko==1', {
+        'defines': [
+            'KASKO',
+        ],
+        'include_dirs': [
+          '<(DEPTH)/third_party/kasko/include',
+        ],
+      }],
+      ['OS=="win"', {
+        'defines': [
+          '__STD_C',
+          '_CRT_SECURE_NO_DEPRECATE',
+          '_SCL_SECURE_NO_DEPRECATE',
+          # This define is required to pull in the new Win8 interfaces from
+          # system headers like ShObjIdl.h.
+          'NTDDI_VERSION=0x06030000',
+          # This is required for ATL to use XP-safe versions of its functions.
+          '_USING_V110_SDK71_',
+        ],
+        'include_dirs': [
+          '<(DEPTH)/third_party/wtl/include',
+        ],
+        'conditions': [
+          ['win_z7!=0', {
+            'msvs_settings': {
+              # Generates debug info when win_z7=1
+              # even if fastbuild=1 (that makes GenerateDebugInformation false).
+              'VCLinkerTool': {
+                'GenerateDebugInformation': 'true',
+              },
+              'VCCLCompilerTool': {
+                'DebugInformationFormat': '1',
+              }
+            }
+          }],  # win_z7!=0
+          ['win_analyze', {
+            'defines!': [
+              # This is prohibited when running /analyze.
+              '_USING_V110_SDK71_',
+            ],
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                # Set WarnAsError to false to disable this setting for most
+                # projects so that compilation continues.
+                'WarnAsError': 'false',
+                # When win_analyze is specified add the /analyze switch.
+                # Also add /WX- to force-disable WarnAsError for projects that
+                # override WarnAsError.
+                # Also, disable various noisy warnings that have low value.
+                'AdditionalOptions': [
+                  '/analyze:WX-',
+                  '/wd6011',  # Dereferencing NULL pointer
+                  '/wd6312',  # Possible infinite loop: use of the constant
+                    # EXCEPTION_CONTINUE_EXECUTION in the exception-filter
+                  '/wd6326',  # Potential comparison of constant with constant
+                  '/wd28159', # Consider using 'GetTickCount64'
+                  '/wd28204', # Inconsistent SAL annotations
+                  '/wd28251', # Inconsistent SAL annotations
+                  '/wd28252', # Inconsistent SAL annotations
+                  '/wd28253', # Inconsistent SAL annotations
+                  '/wd28196', # The precondition is not satisfied
+                  '/wd28301', # Inconsistent SAL annotations
+                  '/wd6340',  # Sign mismatch in function parameter
+                  '/wd28182', # Dereferencing NULL pointer
+                  # C6285 is ~16% of raw warnings and has low value
+                  '/wd6285',  # non-zero constant || non-zero constant
+                  # C6334 is ~80% of raw warnings and has low value
+                  '/wd6334',  # sizeof applied to an expression with an operator
+                ],
+              },
+            },
+          }],  # win_analyze
+        ],
+      }],  # OS==win
+      ['chromecast==1', {
+        'defines': [
+          'LOG_DISABLED=0',
+        ],
+        'conditions': [
+          ['use_playready==1', {
+            'defines': [
+              'PLAYREADY_CDM_AVAILABLE',
+            ],
+          }],
+        ],
+      }],
+      ['enable_task_manager==1', {
+        'defines': [
+          'ENABLE_TASK_MANAGER=1',
+        ],
+      }],
+      ['enable_extensions==1', {
+        'defines': [
+          'ENABLE_EXTENSIONS=1',
+        ],
+      }],
+      ['OS=="win" and branding=="Chrome"', {
+        'defines': ['ENABLE_SWIFTSHADER'],
+      }],
+      ['enable_dart==1', {
+        'defines': ['WEBKIT_USING_DART=1'],
+      }],
+      ['enable_plugin_installation==1', {
+        'defines': ['ENABLE_PLUGIN_INSTALLATION=1'],
+      }],
+      ['enable_plugins==1', {
+        'defines': ['ENABLE_PLUGINS=1'],
+      }],
+      ['enable_session_service==1', {
+        'defines': ['ENABLE_SESSION_SERVICE=1'],
+      }],
+      ['enable_themes==1', {
+        'defines': ['ENABLE_THEMES=1'],
+      }],
+      ['enable_autofill_dialog==1', {
+        'defines': ['ENABLE_AUTOFILL_DIALOG=1'],
+      }],
+      ['enable_prod_wallet_service==1', {
+        # In GN, this is set on the autofill targets only. See
+        # //components/autofill/core/browser:wallet_service
+        'defines': ['ENABLE_PROD_WALLET_SERVICE=1'],
+      }],
+      ['enable_background==1', {
+        'defines': ['ENABLE_BACKGROUND=1'],
+      }],
+      ['enable_google_now==1', {
+        'defines': ['ENABLE_GOOGLE_NOW=1'],
+      }],
+      ['cld_version!=0', {
+        'defines': ['CLD_VERSION=<(cld_version)'],
+      }],
+      ['enable_basic_printing==1 or enable_print_preview==1', {
+        # Convenience define for ENABLE_BASIC_PRINTING || ENABLE_PRINT_PREVIEW.
+        'defines': ['ENABLE_PRINTING=1'],
+      }],
+      ['enable_basic_printing==1', {
+        # Enable basic printing support and UI.
+        'defines': ['ENABLE_BASIC_PRINTING=1'],
+      }],
+      ['enable_print_preview==1', {
+        # Enable printing with print preview.
+        # Can be defined without ENABLE_BASIC_PRINTING.
+        'defines': ['ENABLE_PRINT_PREVIEW=1'],
+      }],
+      ['enable_spellcheck==1', {
+        'defines': ['ENABLE_SPELLCHECK=1'],
+      }],
+      ['use_platform_spellchecker', {
+        'defines': ['USE_PLATFORM_SPELLCHECKER=1'],
+      }],
+      ['enable_captive_portal_detection==1', {
+        'defines': ['ENABLE_CAPTIVE_PORTAL_DETECTION=1'],
+      }],
+      ['enable_app_list==1', {
+        'defines': ['ENABLE_APP_LIST=1'],
+      }],
+      ['enable_settings_app==1', {
+        'defines': ['ENABLE_SETTINGS_APP=1'],
+      }],
+      ['disable_file_support==1', {
+        'defines': ['DISABLE_FILE_SUPPORT=1'],
+      }],
+      ['disable_ftp_support==1', {
+        'defines': ['DISABLE_FTP_SUPPORT=1'],
+      }],
+      ['enable_supervised_users==1', {
+        'defines': ['ENABLE_SUPERVISED_USERS=1'],
+      }],
+      ['enable_mdns==1', {
+        'defines': ['ENABLE_MDNS=1'],
+      }],
+      ['enable_service_discovery==1', {
+        'defines' : [ 'ENABLE_SERVICE_DISCOVERY=1' ],
+      }],
+      ['enable_wifi_bootstrapping==1', {
+        'defines' : [ 'ENABLE_WIFI_BOOTSTRAPPING=1' ],
+      }],
+      ['enable_hangout_services_extension==1', {
+        'defines': ['ENABLE_HANGOUT_SERVICES_EXTENSION=1'],
+      }],
+      ['enable_ipc_fuzzer==1', {
+        'defines': ['ENABLE_IPC_FUZZER=1'],
+      }],
+      ['video_hole==1', {
+        'defines': ['VIDEO_HOLE=1'],
+      }],
+      ['v8_use_external_startup_data==1', {
+        'defines': ['V8_USE_EXTERNAL_STARTUP_DATA'],
+      }],
+      ['enable_webvr==1', {
+        'defines': ['ENABLE_WEBVR'],
+      }],
+
+      # SAFE_BROWSING_SERVICE - browser manages a safe-browsing service.
+      # SAFE_BROWSING_DB_LOCAL - service manages a local database.
+      # SAFE_BROWSING_DB_REMOTE - service talks via API to a database.
+      # SAFE_BROWSING_CSD - enable client-side phishing detection.
+      ['safe_browsing==1', {
+        'defines': [
+          # TODO(nparker): Remove existing uses of FULL_SAFE_BROWSING
+          'FULL_SAFE_BROWSING',
+          'SAFE_BROWSING_CSD',
+          'SAFE_BROWSING_DB_LOCAL',
+          'SAFE_BROWSING_SERVICE',
+        ],
+      }],
+      ['safe_browsing==2', {
+        'defines': [
+          # TODO(nparker): Remove existing uses of MOBILE_SAFE_BROWSING
+          'MOBILE_SAFE_BROWSING',
+          'SAFE_BROWSING_SERVICE',
+        ],
+      }],
+      ['safe_browsing==3', {
+        'defines': [
+          # TODO(nparker): Remove existing uses of MOBILE_SAFE_BROWSING
+          'MOBILE_SAFE_BROWSING',
+          'SAFE_BROWSING_DB_REMOTE',
+          'SAFE_BROWSING_SERVICE',
+        ],
+      }],
+    ],  # conditions for 'target_defaults'
+    'target_conditions': [
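+      # In target_conditions, '<(...)' variables are expanded early, while
+      # '>(...)' variables are expanded late, once per-target values such as
+      # nacl_untrusted_build are known.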
+      ['<(use_libpci)==1', {
+        'defines': ['USE_LIBPCI=1'],
+      }],
+      ['<(use_openssl)==1', {
+        'defines': ['USE_OPENSSL=1'],
+      }],
+      ['<(use_openssl_certs)==1', {
+        'defines': ['USE_OPENSSL_CERTS=1'],
+      }],
+      ['>(nacl_untrusted_build)==1', {
+        'defines': [
+          'USE_OPENSSL=1',
+          'USE_OPENSSL_CERTS=1',
+        ],
+      }],
+      ['<(use_glib)==1 and >(nacl_untrusted_build)==0', {
+        'defines': ['USE_GLIB=1'],
+      }],
+      ['<(use_nss_certs)==1 and >(nacl_untrusted_build)==0', {
+        'defines': ['USE_NSS_CERTS=1'],
+      }],
+      ['<(chromeos)==1 and >(nacl_untrusted_build)==0', {
+        'defines': ['OS_CHROMEOS=1'],
+      }],
+      ['enable_wexit_time_destructors==1 and OS!="win"', {
+        # TODO: Enable on Windows too, http://crbug.com/404525
+        'variables': { 'clang_warning_flags': ['-Wexit-time-destructors']},
+      }],
+      ['chromium_code==0', {
+        'variables': {
+          'clang_warning_flags': [
+            # TODO(mgiuca): Move this suppression into individual third-party
+            # libraries as required. http://crbug.com/505301.
+            '-Wno-overloaded-virtual',
+            # Lots of third-party libraries have unused variables. Instead of
+            # suppressing them individually, we just blanket suppress them here.
+            '-Wno-unused-variable',
+          ],
+        },
+        'conditions': [
+          [ 'os_posix==1 and OS!="mac" and OS!="ios"', {
+            # We don't want to get warnings from third-party code,
+            # so remove any existing warning-enabling flags like -Wall.
+            'cflags!': [
+              '-Wall',
+              '-Wextra',
+            ],
+            'cflags_cc': [
+              # Don't warn about hash_map in third-party code.
+              '-Wno-deprecated',
+            ],
+            'cflags': [
+              # Don't warn about printf format problems.
+              # This is off by default in gcc but on in Ubuntu's gcc(!).
+              '-Wno-format',
+            ],
+            'cflags_cc!': [
+              # Necessary because llvm.org/PR10448 is WONTFIX (crbug.com/90453).
+              '-Wsign-compare',
+            ]
+          }],
+          # TODO: Fix all warnings on chromeos too.
+          [ 'os_posix==1 and OS!="mac" and OS!="ios" and (clang!=1 or chromeos==1)', {
+            'cflags!': [
+              '-Werror',
+            ],
+          }],
+          [ 'os_posix==1 and os_bsd!=1 and OS!="mac" and OS!="android"', {
+            'cflags': [
+              # Don't warn about ignoring the return value from e.g. close().
+              # This is off by default in some gccs but on by default in others.
+              # BSD systems do not support this option, since they are usually
+              # using gcc 4.2.1, which does not have this flag yet.
+              '-Wno-unused-result',
+            ],
+          }],
+          [ 'OS=="win"', {
+            'defines': [
+              '_CRT_SECURE_NO_DEPRECATE',
+              '_CRT_NONSTDC_NO_WARNINGS',
+              '_CRT_NONSTDC_NO_DEPRECATE',
+              '_SCL_SECURE_NO_DEPRECATE',
+            ],
+            'msvs_disabled_warnings': [
+              4800,
+            ],
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'WarningLevel': '3',
+                'WarnAsError': 'true',
+                'Detect64BitPortabilityProblems': 'false',
+              },
+            },
+            'conditions': [
+              ['buildtype=="Official"', {
+                'msvs_settings': {
+                  'VCCLCompilerTool': { 'WarnAsError': 'false' },
+                }
+              }],
+              [ 'component=="shared_library"', {
+                # TODO(darin): Unfortunately, some third_party code depends
+                # on base.
+                'msvs_disabled_warnings': [
+                  4251,  # class 'std::xx' needs to have dll-interface.
+                ],
+              }],
+            ],
+          }],
+
+          [ 'OS=="mac" or OS=="ios"', {
+            'xcode_settings': {
+              'WARNING_CFLAGS!': ['-Wall', '-Wextra'],
+            },
+            'conditions': [
+              ['buildtype=="Official"', {
+                'xcode_settings': {
+                  'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO',    # -Werror
+                },
+              }],
+            ],
+          }],
+          [ 'OS=="ios"', {
+            'xcode_settings': {
+              # TODO(ios): Fix remaining warnings in third-party code, then
+              # remove this; the Mac cleanup didn't get everything that's
+              # flagged in an iOS build.
+              'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO',
+              'RUN_CLANG_STATIC_ANALYZER': 'NO',
+              # Several internal ios directories generate numerous warnings for
+              # -Wobjc-missing-property-synthesis.
+              'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
+            },
+          }],
+        ],
+      }, {
+        'includes': [
+          # Rules for excluding e.g. foo_win.cc from the build on non-Windows.
+          'filename_rules.gypi',
+        ],
+        # In Chromium code, we define __STDC_foo_MACROS in order to get the
+        # C99 macros on Mac and Linux.
+        'defines': [
+          '__STDC_CONSTANT_MACROS',
+          '__STDC_FORMAT_MACROS',
+        ],
+        'conditions': [
+          ['OS=="win"', {
+            # Turn on warnings for signed/unsigned mismatch in chromium code.
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': ['/we4389'],
+              },
+            },
+          }],
+          ['OS=="win" and component=="shared_library"', {
+            'msvs_disabled_warnings': [
+              4251,  # class 'std::xx' needs to have dll-interface.
+            ],
+          }],
+        ],
+      }],
+    ],  # target_conditions for 'target_defaults'
+    'default_configuration': 'Debug',
+    'configurations': {
+      # VCLinkerTool LinkIncremental values below:
+      #   0 == default
+      #   1 == /INCREMENTAL:NO
+      #   2 == /INCREMENTAL
+      # Debug links incremental, Release does not.
+      #
+      # Abstract base configurations to cover common attributes.
+      #
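+      # For example, the concrete 'Debug' configuration defined below is
+      # composed as Common_Base + x86_Base + Debug_Base via 'inherit_from'.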
+      'Common_Base': {
+        'abstract': 1,
+        'msvs_configuration_attributes': {
+          'OutputDirectory': '<(DEPTH)\\build\\<(build_dir_prefix)$(ConfigurationName)',
+          'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
+          'CharacterSet': '1',
+        },
+        'msvs_settings':{
+          'VCCLCompilerTool': {
+            'AdditionalOptions': [
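+              # /bigobj raises the COFF section limit per object file (from
+              # roughly 2^16 to 2^32 sections); heavily templated translation
+              # units can overflow the default limit.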
+              '/bigobj',
+            ],
+          },
+          'VCLinkerTool': {
+            # Add the default import libs.
+            'AdditionalDependencies': [
+              'kernel32.lib',
+              'gdi32.lib',
+              'winspool.lib',
+              'comdlg32.lib',
+              'advapi32.lib',
+              'shell32.lib',
+              'ole32.lib',
+              'oleaut32.lib',
+              'user32.lib',
+              'uuid.lib',
+              'odbc32.lib',
+              'odbccp32.lib',
+              'delayimp.lib',
+              'credui.lib',
+              'netapi32.lib',
+            ],
+            'AdditionalOptions': [
+              # Suggested by Microsoft Devrel to avoid
+              #   LINK : fatal error LNK1248: image size (80000000) exceeds maximum allowable size (80000000)
+              # which started happening more regularly after VS2013 Update 4.
+              # Needs to be a bit lower for VS2015, or else the link errors out.
+              '/maxilksize:0x7ff00000',
+            ],
+          },
+        },
+      },
+      'x86_Base': {
+        'abstract': 1,
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'MinimumRequiredVersion': '5.01',  # XP.
+            'TargetMachine': '1',
+          },
+          'VCLibrarianTool': {
+            'TargetMachine': '1',
+          },
+        },
+        'msvs_configuration_platform': 'Win32',
+      },
+      'x64_Base': {
+        'abstract': 1,
+        'msvs_configuration_platform': 'x64',
+        'msvs_settings': {
+          'VCLinkerTool': {
+            # Make sure to understand http://crbug.com/361720 if you want to
+            # increase this.
+            'MinimumRequiredVersion': '5.02',  # Server 2003.
+            'TargetMachine': '17', # x64
+            'AdditionalLibraryDirectories!':
+              ['<(windows_sdk_path)/Lib/win8/um/x86'],
+            'AdditionalLibraryDirectories':
+              ['<(windows_sdk_path)/Lib/win8/um/x64'],
+            # olepro32.lib doesn't exist in the x64 SDK; use oleaut32 in any case.
+            'IgnoreDefaultLibraryNames': [ 'olepro32.lib' ],
+          },
+          'VCLibrarianTool': {
+            'AdditionalLibraryDirectories!':
+              ['<(windows_sdk_path)/Lib/win8/um/x86'],
+            'AdditionalLibraryDirectories':
+              ['<(windows_sdk_path)/Lib/win8/um/x64'],
+            'TargetMachine': '17', # x64
+          },
+        },
+      },
+      'Debug_Base': {
+        'abstract': 1,
+        'defines': [
+          'DYNAMIC_ANNOTATIONS_ENABLED=1',
+          'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+        ],
+        'xcode_settings': {
+          'GCC_OPTIMIZATION_LEVEL': '<(mac_debug_optimization)',
+          'OTHER_CFLAGS': [
+            '<@(debug_extra_cflags)',
+          ],
+        },
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'Optimization': '<(win_debug_Optimization)',
+            'PreprocessorDefinitions': ['_DEBUG'],
+            'BasicRuntimeChecks': '<(win_debug_RuntimeChecks)',
+            'RuntimeLibrary': '<(win_debug_RuntimeLibrary)',
+            'conditions': [
+              # According to MSVS, InlineFunctionExpansion=0 means
+              # "default inlining", not "/Ob0".
+              # Thus, we have to handle InlineFunctionExpansion==0 separately.
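+              # For illustration: a value of 1 maps to /Ob1 (expand only
+              # functions marked inline) and 2 maps to /Ob2; only the 0 case
+              # needs the explicit /Ob0 handling below.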
+              ['win_debug_InlineFunctionExpansion==0', {
+                'AdditionalOptions': ['/Ob0'],
+              }],
+              ['win_debug_InlineFunctionExpansion!=""', {
+                'InlineFunctionExpansion':
+                  '<(win_debug_InlineFunctionExpansion)',
+              }],
+              ['win_debug_disable_iterator_debugging==1', {
+                'PreprocessorDefinitions': ['_HAS_ITERATOR_DEBUGGING=0'],
+              }],
+
+              # if win_debug_OmitFramePointers is blank, leave as default
+              ['win_debug_OmitFramePointers==1', {
+                'OmitFramePointers': 'true',
+              }],
+              ['win_debug_OmitFramePointers==0', {
+                'OmitFramePointers': 'false',
+                # The above is not sufficient (http://crbug.com/106711): it
+                # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+                # perform FPO regardless, so we must disable it explicitly.
+                # We still want the false setting above to avoid having
+                # "/Oy /Oy-" and warnings about overriding.
+                'AdditionalOptions': ['/Oy-'],
+              }],
+            ],
+            'AdditionalOptions': [ '<@(win_debug_extra_cflags)', ],
+          },
+          'VCLinkerTool': {
+            'LinkIncremental': '<(msvs_debug_link_incremental)',
+            # ASLR makes debugging with windbg difficult because Chrome.exe and
+            # Chrome.dll share the same base name. As a result, windbg will
+            # name the Chrome.dll module like chrome_<base address>, where
+            # <base address> typically changes with each launch. This in turn
+            # means that breakpoints in Chrome.dll don't stick from one launch
+            # to the next. For this reason, we turn ASLR off in debug builds.
+            # Note that this is a three-way bool, where 0 means to pick up
+            # the default setting, 1 is off and 2 is on.
+            'RandomizedBaseAddress': 1,
+          },
+          'VCResourceCompilerTool': {
+            'PreprocessorDefinitions': ['_DEBUG'],
+          },
+        },
+        'conditions': [
+          ['OS=="linux" or OS=="android"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '<@(debug_extra_cflags)',
+                ],
+              }],
+            ],
+          }],
+          ['OS=="linux" and target_arch!="ia32" and disable_glibcxx_debug==0', {
+            # Enable libstdc++ debugging facilities to help catch problems
+            # early, see http://crbug.com/65151 .
+            # TODO(phajdan.jr): Should we enable this for all of POSIX?
+            'defines': ['_GLIBCXX_DEBUG=1',],
+          }],
+          ['release_valgrind_build==0', {
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fstack-protector-all',  # Implies -fstack-protector
+              ],
+            },
+          }],
+          ['clang==1', {
+            'cflags': [
+              # Allow comparing the address of references and 'this' against 0
+              # in debug builds. Technically, these can never be null in
+              # well-defined C/C++ and Clang can optimize such checks away in
+              # release builds, but they may be used in asserts in debug builds.
+              '-Wno-undefined-bool-conversion',
+              '-Wno-tautological-undefined-compare',
+            ],
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-Wno-undefined-bool-conversion',
+                '-Wno-tautological-undefined-compare',
+              ],
+            },
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '-Wno-undefined-bool-conversion',
+                  '-Wno-tautological-undefined-compare',
+                ],
+              },
+            },
+          }],
+        ],
+      },
+      'Release_Base': {
+        'abstract': 1,
+        'defines': [
+          'NDEBUG',
+        ],
+        'xcode_settings': {
+          'DEAD_CODE_STRIPPING': 'YES',  # -Wl,-dead_strip
+          'GCC_OPTIMIZATION_LEVEL': '<(mac_release_optimization)',
+          'OTHER_CFLAGS': [ '<@(release_extra_cflags)', ],
+        },
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'RuntimeLibrary': '<(win_release_RuntimeLibrary)',
+            'conditions': [
+              # In official builds, each target will self-select
+              # an optimization level.
+              ['buildtype!="Official"', {
+                  'Optimization': '<(win_release_Optimization)',
+                },
+              ],
+              # According to MSVS, InlineFunctionExpansion=0 means
+              # "default inlining", not "/Ob0".
+              # Thus, we have to handle InlineFunctionExpansion==0 separately.
+              ['win_release_InlineFunctionExpansion==0', {
+                'AdditionalOptions': ['/Ob0'],
+              }],
+              ['win_release_InlineFunctionExpansion!=""', {
+                'InlineFunctionExpansion':
+                  '<(win_release_InlineFunctionExpansion)',
+              }],
+
+              # if win_release_OmitFramePointers is blank, leave as default
+              ['win_release_OmitFramePointers==1', {
+                'OmitFramePointers': 'true',
+              }],
+              ['win_release_OmitFramePointers==0', {
+                'OmitFramePointers': 'false',
+                # The above is not sufficient (http://crbug.com/106711): it
+                # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+                # perform FPO regardless, so we must disable it explicitly.
+                # We still want the false setting above to avoid having
+                # "/Oy /Oy-" and warnings about overriding.
+                'AdditionalOptions': ['/Oy-'],
+              }],
+              ['asan==0', {
+                # Put data in separate COMDATs. This allows the linker
+                # to put bit-identical constants at the same address even if
+                # they're unrelated constants, which saves binary size.
+                # This optimization can't be used when ASan is enabled because
+                # it is not compatible with the ASan ODR checker.
+                'AdditionalOptions': ['/Gw'],
+              }],
+            ],
+            'AdditionalOptions': [
+                '/d2Zi+',  # Improve debugging of Release builds.
+                '/Zc:inline',  # Remove unreferenced COMDAT (faster links).
+                '<@(win_release_extra_cflags)',
+            ],
+          },
+          'VCLinkerTool': {
+            # LinkIncremental is a tri-state boolean, where 0 means default
+            # (i.e., inherit from parent solution), 1 means false, and
+            # 2 means true.
+            'LinkIncremental': '1',
+            # This corresponds to the /PROFILE flag which ensures the PDB
+            # file contains FIXUP information (growing the PDB file by about
+            # 5%) but does not otherwise alter the output binary. This
+            # information is used by the Syzygy optimization tool when
+            # decomposing the release image.
+            'Profile': 'true',
+          },
+        },
+        'conditions': [
+          ['msvs_use_common_release', {
+            'includes': ['release.gypi'],
+          }],
+          ['release_valgrind_build==0 and tsan==0', {
+            'defines': [
+              'NVALGRIND',
+              'DYNAMIC_ANNOTATIONS_ENABLED=0',
+            ],
+          }, {
+            'defines': [
+              'MEMORY_TOOL_REPLACES_ALLOCATOR',
+              'MEMORY_SANITIZER_INITIAL_SIZE',
+              'DYNAMIC_ANNOTATIONS_ENABLED=1',
+              'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+            ],
+          }],
+          ['OS=="win"', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+          # _FORTIFY_SOURCE isn't really supported by Clang now, see
+          # http://llvm.org/bugs/show_bug.cgi?id=16821.
+          # It seems to work fine with Ubuntu 12 headers though, so use it
+          # in official builds.
+          ['os_posix==1 and (asan!=1 and msan!=1 and tsan!=1 and lsan!=1 and ubsan!=1) and (OS!="linux" or clang!=1 or buildtype=="Official")', {
+            'target_conditions': [
+              ['chromium_code==1', {
+                # Non-chromium code is not guaranteed to compile cleanly
+                # with _FORTIFY_SOURCE. Also, a fortified build may fail
+                # when optimizations are disabled, so only do this for
+                # Release builds.
+                'defines': [
+                  '_FORTIFY_SOURCE=2',
+                ],
+              }],
+            ],
+          }],
+          ['OS=="linux" or OS=="android"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '<@(release_extra_cflags)',
+                ],
+                'conditions': [
+                  ['enable_resource_whitelist_generation==1', {
+                    'cflags': [
+                      '-Wunknown-pragmas -Wno-error=unknown-pragmas',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['OS=="ios"', {
+            'defines': [
+              'NS_BLOCK_ASSERTIONS=1',
+            ],
+          }],
+        ],
+      },
+      #
+      # Concrete configurations
+      #
+      'Debug': {
+        'inherit_from': ['Common_Base', 'x86_Base', 'Debug_Base'],
+      },
+      'Release': {
+        'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'],
+      },
+      'conditions': [
+        [ 'OS=="ios"', {
+          'Profile': {
+            'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'],
+            'target_conditions': [
+              [ '_type=="executable"', {
+                # To get a real .dSYM bundle produced by dsymutil, set the
+                # debug information format to dwarf-with-dsym.  Since
+                # strip_from_xcode will not be used, set Xcode to do the
+                # stripping as well.
+                'xcode_settings': {
+                  'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+                  'DEPLOYMENT_POSTPROCESSING': 'YES',
+                  'STRIP_INSTALLED_PRODUCT': 'YES',
+                },
+              }],
+            ],
+          },
+        }],
+        [ 'OS=="win"', {
+          # TODO(bradnelson): add a gyp mechanism to make this more graceful.
+          'Debug_x64': {
+            'inherit_from': ['Common_Base', 'x64_Base', 'Debug_Base'],
+          },
+          'Release_x64': {
+            'inherit_from': ['Common_Base', 'x64_Base', 'Release_Base'],
+          },
+        }],
+      ],
+    },
+  },
+  'conditions': [
+    ['os_posix==1', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,-z,now',
+          '-Wl,-z,relro',
+        ],
+        # TODO(glider): enable the default options on other systems.
+        'conditions': [
+          ['use_sanitizer_options==1 and ((OS=="linux" and (chromeos==0 or target_arch!="ia32")) or OS=="mac")', {
+            'dependencies': [
+              '<(DEPTH)/build/sanitizers/sanitizers.gyp:sanitizer_options',
+            ],
+          }],
+        ],
+      },
+    }],
+    # TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
+    ['os_posix==1 and disable_fatal_linker_warnings==0 and use_evdev_gestures==0 and (chromeos==0 or target_arch!="arm")', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,--fatal-warnings',
+        ],
+      },
+    }],
+    # -Wl,-z,defs doesn't work with the sanitizers, http://crbug.com/452065
+    ['(OS=="linux" or OS=="android") and asan==0 and msan==0 and tsan==0 and ubsan==0 and ubsan_vptr==0', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,-z,defs',
+        ],
+      },
+    }],
+    ['os_posix==1 and chromeos==0', {
+      # Chrome OS enables -fstack-protector-strong via its build wrapper,
+      # and we want to avoid overriding this, so stack-protector is only
+      # enabled when not building on Chrome OS.
+      # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc
+      # supports it.
+      'target_defaults': {
+        'cflags': [
+          '-fstack-protector',
+          '--param=ssp-buffer-size=4',
+        ],
+      },
+    }],
+    ['os_posix==1 and OS=="linux"', {
+      'defines': [
+        '_LARGEFILE_SOURCE',
+        '_LARGEFILE64_SOURCE',
+        '_FILE_OFFSET_BITS=64',
+      ],
+    }],
+    ['os_posix==1 and OS!="mac" and OS!="ios"', {
+      'target_defaults': {
+        # Enable -Werror by default, but put it in a variable so it can
+        # be disabled in ~/.gyp/include.gypi on the valgrind builders.
+        'variables': {
+          'werror%': '-Werror',
+          'libraries_for_target%': '',
+        },
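+        # For illustration (a hypothetical override, not part of this file):
+        # the '%' suffix above makes -Werror a default, so a builder's
+        # ~/.gyp/include.gypi can turn it off with something like
+        #   { 'variables': { 'werror': '' } }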
+        'defines': [
+          '_FILE_OFFSET_BITS=64',
+        ],
+        'cflags': [
+          '<(werror)',  # See note above about the werror variable.
+          '-pthread',
+          '-fno-strict-aliasing',  # See http://crbug.com/32204
+          '-Wall',
+          # Don't warn about unused function params.  We use those everywhere.
+          '-Wno-unused-parameter',
+          # Don't warn about the "struct foo f = {0};" initialization pattern.
+          '-Wno-missing-field-initializers',
+          # Don't export any symbols (for example, to plugins we dlopen()).
+          # Note: this is *required* to make some plugins work.
+          '-fvisibility=hidden',
+          '-pipe',
+        ],
+        'cflags_cc': [
+          '-fno-exceptions',
+          '-fno-rtti',
+          '-fno-threadsafe-statics',
+          # Make inline functions have hidden visibility by default.
+          # Surprisingly, not covered by -fvisibility=hidden.
+          '-fvisibility-inlines-hidden',
+          # GCC turns on -Wsign-compare for C++ under -Wall, but clang doesn't,
+          # so we specify it explicitly.  (llvm.org/PR10448, crbug.com/90453)
+          '-Wsign-compare',
+        ],
+        'ldflags': [
+          '-pthread', '-Wl,-z,noexecstack',
+        ],
+        'libraries' : [
+          '<(libraries_for_target)',
+        ],
+        'configurations': {
+          'Debug_Base': {
+            'variables': {
+              'debug_optimize%': '0',
+            },
+            'defines': [
+              '_DEBUG',
+            ],
+            'cflags': [
+              '-O>(debug_optimize)',
+              '-g',
+            ],
+            'conditions' : [
+              ['OS=="android" and target_arch!="mipsel" and target_arch!="mips64el"', {
+                # TODO(jdduke) Re-enable on mips after resolving linking
+                # issues with libc++ (crbug.com/456380).
+                'ldflags': [
+                  # Warn in case of text relocations.
+                  '-Wl,--warn-shared-textrel',
+                ],
+              }],
+              ['OS=="android" and android_full_debug==0', {
+                # Some configurations are copied from Release_Base to reduce
+                # the binary size.
+                'variables': {
+                  'debug_optimize%': 's',
+                },
+                'cflags': [
+                  '-fdata-sections',
+                  '-ffunction-sections',
+                ],
+                'ldflags': [
+                  '-Wl,-O1',
+                  '-Wl,--as-needed',
+                ],
+              }],
+              ['OS=="android" and android_full_debug==0 and target_arch!="arm64"', {
+                # We don't omit frame pointers on arm64 since they are required
+                # to correctly unwind stackframes which contain system library
+                # function frames (crbug.com/391706).
+                'cflags': [
+                  '-fomit-frame-pointer',
+                ],
+              }],
+              ['OS=="linux" and target_arch=="ia32"', {
+                'ldflags': [
+                  '-Wl,--no-as-needed',
+                ],
+              }],
+              ['debug_unwind_tables==1', {
+                'cflags': ['-funwind-tables'],
+              }, {
+                'cflags': ['-fno-unwind-tables', '-fno-asynchronous-unwind-tables'],
+                'defines': ['NO_UNWIND_TABLES'],
+              }],
+              # TODO(mostynb): shuffle clang/gcc_version/binutils_version
+              # definitions into the right scope to use them when setting
+              # linux_use_debug_fission, so it can be used here alone.
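+              # -gsplit-dwarf moves most DWARF debug info into side .dwo
+              # files, so the linker reads much smaller object files.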
+              ['linux_use_debug_fission==1 and linux_use_gold_flags==1 and (clang==1 or gcc_version>=48) and binutils_version>=223', {
+                'cflags': ['-gsplit-dwarf'],
+              }],
+            ],
+          },
+          'Release_Base': {
+            'variables': {
+              'release_optimize%': '2',
+              # Binaries become big, and gold is unable to perform GC
+              # and remove unused sections for some test targets
+              # on 32-bit platforms.
+              # (This is currently observed only on chromeos valgrind bots.)
+              # The following flag disables the --gc-sections linker
+              # option for these bots.
+              'no_gc_sections%': 0,
+
+              # TODO(bradnelson): reexamine how this is done if we change the
+              # expansion of configurations
+              'release_valgrind_build%': 0,
+            },
+            'cflags': [
+              '-O<(release_optimize)',
+              # Don't emit the GCC version ident directives, they just end up
+              # in the .comment section taking up binary size.
+              '-fno-ident',
+              # Put data and code in their own sections, so that unused symbols
+              # can be removed at link time with --gc-sections.
+              '-fdata-sections',
+              '-ffunction-sections',
+            ],
+            'ldflags': [
+              # Specifically tell the linker to perform optimizations.
+              # See http://lwn.net/Articles/192624/ .
+              '-Wl,-O1',
+              '-Wl,--as-needed',
+            ],
+            'conditions' : [
+              ['no_gc_sections==0', {
+                'ldflags': [
+                  '-Wl,--gc-sections',
+                ],
+              }],
+              ['OS=="android" and target_arch!="arm64"', {
+                # We don't omit frame pointers on arm64 since they are required
+                # to correctly unwind stackframes which contain system library
+                # function frames (crbug.com/391706).
+                'cflags': [
+                  '-fomit-frame-pointer',
+                ]
+              }],
+              ['OS=="android" and target_arch!="mipsel" and target_arch!="mips64el"', {
+                # TODO(jdduke) Re-enable on mips after resolving linking
+                # issues with libc++ (crbug.com/456380).
+                'ldflags': [
+                  # Warn in case of text relocations.
+                  '-Wl,--warn-shared-textrel',
+                ],
+              }],
+              ['OS=="android"', {
+                'variables': {
+                  'release_optimize%': 's',
+                },
+              }],
+              ['profiling==1', {
+                'cflags': [
+                  '-fno-omit-frame-pointer',
+                  '-g',
+                ],
+                'conditions' : [
+                  ['profiling_full_stack_frames==1', {
+                    'cflags': [
+                      '-fno-inline',
+                      '-fno-optimize-sibling-calls',
+                    ],
+                  }],
+                ],
+              }],
+              ['release_unwind_tables==1', {
+                'cflags': ['-funwind-tables'],
+              }, {
+                'cflags': ['-fno-unwind-tables', '-fno-asynchronous-unwind-tables'],
+                'defines': ['NO_UNWIND_TABLES'],
+              }],
+            ],
+          },
+        },
+        'conditions': [
+          ['target_arch=="ia32"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'asflags': [
+                  # Needed so that libs with .s files (e.g. libicudata.a)
+                  # are compatible with the general 32-bit-ness.
+                  '-32',
+                ],
+                # All floating-point computations on x87 happen in 80-bit
+                # precision.  Because the C and C++ language standards allow
+                # the compiler to keep floating-point values in higher
+                # precision than what's specified in the source, and doing so
+                # is more efficient than constantly rounding to the 64-bit or
+                # 32-bit precision specified in the source, the compiler,
+                # especially in optimized mode, tries very hard to keep
+                # values on the x87 floating-point stack (in 80-bit precision)
+                # as long as possible. This has an important side effect: the
+                # real value used in a computation may change depending on
+                # how the compiler did the optimization - that is, the value
+                # kept in 80 bits differs from the value rounded down to
+                # 64-bit or 32-bit precision. There are compiler options that
+                # make this behavior consistent (e.g. -ffloat-store would
+                # keep all floating-point values in memory, forcing them to
+                # be rounded to their original precision), but they carry a
+                # significant runtime performance penalty.
+                #
+                # -mfpmath=sse -msse2 makes the compiler use SSE instructions
+                # which keep floating-point values in SSE registers in their
+                # native precision (32-bit for single-precision, and 64-bit
+                # for double precision values). This means the floating-point
+                # value used during computation does not change depending on
+                # how the compiler optimized the code, since the value is
+                # always kept in its specified precision.
+                #
+                # Refer to http://crbug.com/348761 for rationale behind SSE2
+                # being a minimum requirement for 32-bit Linux builds and
+                # http://crbug.com/313032 for an example where this has "bit"
+                # us in the past.
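+                # For illustration (a sketch, not exercised by this build):
+                #   double d = 1e308;
+                #   d = d * 10.0 / 10.0;
+                # On x87 the 80-bit intermediate can hold 1e309, so d may
+                # come back as 1e308; with -mfpmath=sse the multiply
+                # overflows to +inf at 64-bit precision, so d is inf.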
+                'cflags': [
+                  '-msse2',
+                  '-mfpmath=sse',
+                  '-mmmx',  # Allows mmintrin.h for MMX intrinsics.
+                  '-m32',
+                ],
+                'ldflags': [
+                  '-m32',
+                ],
+                'conditions': [
+                  # Use gold linker for Android ia32 target.
+                  ['OS=="android"', {
+                    'ldflags': [
+                      '-fuse-ld=gold',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="x64"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  # Use gold linker for Android x64 target.
+                  ['OS=="android"', {
+                    'ldflags': [
+                      '-fuse-ld=gold',
+                    ],
+                  }],
+                ],
+                'cflags': [
+                  '-m64',
+                  '-march=x86-64',
+                ],
+                'ldflags': [
+                  '-m64',
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="arm"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['clang==0', {
+                    'cflags_cc': [
+                      # The codesourcery arm-2009q3 toolchain warns that the ABI
+                      # has changed whenever it encounters a varargs function. This
+                      # silences those warnings, as they are not helpful and
+                      # clutter legitimate warnings.
+                      '-Wno-abi',
+                    ],
+                  }],
+                  ['clang==1 and arm_arch!="" and OS!="android"', {
+                    'cflags': [
+                      '-target arm-linux-gnueabihf',
+                    ],
+                    'ldflags': [
+                      '-target arm-linux-gnueabihf',
+                    ],
+                  }],
+                  ['arm_arch!=""', {
+                    'cflags': [
+                      '-march=<(arm_arch)',
+                    ],
+                    'conditions': [
+                      ['use_lto==1 or use_lto_o2==1', {
+                        'ldflags': [
+                          '-march=<(arm_arch)',
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['clang==1 and OS!="android"', {
+                    'cflags': [
+                      # We need to disable clang's builtin assembler as it can't
+                      # handle several asm files, crbug.com/124610
+                      '-no-integrated-as',
+                    ],
+                  }],
+                  ['arm_tune!=""', {
+                    'cflags': [
+                      '-mtune=<(arm_tune)',
+                    ],
+                    'conditions': [
+                      ['use_lto==1 or use_lto_o2==1', {
+                        'ldflags': [
+                          '-mtune=<(arm_tune)',
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['arm_fpu!=""', {
+                    'cflags': [
+                      '-mfpu=<(arm_fpu)',
+                    ],
+                    'conditions': [
+                      ['use_lto==1 or use_lto_o2==1', {
+                        'ldflags': [
+                          '-mfpu=<(arm_fpu)',
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['arm_float_abi!=""', {
+                    'cflags': [
+                      '-mfloat-abi=<(arm_float_abi)',
+                    ],
+                    'conditions': [
+                      ['use_lto==1 or use_lto_o2==1', {
+                        'ldflags': [
+                          '-mfloat-abi=<(arm_float_abi)',
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['arm_thumb==1', {
+                    'cflags': [
+                      '-mthumb',
+                    ],
+                    'conditions': [
+                      ['use_lto==1 or use_lto_o2==1', {
+                        'ldflags': [
+                          '-mthumb',
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['OS=="android"', {
+                    # Most of the following flags are derived from what Android
+                    # uses by default when building for arm, reference for which
+                    # can be found in the following file in the Android NDK:
+                    # toolchains/arm-linux-androideabi-4.9/setup.mk
+                    'cflags': [
+                      # The tree-sra optimization (scalar replacement for
+                      # aggregates enabling subsequent optimizations) leads to
+                      # invalid code generation when using the Android NDK's
+                      # compiler (r5-r7). This can be verified using
+                      # webkit_unit_tests' WTF.Checked_int8_t test.
+                      '-fno-tree-sra',
+                      # The following option is disabled to improve binary
+                      # size and performance in gcc 4.9.
+                      '-fno-caller-saves',
+                      '-Wno-psabi',
+                    ],
+                    # Android now supports .relro sections properly.
+                    # NOTE: While these flags enable the generation of .relro
+                    # sections, the generated libraries can still be loaded on
+                    # older Android platform versions.
+                    'ldflags': [
+                        '-Wl,-z,relro',
+                        '-Wl,-z,now',
+                        '-fuse-ld=gold',
+                    ],
+                    'conditions': [
+                      ['gcc_version==48 and clang==0', {
+                        'cflags': [
+                          # The following 5 options are disabled to save on
+                          # binary size in GCC 4.8.
+                          '-fno-partial-inlining',
+                          '-fno-early-inlining',
+                          '-fno-tree-copy-prop',
+                          '-fno-tree-loop-optimize',
+                          '-fno-move-loop-invariants',
+                        ],
+                      }],
+                      ['arm_thumb==1', {
+                        'cflags': [ '-mthumb-interwork' ],
+                      }],
+                      ['profiling==1', {
+                        'cflags': [
+                          # Thumb code with frame pointer makes chrome crash
+                          # early.
+                          '-marm',
+                          '-mapcs-frame', # Required by -fno-omit-frame-pointer.
+                          # The perf report sometimes incorrectly attributes
+                          # code from tail calls.
+                          '-fno-optimize-sibling-calls',
+                        ],
+                        'cflags!': [
+                          '-fomit-frame-pointer',
+                        ],
+                      }],
+                      ['clang==1', {
+                        'cflags!': [
+                          # Clang does not support the following options.
+                          '-mapcs-frame',
+                          '-mthumb-interwork',
+                          '-finline-limit=64',
+                          '-fno-tree-sra',
+                          '-fno-caller-saves',
+                          '-Wno-psabi',
+                        ],
+                        'cflags': [
+                          # TODO(hans) Enable integrated-as (crbug.com/124610).
+                          '-no-integrated-as',
+                          '-B<(android_toolchain)',  # Else /usr/bin/as gets picked up.
+                        ],
+                      }],
+                      ['clang==1 and linux_use_bundled_gold==0', {
+                        'ldflags': [
+                          # Let clang find the ld.gold in the NDK.
+                          '--gcc-toolchain=<(android_toolchain)/..',
+                        ],
+                      }],
+                      ['asan==1', {
+                        'cflags': [
+                          '-marm', # Required for frame pointer based stack traces.
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['chromecast==1', {
+                    'cflags': [
+                      # We set arm_arch to "" so that the -march compiler
+                      # option is not set.  Otherwise a gcc bug would complain
+                      # about it conflicting with '-mcpu=cortex-a9'. The flag
+                      # '-march=armv7-a' is actually redundant anyway because
+                      # it was enabled by default when we built the toolchain,
+                      # and using '-mcpu=cortex-a9' should be sufficient.
+                      '-mcpu=cortex-a9',
+                      '-funwind-tables',
+                      # Breakpad requires symbols with debugging information
+                      '-g',
+                    ],
+                    'ldflags': [
+                      # We want to statically link libstdc++/libgcc_s.
+                      '-static-libstdc++',
+                      '-static-libgcc',
+                    ],
+                    'cflags!': [
+                      # Some components in Chromium (e.g. v8, skia, ffmpeg)
+                      # define their own cflags for arm builds that could
+                      # conflict with the flags we set here (e.g.
+                      # '-mcpu=cortex-a9'). Remove these flags explicitly.
+                      '-march=armv7-a',
+                      '-mtune=cortex-a8',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="arm64"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['OS=="android"', {
+                    'cflags!': [
+                      '-fstack-protector',  # stack protector is always enabled on arm64.
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="mipsel"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['mips_arch_variant=="r6"', {
+                    'conditions': [
+                      ['clang==1', {
+                        'cflags': [ '-target mipsel-linux-gnu', '-march=mips32r6', ],
+                        'ldflags': [ '-target mipsel-linux-gnu', ],
+                      }, { # clang==0
+                        'cflags': ['-mips32r6', '-Wa,-mips32r6', ],
+                      }],
+                      ['clang==0 and OS=="android"', {
+                        'ldflags': ['-mips32r6', '-Wl,-melf32ltsmip',],
+                      }],
+                    ],
+                  }],
+                  ['mips_arch_variant=="r2"', {
+                    'conditions': [
+                      ['mips_float_abi=="hard" and mips_fpu_mode!=""', {
+                        'cflags': ['-m<(mips_fpu_mode)'],
+                      }],
+                      ['clang==1', {
+                         'conditions': [
+                          ['OS=="android"', {
+                            'cflags': [ '-target mipsel-linux-android', '-march=mipsel', '-mcpu=mips32r2'],
+                            'ldflags': [ '-target mipsel-linux-android', ],
+                          }, {
+                            'cflags': [ '-target mipsel-linux-gnu', '-march=mipsel', '-mcpu=mips32r2'],
+                            'ldflags': [ '-target mipsel-linux-gnu', ],
+                          }],
+                         ],
+                      }, { # clang==0
+                        'cflags': ['-mips32r2', '-Wa,-mips32r2', ],
+                      }],
+                    ],
+                  }],
+                  ['mips_arch_variant=="r1"', {
+                    'conditions': [
+                      ['clang==1', {
+                        'conditions': [
+                          ['OS=="android"', {
+                            'cflags': [ '-target mipsel-linux-android', '-march=mipsel', '-mcpu=mips32'],
+                            'ldflags': [ '-target mipsel-linux-android', ],
+                          }, {
+                            'cflags': [ '-target mipsel-linux-gnu', '-march=mipsel', '-mcpu=mips32'],
+                            'ldflags': [ '-target mipsel-linux-gnu', ],
+                          }],
+                        ],
+                      }, { # clang==0
+                        'cflags': ['-mips32', '-Wa,-mips32', ],
+                      }],
+                    ],
+                  }],
+                  ['clang==1', {
+                    'cflags!': [
+                      # Clang does not support the following options.
+                      '-finline-limit=64',
+                    ],
+                    'cflags': [
+                      # TODO(gordanac) Enable integrated-as.
+                      '-no-integrated-as',
+                    ],
+                  }],
+                  ['clang==1 and OS=="android"', {
+                    'cflags': [
+                      '-B<(android_toolchain)',  # Else /usr/bin/as gets picked up.
+                    ],
+                    'ldflags': [
+                      # Let clang find the ld in the NDK.
+                      '--gcc-toolchain=<(android_toolchain)/..',
+                    ],
+                  }],
+                  ['mips_dsp_rev==1', {
+                    'cflags': ['-mdsp'],
+                  }],
+                  ['mips_dsp_rev==2', {
+                    'cflags': ['-mdspr2'],
+                  }],
+                ],
+                'cflags': [
+                  '-m<(mips_float_abi)-float'
+                ],
+                'ldflags': [
+                  '-Wl,--no-keep-memory'
+                ],
+                'cflags_cc': [
+                  '-Wno-uninitialized',
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="mips64el"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['mips_arch_variant=="r6"', {
+                    'cflags': ['-mips64r6', '-Wa,-mips64r6'],
+                    'ldflags': ['-mips64r6'],
+                  }],
+                  ['mips_arch_variant=="r2"', {
+                    'cflags': ['-mips64r2', '-Wa,-mips64r2'],
+                    'ldflags': ['-mips64r2'],
+                  }],
+                ],
+                'cflags_cc': [
+                  '-Wno-uninitialized',
+                ],
+              }],
+            ],
+          }],
+          ['linux_fpic==1', {
+            'cflags': [
+              '-fPIC',
+            ],
+            'ldflags': [
+              '-fPIC',
+            ],
+          }],
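+          # For illustration: in the block below, '<!(...)' runs its command
+          # at gyp time and splices the stdout into the list, so
+          # sysroot_ld_path.sh presumably emits the extra linker search-path
+          # flags for the given sysroot.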
+          ['sysroot!=""', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '--sysroot=<(sysroot)',
+                ],
+                'ldflags': [
+                  '--sysroot=<(sysroot)',
+                  '<!(<(DEPTH)/build/linux/sysroot_ld_path.sh <(sysroot))',
+                ],
+              }],
+            ],
+          }],
+          ['clang==1', {
+            'cflags': [
+              # TODO(thakis): Remove, http://crbug.com/263960
+              '-Wno-reserved-user-defined-literal',
+            ],
+            'cflags_cc': [
+              # gnu++11 instead of c++11 is needed because some code uses
+              # typeof() (a GNU extension).
+              # TODO(thakis): Eventually switch this to c++11 instead,
+              # http://crbug.com/427584
+              '-std=gnu++11',
+            ],
+          }],
+          ['clang==0 and host_clang==1', {
+            'target_conditions': [
+              ['_toolset=="host"', {
+                'cflags_cc': [ '-std=gnu++11', ],
+              }],
+            ],
+          }],
+          ['clang==1 and clang_use_chrome_plugins==1', {
+            'cflags': [
+              '<@(clang_chrome_plugins_flags)',
+            ],
+          }],
+          ['clang==1 and clang_load!=""', {
+            'cflags': [
+              '-Xclang', '-load', '-Xclang', '<(clang_load)',
+            ],
+          }],
+          ['clang==1 and clang_add_plugin!=""', {
+            'cflags': [
+              '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+            ],
+          }],
+          ['clang==1 and target_arch=="ia32"', {
+            'cflags': [
+              # Otherwise building libyuv runs into clang register allocator
+              # issues; see llvm.org/PR15798 / crbug.com/233709
+              '-momit-leaf-frame-pointer',
+              # Align the stack on 16-byte boundaries, http://crbug.com/418554.
+              '-mstack-alignment=16',
+              '-mstackrealign',
+            ],
+          }],
+          ['clang==1 and "<(GENERATOR)"=="ninja"', {
+            'cflags': [
+              # See http://crbug.com/110262
+              '-fcolor-diagnostics',
+            ],
+          }],
+          # Common options for AddressSanitizer, LeakSanitizer,
+          # ThreadSanitizer and MemorySanitizer.
+          ['asan==1 or lsan==1 or tsan==1 or msan==1 or ubsan==1 or ubsan_vptr==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fno-omit-frame-pointer',
+                  '-gline-tables-only',
+                ],
+                'cflags!': [
+                  '-fomit-frame-pointer',
+                ],
+              }],
+            ],
+          }],
+          ['asan==1 or lsan==1 or tsan==1 or msan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'ldflags!': [
+                  # Functions interposed by the sanitizers can make ld think
+                  # that some libraries aren't needed when they actually are,
+                  # http://crbug.com/234010. As a workaround, disable --as-needed.
+                  '-Wl,--as-needed',
+                ],
+                'defines': [
+                  'MEMORY_TOOL_REPLACES_ALLOCATOR',
+                  'MEMORY_SANITIZER_INITIAL_SIZE',
+                ],
+              }],
+            ],
+          }],
+          ['asan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=address',
+                  # TODO(earthdok): Re-enable. http://crbug.com/427202
+                  #'-fsanitize-blacklist=<(asan_blacklist)',
+                ],
+                'ldflags': [
+                  '-fsanitize=address',
+                ],
+              }],
+            ],
+            'conditions': [
+              ['OS=="mac"', {
+                'cflags': [
+                  '-mllvm -asan-globals=0',  # http://crbug.com/352073
+                ],
+              }],
+            ],
+          }],
+          ['ubsan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  # FIXME: work on enabling more flags and getting rid of false
+                  # positives. http://crbug.com/174801.
+                  '-fsanitize=bounds',
+                  '-fsanitize=float-divide-by-zero',
+                  '-fsanitize=integer-divide-by-zero',
+                  '-fsanitize=null',
+                  '-fsanitize=object-size',
+                  '-fsanitize=return',
+                  '-fsanitize=returns-nonnull-attribute',
+                  '-fsanitize=shift-exponent',
+                  '-fsanitize=signed-integer-overflow',
+                  '-fsanitize=unreachable',
+                  '-fsanitize=vla-bound',
+                  '-fsanitize-blacklist=<(ubsan_blacklist)',
+                  # Employ the experimental PBQP register allocator to avoid
+                  # slow compilation on files with too many basic blocks.
+                  # See http://crbug.com/426271.
+                  '-mllvm -regalloc=pbqp',
+                  # Speculatively use coalescing to slightly improve the code
+                  # generated by the PBQP register allocator. May increase
+                  # compile time.
+                  '-mllvm -pbqp-coalescing',
+                ],
+                'cflags_cc!': [
+                  '-fno-rtti',
+                ],
+                'cflags!': [
+                  '-fno-rtti',
+                ],
+                'ldflags': [
+                  '-fsanitize=undefined',
+                ],
+                'defines': [
+                  'UNDEFINED_SANITIZER',
+                ],
+              }],
+            ],
+          }],
+          ['ubsan_vptr==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=vptr',
+                  '-fsanitize-blacklist=<(ubsan_vptr_blacklist)',
+                ],
+                'cflags_cc!': [
+                  '-fno-rtti',
+                ],
+                'cflags!': [
+                  '-fno-rtti',
+                ],
+                'ldflags': [
+                  '-fsanitize=vptr',
+                ],
+                'defines': [
+                  'UNDEFINED_SANITIZER',
+                ],
+              }],
+            ],
+          }],
+          ['asan_coverage!=0 and sanitizer_coverage==0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize-coverage=<(asan_coverage)',
+                ],
+                'defines': [
+                  'SANITIZER_COVERAGE',
+                ],
+              }],
+            ],
+          }],
+          ['sanitizer_coverage!=0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize-coverage=<(sanitizer_coverage)',
+                ],
+                'defines': [
+                  'SANITIZER_COVERAGE',
+                ],
+              }],
+            ],
+          }],
+          ['asan_field_padding!=0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize-address-field-padding=<(asan_field_padding)',
+                ],
+              }],
+            ],
+          }],
+          ['lsan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=leak',
+                ],
+                'ldflags': [
+                  '-fsanitize=leak',
+                ],
+                'defines': [
+                  'LEAK_SANITIZER',
+                  'WTF_USE_LEAK_SANITIZER=1',
+                ],
+              }],
+            ],
+          }],
+          ['tsan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=thread',
+                  '-fsanitize-blacklist=<(tsan_blacklist)',
+                ],
+                'ldflags': [
+                  '-fsanitize=thread',
+                ],
+                'defines': [
+                  'THREAD_SANITIZER',
+                  'DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1',
+                  'WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1',
+                ],
+              }],
+            ],
+          }],
+          ['msan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=memory',
+                  '-fsanitize-memory-track-origins=<(msan_track_origins)',
+                  '-fsanitize-blacklist=<(msan_blacklist)',
+                ],
+                'ldflags': [
+                  '-fsanitize=memory',
+                ],
+                'defines': [
+                  'MEMORY_SANITIZER',
+                ],
+              }],
+            ],
+          }],
+          ['use_instrumented_libraries==1', {
+            'dependencies': [
+              '<(DEPTH)/third_party/instrumented_libraries/instrumented_libraries.gyp:instrumented_libraries',
+            ],
+          }],
+          ['use_prebuilt_instrumented_libraries==1', {
+            'dependencies': [
+              '<(DEPTH)/third_party/instrumented_libraries/instrumented_libraries.gyp:prebuilt_instrumented_libraries',
+            ],
+          }],
+          ['use_custom_libcxx==1', {
+            'dependencies': [
+              '<(DEPTH)/buildtools/third_party/libc++/libc++.gyp:libcxx_proxy',
+            ],
+          }],
+          ['order_profiling!=0 and (chromeos==1 or OS=="linux" or OS=="android")', {
+            'target_conditions' : [
+              # crazy_linker has an upstream gyp file we can't edit, and we
+              # don't want to instrument it.
+              ['_toolset=="target" and _target_name!="crazy_linker"', {
+                'cflags': [
+                  '-finstrument-functions',
+                  # Allow mmx intrinsics to inline, so that the
+                  # compiler can expand the intrinsics.
+                  '-finstrument-functions-exclude-file-list=mmintrin.h',
+                ],
+              }],
+              ['_toolset=="target" and OS=="android"', {
+                'cflags': [
+                  # Avoids errors with current NDK:
+                  # "third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/include/arm_neon.h:3426:3: error: argument must be a constant"
+                  '-finstrument-functions-exclude-file-list=arm_neon.h,SaturatedArithmeticARM.h',
+                ],
+              }],
+            ],
+          }],
+          ['linux_dump_symbols==1', {
+            'cflags': [ '-g' ],
+            'conditions': [
+              ['OS=="linux" and host_arch=="ia32" and linux_use_bundled_gold==0', {
+                'target_conditions': [
+                  ['_toolset=="target"', {
+                    'ldflags': [
+                      # Attempt to use less memory to prevent the linker from
+                      # running out of address space. Consider installing a
+                      # 64-bit kernel and switching to a 64-bit linker.
+                      '-Wl,--no-keep-memory',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['use_allocator!="tcmalloc"', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+          ['linux_use_gold_flags==1', {
+            # Newer gccs and clangs support -fuse-ld; use the flag to force
+            # gold selection.
+            # gcc -- http://gcc.gnu.org/onlinedocs/gcc-4.8.0/gcc/Optimize-Options.html
+            'ldflags': [ '-fuse-ld=gold', ],
+
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'ldflags': [
+                  # Experimentation found that using four linking threads
+                  # saved ~20% of link time.
+                  # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
+                  # Only apply this to the target linker, since the host
+                  # linker might not be gold, but isn't used much anyway.
+                  # TODO(raymes): Disable threading because gold is frequently
+                  # crashing on the bots: crbug.com/161942.
+                  # '-Wl,--threads',
+                  # '-Wl,--thread-count=4',
+                ],
+                'conditions': [
+                  # TODO(thestig): Enable this for disabled cases.
+                  [ 'buildtype!="Official" and chromeos==0 and release_valgrind_build==0 and asan==0 and lsan==0 and tsan==0 and msan==0 and ubsan==0 and ubsan_vptr==0', {
+                    'ldflags': [
+                      '-Wl,--detect-odr-violations',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+            'conditions': [
+              ['release_valgrind_build==0 and order_profiling==0 and asan==0 and msan==0 and lsan==0 and tsan==0', {
+                'target_conditions': [
+                  ['_toolset=="target"', {
+                    'ldflags': [
+                      '-Wl,--icf=<(gold_icf_level)',
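+                      # (Hedged note: gold's --icf folds functions whose
+                      # compiled bodies are identical; gold accepts the
+                      # levels "none", "safe" and "all", and gold_icf_level
+                      # is presumably set to one of these elsewhere.)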
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['linux_use_bundled_binutils==1', {
+            'cflags': [
+              '-B<!(cd <(DEPTH) && pwd -P)/<(binutils_dir)',
+            ],
+          }],
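+          # Hedged note on the <!(...) construct used in the -B flags here
+          # and below: gyp runs the shell command at generation time and
+          # splices its stdout into the flag, so -B ends up holding an
+          # absolute path to the bundled binutils directory.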
+          ['linux_use_bundled_gold==1 and '
+           'not (clang==0 and (use_lto==1 or use_lto_o2==1))', {
+            # Put our binutils, which contains gold, in the search path. We pass
+            # the path to gold to the compiler. gyp leaves unspecified what the
+            # cwd is when running the compiler, so the normal gyp path-munging
+            # fails us. This hack gets the right path.
+            #
+            # Disabled when using GCC LTO because GCC also uses the -B search
+            # path at link time to find "as", and our bundled "as" can only
+            # target x86.
+            'ldflags': [
+              '-B<!(cd <(DEPTH) && pwd -P)/<(binutils_dir)',
+            ],
+          }],
+          # Some binutils 2.23 releases may or may not have new dtags enabled,
+          # but they are all compatible with --disable-new-dtags,
+          # because the new dynamic tags are not created by default.
+          ['binutils_version>=223', {
+            # Newer binutils don't set DT_RPATH unless you disable "new" dtags,
+            # and the new DT_RUNPATH doesn't work without the --no-as-needed flag.
+            # FIXME(mithro): Figure out the --as-needed/--no-as-needed flags
+            # inside this file to allow usage of --no-as-needed and removal of
+            # this flag.
+            'ldflags': [
+              '-Wl,--disable-new-dtags',
+            ],
+          }],
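+          # Hedged illustration: with --disable-new-dtags, an rpath baked
+          # into a binary is recorded as a classic DT_RPATH entry rather
+          # than DT_RUNPATH; `readelf -d <binary>` would then show "(RPATH)"
+          # instead of "(RUNPATH)".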
+          ['gcc_version>=47 and clang==0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags_cc': [
+                  '-std=gnu++11',
+                  # See comment for -Wno-c++11-narrowing.
+                  '-Wno-narrowing',
+                  # TODO(thakis): Remove, http://crbug.com/263960
+                  '-Wno-literal-suffix',
+                ],
+              }],
+            ],
+          }],
+          ['host_gcc_version>=47 and clang==0 and host_clang==0', {
+            'target_conditions': [
+              ['_toolset=="host"', {
+                'cflags_cc': [
+                  '-std=gnu++11',
+                  # See comment for -Wno-c++11-narrowing.
+                  '-Wno-narrowing',
+                  # TODO(thakis): Remove, http://crbug.com/263960
+                  '-Wno-literal-suffix',
+                ],
+              }],
+            ],
+          }],
+        ],
+      },
+    }],
+    # FreeBSD-specific options; note that most FreeBSD options are set above,
+    # with Linux.
+    ['OS=="freebsd"', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,--no-keep-memory',
+        ],
+      },
+    }],
+    # Android-specific options; note that most are set above with Linux.
+    ['OS=="android"', {
+      'variables': {
+        # This is a unique identifier for a given build. It's used for
+        # identifying various build artifacts corresponding to a particular
+        # build of chrome (e.g. where to find archived symbols).
+        'chrome_build_id%': '',
+        'conditions': [
+          # Figure this out early since the chosen libc++ needs symbols from
+          # libgcc.a, so it has to come before libgcc in the set of libraries.
+          ['component=="shared_library"', {
+              'android_libcpp_library': 'c++_shared',
+          }, {
+              'android_libcpp_library': 'c++_static',
+          }],
+        ],
+
+        # Placing this variable here prevents us from forking libvpx, which is
+        # used by remoting.  Remoting is off, so it needn't be built, and
+        # forking its deps seems like overkill.
+        # But this variable needs to be defined for gyp to run properly.
+        # A proper solution is to have an OS==android conditional
+        # in third_party/libvpx/libvpx.gyp to define it.
+        'libvpx_path': 'lib/linux/arm',
+      },
+      'target_defaults': {
+        'variables': {
+          'release_extra_cflags%': '',
+          'conditions': [
+            # If we're using the components build, append "cr" to all shared
+            # libraries to avoid naming collisions with android system library
+            # versions with the same name (e.g. skia, icu).
+            ['component=="shared_library"', {
+              'android_product_extension': 'cr.so',
+            }, {
+              'android_product_extension': 'so',
+            } ],
+          ],
+        },
+        'target_conditions': [
+          ['_type=="shared_library"', {
+            'product_extension': '<(android_product_extension)',
+          }],
+
+          # Settings for building device targets using Android's toolchain.
+          # These are based on the setup.mk file from the Android NDK.
+          #
+          # The NDK Android executable link step looks as follows:
+          #  $LDFLAGS
+          #  $(TARGET_CRTBEGIN_DYNAMIC_O)  <-- crtbegin.o
+          #  $(PRIVATE_OBJECTS)            <-- The .o that we built
+          #  $(PRIVATE_STATIC_LIBRARIES)   <-- The .a that we built
+          #  $(TARGET_LIBGCC)              <-- libgcc.a
+          #  $(PRIVATE_SHARED_LIBRARIES)   <-- The .so that we built
+          #  $(PRIVATE_LDLIBS)             <-- System .so
+          #  $(TARGET_CRTEND_O)            <-- crtend.o
+          #
+          # For now the above are approximated for executables by adding
+          # crtbegin.o to the end of the ldflags and 'crtend.o' to the end
+          # of 'libraries'.
+          #
+          # The NDK Android shared library link step looks as follows:
+          #  $LDFLAGS
+          #  $(PRIVATE_OBJECTS)            <-- The .o that we built
+          #  -Wl,--whole-archive
+          #  $(PRIVATE_WHOLE_STATIC_LIBRARIES)
+          #  -Wl,--no-whole-archive
+          #  $(PRIVATE_STATIC_LIBRARIES)   <-- The .a that we built
+          #  $(TARGET_LIBGCC)              <-- libgcc.a
+          #  $(PRIVATE_SHARED_LIBRARIES)   <-- The .so that we built
+          #  $(PRIVATE_LDLIBS)             <-- System .so
+          #
+          # For now, assume that whole static libraries are not needed.
+          #
+          # For both executables and shared libraries, add the proper
+          # libgcc.a to the start of libraries, which puts it in the
+          # proper spot after .o and .a files get linked in.
+          #
+          # TODO: The proper thing to do longer-term would be proper gyp
+          # support for a custom link command line.
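+          #
+          # Hedged sketch of the approximated executable link line that the
+          # settings below produce (exact ordering is up to gyp):
+          #   $CC <ldflags...> crtbegin_dynamic.o <our .o/.a files> \
+          #     -lc++_shared|-lc++_static -latomic <path/to/libgcc.a> \
+          #     -lc -ldl -lm crtend_android.o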
+          ['_toolset=="target"', {
+            'cflags!': [
+              '-pthread',  # Not supported by Android toolchain.
+            ],
+            'cflags': [
+              '-ffunction-sections',
+              '-funwind-tables',
+              '-g',
+              '-fstack-protector',
+              '-fno-short-enums',
+              '-finline-limit=64',
+              '<@(release_extra_cflags)',
+              '--sysroot=<(android_ndk_sysroot)',
+              # NOTE: The libc++ header include paths below are specified in
+              # cflags rather than include_dirs because they need to come
+              # after include_dirs.
+              # The include ordering here is important; change with caution.
+              '-isystem<(android_libcpp_include)',
+              '-isystem<(android_ndk_root)/sources/cxx-stl/llvm-libc++abi/libcxxabi/include',
+              '-isystem<(android_ndk_root)/sources/android/support/include',
+            ],
+            'defines': [
+              'ANDROID',
+              '__GNU_SOURCE=1',  # Necessary for clone()
+              'CHROME_BUILD_ID="<(chrome_build_id)"',
+              # The NDK has these things, but doesn't define the constants
+              # to say that it does. Define them here instead.
+              'HAVE_SYS_UIO_H',
+            ],
+            'ldflags!': [
+              '-pthread',  # Not supported by Android toolchain.
+            ],
+            'ldflags': [
+              '-Wl,--build-id=sha1',
+              '-Wl,--no-undefined',
+              '--sysroot=<(android_ndk_sysroot)',
+              '-nostdlib',
+              '-L<(android_libcpp_libs_dir)',
+              # Don't allow visible symbols from libgcc or libc++ to be
+              # re-exported.
+              '-Wl,--exclude-libs=libgcc.a',
+              '-Wl,--exclude-libs=libc++_static.a',
+              # Don't allow visible symbols from libraries that contain
+              # assembly code with symbols that aren't hidden properly.
+              # http://crbug.com/448386
+              '-Wl,--exclude-libs=libcommon_audio.a',
+              '-Wl,--exclude-libs=libcommon_audio_neon.a',
+              '-Wl,--exclude-libs=libcommon_audio_sse2.a',
+              '-Wl,--exclude-libs=libiSACFix.a',
+              '-Wl,--exclude-libs=libisac_neon.a',
+              '-Wl,--exclude-libs=libopus.a',
+              '-Wl,--exclude-libs=libvpx.a',
+            ],
+            'libraries': [
+              '-l<(android_libcpp_library)',
+              '-latomic',
+              # Manually link the libgcc.a that the cross compiler uses.
+              '<!(<(android_toolchain)/*-gcc -print-libgcc-file-name)',
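+              # (Hedged note: -print-libgcc-file-name makes the cross gcc
+              # print the absolute path of its own libgcc.a, which must be
+              # linked by hand here because -nostdlib above drops the
+              # default libraries.)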
+              '-lc',
+              '-ldl',
+              '-lm',
+            ],
+            'conditions': [
+              ['component=="static_library"', {
+                'target_conditions': [
+                  ['use_native_jni_exports==0', {
+                    # Use a linker version script to strip JNI exports from
+                    # binaries which have not specifically asked to use them.
+                    'ldflags': [
+                      '-Wl,--version-script=<!(cd <(DEPTH) && pwd -P)/build/android/android_no_jni_exports.lst',
+                    ],
+                  }],
+                ],
+              }],
+              ['clang==1', {
+                'libraries!': [
+                  # Clang with libc++ does not require an explicit atomic
+                  # library reference.
+                  '-latomic',
+                ],
+                'cflags': [
+                  # Work around incompatibilities between bionic and clang
+                  # headers.
+                  '-D__compiler_offsetof=__builtin_offsetof',
+                  '-Dnan=__builtin_nan',
+                ],
+                'conditions': [
+                  ['target_arch=="arm"', {
+                    'cflags': [
+                      '-target arm-linux-androideabi',
+                    ],
+                    'ldflags': [
+                      '-target arm-linux-androideabi',
+                    ],
+                  }],
+                  ['target_arch=="ia32"', {
+                    'cflags': [
+                      '-target x86-linux-androideabi',
+                    ],
+                    'ldflags': [
+                      '-target x86-linux-androideabi',
+                    ],
+                  }],
+                  # Placeholder for x64 support; not tested.
+                  # TODO: Enable clang support for Android x64. http://crbug.com/346626
+                  ['target_arch=="x64"', {
+                    'cflags': [
+                      '-target x86_64-linux-androideabi',
+                    ],
+                    'ldflags': [
+                      '-target x86_64-linux-androideabi',
+                    ],
+                  }],
+                ],
+              }],
+              ['asan==1', {
+                'cflags': [
+                  # Android build relies on -Wl,--gc-sections removing
+                  # unreachable code. ASan instrumentation for globals inhibits
+                  # this and results in a library with unresolvable relocations.
+                  # TODO(eugenis): find a way to reenable this.
+                  '-mllvm -asan-globals=0',
+                ],
+              }],
+              ['target_arch == "arm" and order_profiling==0', {
+                'ldflags': [
+                  # Enable identical code folding to reduce size.
+                  '-Wl,--icf=<(gold_icf_level)',
+                ],
+              }],
+              ['target_arch=="ia32"', {
+                # The x86 toolchain currently has problems with stack-protector.
+                'cflags!': [
+                  '-fstack-protector',
+                ],
+                'cflags': [
+                  '-fno-stack-protector',
+                ],
+              }],
+            ],
+            'target_conditions': [
+              ['_type=="executable"', {
+                # Force android tools to export the "main" symbol so they can be
+                # loaded on ICS using the run_pie wrapper. See crbug.com/373219.
+                # TODO(primiano): remove -fvisibility and -rdynamic flags below
+                # when ICS support is dropped.
+                'cflags': [
+                  '-fPIE',
+                  '-fvisibility=default',
+                ],
+                'ldflags': [
+                  '-Bdynamic',
+                  '-Wl,--gc-sections',
+                  '-Wl,-z,nocopyreloc',
+                  '-pie',
+                  '-rdynamic',
+                  # crtbegin_dynamic.o should be the last item in ldflags.
+                  '<(android_ndk_lib)/crtbegin_dynamic.o',
+                ],
+                'libraries': [
+                  # crtend_android.o needs to be the last item in libraries.
+                  # Do not add any libraries after this!
+                  '<(android_ndk_lib)/crtend_android.o',
+                ],
+              }],
+              ['_type=="shared_library" or _type=="loadable_module"', {
+                'ldflags': [
+                  '-Wl,-shared,-Bsymbolic',
+                  # crtbegin_so.o should be the last item in ldflags.
+                  '<(android_ndk_lib)/crtbegin_so.o',
+                ],
+                'libraries': [
+                  # crtend_so.o needs to be the last item in libraries.
+                  # Do not add any libraries after this!
+                  '<(android_ndk_lib)/crtend_so.o',
+                ],
+              }],
+            ],
+          }],
+          # Settings for building host targets using the system toolchain.
+          ['_toolset=="host"', {
+            'cflags!': [
+              # Due to issues in the Clang build system, using ASan on 32-bit
+              # binaries on an x86_64 host is problematic.
+              # TODO(eugenis): re-enable.
+              '-fsanitize=address',
+            ],
+            'ldflags!': [
+              '-fsanitize=address',
+              '-Wl,-z,noexecstack',
+              '-Wl,--gc-sections',
+              '-Wl,-O1',
+              '-Wl,--as-needed',
+              '-Wl,--warn-shared-textrel',
+              '-Wl,--fatal-warnings',
+            ],
+          }],
+          # Settings for building host targets on mac.
+          ['_toolset=="host" and host_os=="mac"', {
+            'ldflags!': [
+              '-Wl,-z,now',
+              '-Wl,-z,relro',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['OS=="solaris"', {
+      'cflags!': ['-fvisibility=hidden'],
+      'cflags_cc!': ['-fvisibility-inlines-hidden'],
+    }],
+    ['OS=="mac" or OS=="ios"', {
+      'target_defaults': {
+        'mac_bundle': 0,
+        'xcode_settings': {
+          'ALWAYS_SEARCH_USER_PATHS': 'NO',
+          # Don't link in libarclite_macosx.a, see http://crbug.com/156530.
+          'CLANG_LINK_OBJC_RUNTIME': 'NO',          # -fno-objc-link-runtime
+          'COPY_PHASE_STRIP': 'NO',
+          'GCC_C_LANGUAGE_STANDARD': 'c99',         # -std=c99
+          'GCC_CW_ASM_SYNTAX': 'NO',                # No -fasm-blocks
+          'GCC_ENABLE_CPP_EXCEPTIONS': 'NO',        # -fno-exceptions
+          'GCC_ENABLE_CPP_RTTI': 'NO',              # -fno-rtti
+          'GCC_ENABLE_PASCAL_STRINGS': 'NO',        # No -mpascal-strings
+          # GCC_INLINES_ARE_PRIVATE_EXTERN maps to -fvisibility-inlines-hidden
+          'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES',
+          'GCC_OBJC_CALL_CXX_CDTORS': 'YES',        # -fobjc-call-cxx-cdtors
+          'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES',      # -fvisibility=hidden
+          'GCC_THREADSAFE_STATICS': 'NO',           # -fno-threadsafe-statics
+          'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES',    # -Werror
+          'GCC_VERSION': '4.2',
+          'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES',  # -Wnewline-eof
+          'USE_HEADERMAP': 'NO',
+          'WARNING_CFLAGS': [
+            '-Wall',
+            '-Wendif-labels',
+            '-Wextra',
+            # Don't warn about unused function parameters.
+            '-Wno-unused-parameter',
+            # Don't warn about the "struct foo f = {0};" initialization
+            # pattern.
+            '-Wno-missing-field-initializers',
+          ],
+          'conditions': [
+            ['chromium_mac_pch', {'GCC_PRECOMPILE_PREFIX_HEADER': 'YES'},
+                                 {'GCC_PRECOMPILE_PREFIX_HEADER': 'NO'}
+            ],
+            # Note that the prebuilt Clang binaries should not be used for iOS
+            # development except for ASan builds.
+            ['clang==1', {
+              'CLANG_CXX_LANGUAGE_STANDARD': 'c++11',  # -std=c++11
+              # Warn if automatic synthesis is triggered with
+              # the -Wobjc-missing-property-synthesis flag.
+              'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'YES',
+              'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+              'WARNING_CFLAGS': [
+                # This warns on selectors from Cocoa headers (-length, -set).
+                # cfe-dev is currently discussing the merits of this warning.
+                # TODO(thakis): Reevaluate what to do with this, based on the
+                # cfe-dev discussion.
+                '-Wno-selector-type-mismatch',
+              ],
+              'conditions': [
+                ['clang_xcode==0', {
+                  'CC': '$(SOURCE_ROOT)/<(clang_dir)/clang',
+                  'LDPLUSPLUS': '$(SOURCE_ROOT)/<(clang_dir)/clang++',
+                }],
+              ],
+            }],
+            ['clang==1 and clang_xcode==0 and clang_use_chrome_plugins==1', {
+              'OTHER_CFLAGS': [
+                '<@(clang_chrome_plugins_flags)',
+              ],
+            }],
+            ['clang==1 and clang_xcode==0 and clang_load!=""', {
+              'OTHER_CFLAGS': [
+                '-Xclang', '-load', '-Xclang', '<(clang_load)',
+              ],
+            }],
+            ['clang==1 and clang_xcode==0 and clang_add_plugin!=""', {
+              'OTHER_CFLAGS': [
+                '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+              ],
+            }],
+            ['clang==1 and "<(GENERATOR)"=="ninja"', {
+              'OTHER_CFLAGS': [
+                # See http://crbug.com/110262
+                '-fcolor-diagnostics',
+              ],
+            }],
+            ['OS=="ios" and target_subarch!="arm32" and \
+              "<(GENERATOR)"=="xcode"', {
+              'OTHER_CFLAGS': [
+                # TODO(ios): when building Chrome for iOS on a 64-bit platform
+                # with Xcode, the -Wshorten-64-to-32 warning is automatically
+                # enabled. This causes failures when compiling protobuf code,
+                # so disable the warning. http://crbug.com/359107
+                '-Wno-shorten-64-to-32',
+              ],
+            }],
+          ],
+        },
+        'conditions': [
+          ['clang==1', {
+            'variables': {
+              'clang_dir': '../third_party/llvm-build/Release+Asserts/bin',
+            },
+          }],
+          ['asan==1', {
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fsanitize=address',
+                '-mllvm -asan-globals=0',  # http://crbug.com/352073
+                '-gline-tables-only',
+              ],
+            },
+          }],
+          ['asan_coverage!=0 and sanitizer_coverage==0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize-coverage=<(asan_coverage)',
+                ],
+                'defines': [
+                  'SANITIZER_COVERAGE',
+                ],
+              }],
+            ],
+          }],
+          ['sanitizer_coverage!=0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize-coverage=<(sanitizer_coverage)',
+                ],
+                'defines': [
+                  'SANITIZER_COVERAGE',
+                ],
+              }],
+            ],
+          }],
+        ],
+        'target_conditions': [
+          ['_type!="static_library"', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-search_paths_first']},
+            'conditions': [
+              ['asan==1', {
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-fsanitize=address',
+                  ],
+                },
+              }],
+              ['mac_write_linker_maps==1', {
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-Wl,-map,>(_target_name).map',
+                  ],
+                },
+              }],
+            ],
+          }],
+          ['_mac_bundle', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
+            'target_conditions': [
+              ['_type=="executable"', {
+                'conditions': [
+                  ['asan==1', {
+                    'postbuilds': [
+                      {
+                        'variables': {
+                          # Define copy_asan_dylib_path in a variable ending in
+                          # _path so that gyp understands it's a path and
+                          # performs proper relativization during dict merging.
+                          'copy_asan_dylib_path':
+                            'mac/copy_asan_runtime_dylib.sh',
+                        },
+                        'postbuild_name': 'Copy ASan runtime dylib',
+                        'action': [
+                          '<(copy_asan_dylib_path)',
+                        ],
+                      },
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="mac" or OS=="ios"
+    ['OS=="mac"', {
+      'target_defaults': {
+        'defines': [
+          # Prevent Mac OS X AssertMacros.h from defining macros that collide
+          # with common names, like 'check', 'require', and 'verify'.
+          # (Included by system header. Also exists on iOS but not included.)
+          # http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h
+          '__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE=0',
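+          # Hedged illustration of the collision this prevents: without the
+          # define above, AssertMacros.h declares a bare check(assertion)
+          # macro, which breaks any code that declares, say, a method named
+          # check().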
+        ],
+        'variables': {
+          # These should end with %, but there seems to be a bug with % in
+          # variables that are intended to be set to different values in
+          # different targets, like these.
+          'mac_pie': 1,        # Most executables can be position-independent.
+          # Strip debugging symbols from the target.
+          'mac_strip': '<(mac_strip_release)',
+          'conditions': [
+            ['asan==1', {
+              'conditions': [
+                ['mac_want_real_dsym=="default"', {
+                  'mac_real_dsym': 1,
+                }, {
+                  'mac_real_dsym': '<(mac_want_real_dsym)'
+                }],
+              ],
+            }, {
+              'conditions': [
+                ['mac_want_real_dsym=="default"', {
+                  'mac_real_dsym': 0, # Fake .dSYMs are fine in most cases.
+                }, {
+                  'mac_real_dsym': '<(mac_want_real_dsym)'
+                }],
+              ],
+            }],
+          ],
+        },
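+        # Hedged summary of the mac_real_dsym conditions above: unless a
+        # target overrides mac_want_real_dsym, ASan builds get real dSYMs
+        # (mac_real_dsym=1) and all other builds use fast fake ones
+        # (mac_real_dsym=0).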
+        'configurations': {
+          'Release_Base': {
+            'conditions': [
+              ['branding=="Chrome" and buildtype=="Official"', {
+                'xcode_settings': {
+                  'OTHER_CFLAGS': [
+                    # The Google Chrome Framework dSYM generated by dsymutil has
+                    # grown larger than 4GB, which dsymutil can't handle. Reduce
+                    # the amount of debug symbols.
+                    '-fno-standalone-debug',  # See http://crbug.com/479841
+                  ]
+                },
+              }],
+            ],
+          },  # configuration "Release"
+        },  # configurations
+        'xcode_settings': {
+          'GCC_DYNAMIC_NO_PIC': 'NO',               # No -mdynamic-no-pic
+                                                    # (Equivalent to -fPIC)
+          # MACOSX_DEPLOYMENT_TARGET maps to -mmacosx-version-min
+          'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)',
+          # Keep pch files below xcodebuild/.
+          'SHARED_PRECOMPS_DIR': '$(CONFIGURATION_BUILD_DIR)/SharedPrecompiledHeaders',
+          'OTHER_CFLAGS': [
+            # Someday this can be replaced by a 'GCC_STRICT_ALIASING': 'NO'
+            # xcode_setting, but not until all downstream projects' mac bots are
+            # using xcode >= 4.6, because that's when the default value of the
+            # flag in the compiler switched.  Pre-4.6, the value 'NO' for that
+            # setting is a no-op as far as xcode is concerned, but the compiler
+            # behaves differently based on whether -fno-strict-aliasing is
+            # specified or not.
+            '-fno-strict-aliasing',  # See http://crbug.com/32204.
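+            # (Hedged example of what strict aliasing would break: type
+            # punning through pointer casts, e.g. reading a float's bits
+            # via *(int*)&f, is undefined under -fstrict-aliasing and can
+            # be miscompiled; -fno-strict-aliasing keeps such code working.)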
+          ],
+        },
+        'target_conditions': [
+          ['_type=="executable"', {
+            'postbuilds': [
+              {
+                # Arranges for data (heap) pages to be protected against
+                # code execution when running on Mac OS X 10.7 ("Lion"), and
+                # ensures that the position-independent executable (PIE) bit
+                # is set for ASLR when running on Mac OS X 10.5 ("Leopard").
+                'variables': {
+                  # Define change_mach_o_flags in a variable ending in _path
+                  # so that GYP understands it's a path and performs proper
+                  # relativization during dict merging.
+                  'change_mach_o_flags_path':
+                      'mac/change_mach_o_flags_from_xcode.sh',
+                  'change_mach_o_flags_options%': [
+                  ],
+                  'target_conditions': [
+                    ['mac_pie==0 or release_valgrind_build==1', {
+                      # Don't enable PIE if it's unwanted. It's unwanted if
+                      # the target specifies mac_pie=0 or if building for
+                      # Valgrind, because Valgrind doesn't understand slide.
+                      # See the similar mac_pie/release_valgrind_build check
+                      # below.
+                      'change_mach_o_flags_options': [
+                        '--no-pie',
+                      ],
+                    }],
+                  ],
+                },
+                'postbuild_name': 'Change Mach-O Flags',
+                'action': [
+                  '<(change_mach_o_flags_path)',
+                  '>@(change_mach_o_flags_options)',
+                ],
+              },
+            ],
+            'target_conditions': [
+              ['mac_pie==1 and release_valgrind_build==0', {
+                # Turn on position-independence (ASLR) for executables. When
+                # PIE is on for the Chrome executables, the framework will
+                # also be subject to ASLR.
+                # Don't do this when building for Valgrind, because Valgrind
+                # doesn't understand slide. TODO: Make Valgrind on Mac OS X
+                # understand slide, and get rid of the Valgrind check.
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-Wl,-pie',  # Position-independent executable (MH_PIE)
+                  ],
+                },
+              }],
+            ],
+          }],
+          ['(_type=="executable" or _type=="shared_library" or \
+             _type=="loadable_module") and mac_strip!=0', {
+            'target_conditions': [
+              ['mac_real_dsym == 1', {
+                # To get a real .dSYM bundle produced by dsymutil, set the
+                # debug information format to dwarf-with-dsym.  Since
+                # strip_from_xcode will not be used, set Xcode to do the
+                # stripping as well.
+                'configurations': {
+                  'Release_Base': {
+                    'xcode_settings': {
+                      'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+                      'DEPLOYMENT_POSTPROCESSING': 'YES',
+                      'STRIP_INSTALLED_PRODUCT': 'YES',
+                      'conditions': [
+                        # Only strip non-ASan builds.
+                        ['asan==0', {
+                          'target_conditions': [
+                            ['_type=="shared_library" or _type=="loadable_module"', {
+                              # The Xcode default is to strip debugging symbols
+                              # only (-S).  Local symbols should be stripped as
+                              # well, which will be handled by -x.  Xcode will
+                              # continue to insert -S when stripping even when
+                              # additional flags are added with STRIPFLAGS.
+                              'STRIPFLAGS': '-x',
+                            }],  # _type=="shared_library" or _type=="loadable_module"
+                          ],  # target_conditions
+                        }, {  # asan != 0
+                          'STRIPFLAGS': '-S',
+                        }],
+                      ],
+                    },  # xcode_settings
+                  },  # configuration "Release"
+                },  # configurations
+              }, {  # mac_real_dsym != 1
+                # To get a fast fake .dSYM bundle, use a post-build step to
+                # produce the .dSYM and strip the executable.  strip_from_xcode
+                # only operates in the Release configuration.
+                'postbuilds': [
+                  {
+                    'variables': {
+                      # Define strip_from_xcode in a variable ending in _path
+                      # so that gyp understands it's a path and performs proper
+                      # relativization during dict merging.
+                      'strip_from_xcode_path': 'mac/strip_from_xcode',
+                    },
+                    'postbuild_name': 'Strip If Needed',
+                    'action': ['<(strip_from_xcode_path)'],
+                  },
+                ],  # postbuilds
+              }],  # mac_real_dsym
+            ],  # target_conditions
+          }],  # (_type=="executable" or _type=="shared_library" or
+               #  _type=="loadable_module") and mac_strip!=0
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="mac"
+    ['OS=="ios"', {
+      'includes': [
+        'ios/coverage.gypi',
+      ],
+      'target_defaults': {
+        'xcode_settings': {
+          'CLANG_CXX_LANGUAGE_STANDARD': 'c++11',
+
+          'conditions': [
+            # Older Xcodes do not support -Wno-deprecated-register, so pass an
+            # additional flag to suppress the "unknown compiler option" error.
+            # Restrict this flag to builds that are either compiling with Xcode
+            # or compiling with Xcode's Clang.  This will allow Ninja builds to
+            # continue failing on unknown compiler options.
+            # TODO(rohitrao): This flag is temporary and should be removed as
+            # soon as the iOS bots are updated to use Xcode 5.1.
+            ['clang_xcode==1', {
+              'WARNING_CFLAGS': [
+                '-Wno-unknown-warning-option',
+                # It's not possible to achieve nullability completeness before
+                # all builders are running Xcode 7. crbug.com/499809
+                '-Wno-nullability-completeness',
+              ],
+            }],
+
+            # Limit the valid architectures depending on "target_subarch".
+            # This needs to include the "arm" architectures but also the "x86"
+            # ones (they are used when building for the simulator).
+            ['target_subarch=="arm32"', {
+              'VALID_ARCHS': ['armv7', 'i386'],
+            }],
+            ['target_subarch=="arm64"', {
+              'VALID_ARCHS': ['arm64', 'x86_64'],
+            }],
+            ['target_subarch=="both"', {
+              'VALID_ARCHS': ['arm64', 'armv7', 'x86_64', 'i386'],
+            }],
+            ['use_system_libcxx==1', {
+              'target_conditions': [
+                # Only use libc++ when building the target for iOS, not when
+                # building tools for the host (OS X), as Mac targets the
+                # OS X 10.6 SDK, which does not support libc++.
+                ['_toolset=="target"', {
+                  'CLANG_CXX_LIBRARY': 'libc++',  # -stdlib=libc++
+                }]
+              ],
+            }, {
+              # The default for deployment target of 7.0+ is libc++, so force
+              # the old behavior unless libc++ is enabled.
+              'CLANG_CXX_LIBRARY': 'libstdc++',  # -stdlib=libstdc++
+            }],
+          ],
+        },
+        'target_conditions': [
+          ['_toolset=="host"', {
+            'xcode_settings': {
+              'SDKROOT': 'macosx<(mac_sdk)',  # -isysroot
+              'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)',
+              'VALID_ARCHS': [
+                'x86_64',
+              ],
+              'ARCHS': [
+                'x86_64',
+              ],
+            },
+          }],
+          ['_toolset=="target"', {
+            'xcode_settings': {
+              # This section should be for overriding host settings. But,
+              # since we can't negate the iphone deployment target above, we
+              # instead set it here for target only.
+              'IPHONEOS_DEPLOYMENT_TARGET': '<(ios_deployment_target)',
+              'ARCHS': ['$(ARCHS_STANDARD_INCLUDING_64_BIT)'],
+            },
+          }],
+          ['_type=="executable"', {
+            'configurations': {
+              'Release_Base': {
+                'xcode_settings': {
+                  'DEPLOYMENT_POSTPROCESSING': 'YES',
+                  'STRIP_INSTALLED_PRODUCT': 'YES',
+                },
+              },
+              'Debug_Base': {
+                'xcode_settings': {
+                  # Remove dSYM to reduce build time.
+                  'DEBUG_INFORMATION_FORMAT': 'dwarf',
+                },
+              },
+            },
+            'xcode_settings': {
+              'conditions': [
+                ['chromium_ios_signing', {
+                  # iOS SDK wants everything for device signed.
+                  'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+                }, {
+                  'CODE_SIGNING_REQUIRED': 'NO',
+                  'CODE_SIGN_IDENTITY[sdk=iphoneos*]': '',
+                }],
+              ],
+            },
+          }],
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="ios"
+    ['OS=="win"', {
+      'target_defaults': {
+        'defines': [
+          '_WIN32_WINNT=0x0603',
+          'WINVER=0x0603',
+          'WIN32',
+          '_WINDOWS',
+          'NOMINMAX',
+          'PSAPI_VERSION=1',
+          '_CRT_RAND_S',
+          'CERT_CHAIN_PARA_HAS_EXTRA_FIELDS',
+          'WIN32_LEAN_AND_MEAN',
+          '_ATL_NO_OPENGL',
+          '_SECURE_ATL',
+          # _HAS_EXCEPTIONS must match ExceptionHandling in msvs_settings.
+          '_HAS_EXCEPTIONS=0',
+          # Silence some warnings; we can't switch to the 'recommended'
+          # versions as they're not available on old OSs.
+          '_WINSOCK_DEPRECATED_NO_WARNINGS',
+        ],
+        'conditions': [
+          ['buildtype=="Official"', {
+              # In official builds, targets can self-select an optimization
+              # level by defining a variable named 'optimize', and setting it
+              # to one of
+              # - "size", optimizes for minimal code size - the default.
+              # - "speed", optimizes for speed over code size.
+              # - "max", whole program optimization and link-time code
+              #   generation. This is very expensive and should be used
+              #   sparingly.
+              'variables': {
+                'optimize%': 'size',
+              },
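+              # Hedged example of a target self-selecting an optimization
+              # level (hypothetical target name):
+              #   {
+              #     'target_name': 'hot_loop_lib',
+              #     'variables': { 'optimize': 'speed' },
+              #     ...
+              #   }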
+              'msvs_settings': {
+                'VCLinkerTool': {
+                  # Set /LTCG for the official builds.
+                  'LinkTimeCodeGeneration': '1',
+                  'AdditionalOptions': [
+                    # Set the number of LTCG code-gen threads to eight.
+                    # The default is four. This gives a 5-10% link speedup.
+                    '/cgthreads:8',
+                  ],
+                },
+              },
+              'target_conditions': [
+                ['optimize=="size"', {
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 1, optimizeMinSpace, Minimize Size (/O1)
+                        'Optimization': '1',
+                        # 2, favorSize - Favor small code (/Os)
+                        'FavorSizeOrSpeed': '2',
+                      },
+                    },
+                  },
+                ],
+                # This config is used to avoid a problem in ffmpeg, see
+                # http://crbug.com/264459.
+                ['optimize=="size_no_ltcg"', {
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 1, optimizeMinSpace, Minimize Size (/O1)
+                        'Optimization': '1',
+                        # 2, favorSize - Favor small code (/Os)
+                        'FavorSizeOrSpeed': '2',
+                      },
+                    },
+                  },
+                ],
+                ['optimize=="speed"', {
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 2, optimizeMaxSpeed, Maximize Speed (/O2)
+                        'Optimization': '2',
+                        # 1, favorSpeed - Favor fast code (/Ot)
+                        'FavorSizeOrSpeed': '1',
+                      },
+                    },
+                  },
+                ],
+                ['optimize=="max"', {
+                    # Disable Warning 4702 ("Unreachable code") for the WPO/PGO
+                    # builds. Probably anything that this would catch that
+                    # wouldn't be caught in a normal build isn't going to
+                    # actually be a bug, so the incremental value of C4702 for
+                    # PGO builds is likely very small.
+                    'msvs_disabled_warnings': [
+                      4702
+                    ],
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 2, optimizeMaxSpeed, Maximize Speed (/O2)
+                        'Optimization': '2',
+                        # 1, favorSpeed - Favor fast code (/Ot)
+                        'FavorSizeOrSpeed': '1',
+                        # This implies link time code generation.
+                        'WholeProgramOptimization': 'true',
+                      },
+                    },
+                  },
+                ],
+              ],
+            },
+          ],
+          ['msvs_xtree_patched!=1', {
+            # If xtree hasn't been patched, then we disable C4702. Otherwise,
+            # it's enabled. This will generally only be true for system-level
+            # installed Express users.
+            'msvs_disabled_warnings': [
+              4702,
+            ],
+          }],
+        ],
+        'msvs_system_include_dirs': [
+          '<(windows_sdk_path)/Include/shared',
+          '<(windows_sdk_path)/Include/um',
+          '<(windows_sdk_path)/Include/winrt',
+          '$(VSInstallDir)/VC/atlmfc/include',
+        ],
+        'msvs_cygwin_shell': 0,
+        'msvs_disabled_warnings': [
+          # C4091: 'typedef ': ignored on left of 'X' when no variable is
+          #                    declared.
+          # This happens in a number of Windows headers. Dumb.
+          4091,
+
+          # C4127: conditional expression is constant
+          # This warning can in theory catch dead code and other problems, but
+          # triggers in far too many desirable cases where the conditional
+          # expression is either set by macros or corresponds to some legitimate
+          # compile-time constant expression (due to constant template args,
+          # conditionals comparing the sizes of different types, etc.).  Some of
+          # these can be worked around, but it's not worth it.
+          4127,
+
+          # C4351: new behavior: elements of array 'array' will be default
+          #        initialized
+          # This is a silly "warning" that basically just alerts you that the
+          # compiler is going to actually follow the language spec like it's
+          # supposed to, instead of not following it like old buggy versions
+          # did.  There's absolutely no reason to turn this on.
+          4351,
+
+          # C4355: 'this': used in base member initializer list
+          # It's commonly useful to pass |this| to objects in a class'
+          # initializer list.  While this warning can catch real bugs, most of
+          # the time the constructors in question don't attempt to call methods
+          # on the passed-in pointer (until later), and annotating every legit
+          # usage of this is simply more hassle than the warning is worth.
+          4355,
+
+          # C4503: 'identifier': decorated name length exceeded, name was
+          #        truncated
+          # This only means that some long error messages might have truncated
+          # identifiers in the presence of lots of templates.  It has no effect
+          # on program correctness and there's no real reason to waste time
+          # trying to prevent it.
+          4503,
+
+          # Warning C4589 says: "Constructor of abstract class ignores
+          # initializer for virtual base class." Disable this warning because it
+          # is flaky in VS 2015 RTM. It triggers on compiler generated
+          # copy-constructors in some cases.
+          4589,
+
+          # C4611: interaction between 'function' and C++ object destruction is
+          #        non-portable
+          # This warning is unavoidable when using e.g. setjmp/longjmp.  MSDN
+          # suggests using exceptions instead of setjmp/longjmp for C++, but
+          # Chromium code compiles without exception support.  We therefore have
+          # to use setjmp/longjmp for e.g. JPEG decode error handling, which
+          # means we have to turn off this warning (and be careful about how
+          # object destruction happens in such cases).
+          4611,
+
+          # TODO(maruel): These warnings are level 4. They will be slowly
+          # removed as code is fixed.
+          4100, # Unreferenced formal parameter
+          4121, # Alignment of a member was sensitive to packing
+          4244, # Conversion from 'type1' to 'type2', possible loss of data
+          4481, # Nonstandard extension used: override specifier 'keyword'
+          4505, # Unreferenced local function has been removed
+          4510, # Default constructor could not be generated
+          4512, # Assignment operator could not be generated
+          4610, # Object can never be instantiated
+          4838, # Narrowing conversion. Doesn't seem to be very useful.
+          4996, # 'X': was declared deprecated (for GetVersionEx).
+
+          # These are variable shadowing warnings that are new in VS2015. We
+          # should work through these at some point -- they may be removed from
+          # the RTM release in the /W4 set.
+          4456, 4457, 4458, 4459,
+        ],
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'AdditionalOptions': ['/MP'],
+            'MinimalRebuild': 'false',
+            'BufferSecurityCheck': 'true',
+            'EnableFunctionLevelLinking': 'true',
+            'RuntimeTypeInfo': 'false',
+            'WarningLevel': '4',
+            'WarnAsError': 'true',
+            'DebugInformationFormat': '3',
+            # ExceptionHandling must match _HAS_EXCEPTIONS above.
+            'ExceptionHandling': '0',
+          },
+          'VCLibrarianTool': {
+            'AdditionalOptions': ['/ignore:4221'],
+            'AdditionalLibraryDirectories': [
+              '<(windows_sdk_path)/Lib/win8/um/x86',
+            ],
+          },
+          'VCLinkerTool': {
+            'AdditionalDependencies': [
+              'wininet.lib',
+              'dnsapi.lib',
+              'version.lib',
+              'msimg32.lib',
+              'ws2_32.lib',
+              'usp10.lib',
+              'psapi.lib',
+              'dbghelp.lib',
+              'winmm.lib',
+              'shlwapi.lib',
+            ],
+            'AdditionalLibraryDirectories': [
+              '<(windows_sdk_path)/Lib/win8/um/x86',
+            ],
+            'GenerateDebugInformation': 'true',
+            'MapFileName': '$(OutDir)\\$(TargetName).map',
+            'ImportLibrary': '$(OutDir)\\lib\\$(TargetName).lib',
+            'FixedBaseAddress': '1',
+            # SubSystem values:
+            #   0 == not set
+            #   1 == /SUBSYSTEM:CONSOLE
+            #   2 == /SUBSYSTEM:WINDOWS
+            # Most of the executables we'll ever create are tests
+            # and utilities with console output.
+            'SubSystem': '1',
+          },
+          'VCMIDLTool': {
+            'GenerateStublessProxies': 'true',
+            'TypeLibraryName': '$(InputName).tlb',
+            'OutputDirectory': '$(IntDir)',
+            'HeaderFileName': '$(InputName).h',
+            'DLLDataFileName': '$(InputName).dlldata.c',
+            'InterfaceIdentifierFileName': '$(InputName)_i.c',
+            'ProxyFileName': '$(InputName)_p.c',
+          },
+          'VCResourceCompilerTool': {
+            'Culture' : '1033',
+            'AdditionalIncludeDirectories': [
+              '<(DEPTH)',
+              '<(SHARED_INTERMEDIATE_DIR)',
+            ],
+          },
+          'target_conditions': [
+            ['_type=="executable"', {
+              'VCManifestTool': {
+                'EmbedManifest': 'true',
+              },
+            }],
+            ['_type=="executable" and ">(win_exe_compatibility_manifest)"!=""', {
+              'VCManifestTool': {
+                'AdditionalManifestFiles': [
+                  '>(win_exe_compatibility_manifest)',
+                ],
+              },
+            }],
+          ],
+          'conditions': [
+            # Building with Clang on Windows is a work in progress and very
+            # experimental. See crbug.com/82385.
+            # Keep this in sync with the similar blocks in build/config/compiler/BUILD.gn
+            ['clang==1', {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  # Many files use intrinsics without including this header.
+                  # TODO(hans): Fix those files, or move this to sub-GYPs.
+                  '/FIIntrin.h',
+
+                  # TODO(hans): Make this list shorter eventually, http://crbug.com/504657
+                  '-Qunused-arguments',  # http://crbug.com/504658
+                  '-Wno-microsoft',  # http://crbug.com/505296
+                  '-Wno-switch',  # http://crbug.com/505308
+                  '-Wno-unknown-pragmas',  # http://crbug.com/505314
+                  '-Wno-unused-function',  # http://crbug.com/505316
+                  '-Wno-unused-value',  # http://crbug.com/505318
+                  '-Wno-unused-local-typedef',  # http://crbug.com/411648
+                ],
+              },
+            }],
+            ['clang==1 and target_arch=="ia32"', {
+              'VCCLCompilerTool': {
+                'WarnAsError': 'false',
+                'AdditionalOptions': [
+                  '/fallback',
+                ],
+              },
+            }],
+            ['clang==1 and clang_use_chrome_plugins==1', {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '<@(clang_chrome_plugins_flags)',
+                ],
+              },
+            }],
+            ['clang==1 and MSVS_VERSION == "2013"', {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '-fmsc-version=1800',
+                ],
+              },
+            }],
+            ['clang==1 and MSVS_VERSION == "2015"', {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '-fmsc-version=1900',
+                ],
+              },
+            }],
+            ['clang==1 and "<!(python <(DEPTH)/build/win/use_ansi_codes.py)"=="True"', {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  # cmd.exe doesn't understand ANSI escape codes by default,
+                  # so only enable them if something emulating them is around.
+                  '-fansi-escape-codes',
+                  # Also see http://crbug.com/110262
+                  '-fcolor-diagnostics',
+                ],
+              },
+            }],
+          ],
+        },
+      },
+    }],
+    ['disable_nacl==1', {
+      'target_defaults': {
+        'defines': [
+          'DISABLE_NACL',
+        ],
+      },
+    }],
+    ['OS=="win" and msvs_use_common_linker_extras', {
+      'target_defaults': {
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'DelayLoadDLLs': [
+              'dbghelp.dll',
+              'dwmapi.dll',
+              'shell32.dll',
+              'uxtheme.dll',
+            ],
+          },
+        },
+        'configurations': {
+          'x86_Base': {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'AdditionalOptions': [
+                  '/safeseh',
+                  '/dynamicbase',
+                  '/ignore:4199',
+                  '/ignore:4221',
+                  '/nxcompat',
+                ],
+              },
+              'conditions': [
+                ['syzyasan==0', {
+                  'VCLinkerTool': {
+                    'AdditionalOptions': ['/largeaddressaware'],
+                  },
+                }],
+                ['asan==1', {
+                  # TODO(asan/win): Move this down into the general
+                  # win-target_defaults section once the 64-bit asan runtime
+                  # exists.  See crbug.com/345874.
+                  'VCCLCompilerTool': {
+                    'AdditionalOptions': [
+                      '-fsanitize=address',
+                      '-fsanitize-blacklist=<(PRODUCT_DIR)/../../tools/memory/asan/blacklist_win.txt',
+                    ],
+                    'AdditionalIncludeDirectories': [
+                      # MSVC needs to be able to find the sanitizer headers when
+                      # invoked via /fallback. This is critical for using macros
+                      # like ASAN_UNPOISON_MEMORY_REGION in files where we fall
+                      # back.
+                      '<(DEPTH)/<(make_clang_dir)/lib/clang/<!(python <(DEPTH)/tools/clang/scripts/update.py --print-clang-version)/include_sanitizer',
+                    ],
+                  },
+                  'VCLinkerTool': {
+                    'AdditionalLibraryDirectories': [
+                      # TODO(hans): If make_clang_dir is absolute, this breaks.
+                      '<(DEPTH)/<(make_clang_dir)/lib/clang/<!(python <(DEPTH)/tools/clang/scripts/update.py --print-clang-version)/lib/windows',
+                    ],
+                  },
+                  'target_conditions': [
+                    ['component=="shared_library"', {
+                      'VCLinkerTool': {
+                        'AdditionalDependencies': [
+                           'clang_rt.asan_dynamic-i386.lib',
+                           'clang_rt.asan_dynamic_runtime_thunk-i386.lib',
+                        ],
+                      },
+                    }],
+                    ['_type=="executable" and component=="static_library"', {
+                      'VCLinkerTool': {
+                        'AdditionalDependencies': [
+                           'clang_rt.asan-i386.lib',
+                        ],
+                      },
+                    }],
+                    ['(_type=="shared_library" or _type=="loadable_module") and component=="static_library"', {
+                      'VCLinkerTool': {
+                        'AdditionalDependencies': [
+                           'clang_rt.asan_dll_thunk-i386.lib',
+                        ],
+                      },
+                    }],
+                  ],
+                }],
+                ['sanitizer_coverage!=0', {
+                  # TODO(asan/win): Move this down into the general
+                  # win-target_defaults section once the 64-bit asan runtime
+                  # exists.  See crbug.com/345874.
+                  'VCCLCompilerTool': {
+                    'AdditionalOptions': [
+                      '-fsanitize-coverage=<(sanitizer_coverage)',
+                    ],
+                  },
+                }],
+              ],
+            },
+            'conditions': [
+              ['sanitizer_coverage!=0', {
+                # TODO(asan/win): Move this down into the general
+                # win-target_defaults section once the 64-bit asan runtime
+                # exists.  See crbug.com/345874.
+                'defines': [
+                  'SANITIZER_COVERAGE',
+                ],
+              }],
+            ],
+          },
+          'x64_Base': {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'AdditionalOptions': [
+                  # safeseh is not compatible with x64
+                  '/dynamicbase',
+                  '/ignore:4199',
+                  '/ignore:4221',
+                  '/nxcompat',
+                ],
+              },
+            },
+          },
+        },
+      },
+    }],
+    ['enable_new_npdevice_api==1', {
+      'target_defaults': {
+        'defines': [
+          'ENABLE_NEW_NPDEVICE_API',
+        ],
+      },
+    }],
+    # Don't warn about the "typedef 'foo' locally defined but not used"
+    # for gcc 4.8 and higher.
+    # TODO: remove this flag once all builds work. See crbug.com/227506
+    ['gcc_version>=48 and clang==0', {
+      'target_defaults': {
+        'cflags': [
+          '-Wno-unused-local-typedefs',
+        ],
+      },
+    }],
+    ['gcc_version>=48 and clang==0 and host_clang==1', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="host"', { 'cflags!': [ '-Wno-unused-local-typedefs' ]}],
+        ],
+      },
+    }],
+    ['clang==1 and ((OS!="mac" and OS!="ios") or clang_xcode==0) '
+        'and OS!="win"', {
+      'make_global_settings': [
+        ['CC', '<(make_clang_dir)/bin/clang'],
+        ['CXX', '<(make_clang_dir)/bin/clang++'],
+        ['CC.host', '$(CC)'],
+        ['CXX.host', '$(CXX)'],
+      ],
+    }],
+    ['clang==1 and OS=="win"', {
+      'make_global_settings': [
+        # On Windows, gyp's ninja generator only looks at CC.
+        ['CC', '<(make_clang_dir)/bin/clang-cl'],
+      ],
+    }],
+    ['use_lld==1 and OS=="win"', {
+      'make_global_settings': [
+        # Limited to Windows because -flavor link2 is the driver that is
+        # compatible with link.exe.
+        ['LD', '<(make_clang_dir)/bin/lld -flavor link2'],
+      ],
+    }],
+    ['OS=="android" and clang==0', {
+      # Hardcode the compiler names in the Makefile so that
+      # it won't depend on the environment at make time.
+      'make_global_settings': [
+        ['CC', '<!(/bin/echo -n <(android_toolchain)/*-gcc)'],
+        ['CXX', '<!(/bin/echo -n <(android_toolchain)/*-g++)'],
+        ['CC.host', '<(host_cc)'],
+        ['CXX.host', '<(host_cxx)'],
+      ],
+    }],
+    ['OS=="linux" and target_arch=="mipsel" and clang==0', {
+      'make_global_settings': [
+        ['CC', '<(sysroot)/../bin/mipsel-linux-gnu-gcc'],
+        ['CXX', '<(sysroot)/../bin/mipsel-linux-gnu-g++'],
+        ['CC.host', '<(host_cc)'],
+        ['CXX.host', '<(host_cxx)'],
+      ],
+    }],
+    ['OS=="linux" and target_arch=="arm" and host_arch!="arm" and chromeos==0 and clang==0', {
+      # Set up default ARM cross compiling on Linux.  These can be overridden
+      # using CC/CXX/etc environment variables.
+      'make_global_settings': [
+        ['CC', '<!(which arm-linux-gnueabihf-gcc)'],
+        ['CXX', '<!(which arm-linux-gnueabihf-g++)'],
+        ['CC.host', '<(host_cc)'],
+        ['CXX.host', '<(host_cxx)'],
+      ],
+    }],
+
+    # TODO(yyanagisawa): support GENERATOR==make.
+    #  The make generator doesn't support CC_wrapper without CC
+    #  in make_global_settings yet.
+    ['use_goma==1 and ("<(GENERATOR)"=="ninja" or clang==1)', {
+      'make_global_settings': [
+       ['CC_wrapper', '<(gomadir)/gomacc'],
+       ['CXX_wrapper', '<(gomadir)/gomacc'],
+       ['CC.host_wrapper', '<(gomadir)/gomacc'],
+       ['CXX.host_wrapper', '<(gomadir)/gomacc'],
+      ],
+    }],
+    ['use_lto==1', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'cflags': [
+              '-flto',
+            ],
+            'xcode_settings': {
+              'LLVM_LTO': 'YES',
+            },
+          }],
+          # Work-around for http://openradar.appspot.com/20356002
+          ['_toolset=="target" and _type!="static_library"', {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-Wl,-all_load',
+              ],
+            },
+          }],
+        ],
+      },
+    }],
+    ['use_lto==1 and clang==0', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'cflags': [
+              '-ffat-lto-objects',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['use_lto==1 and clang==1', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'arflags': [
+              '--plugin', '../../<(make_clang_dir)/lib/LLVMgold.so',
+            ],
+          }],
+        ],
+      },
+    }],
+    # Apply a lower LTO optimization level in non-official builds.
+    ['use_lto==1 and clang==1 and buildtype!="Official"', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'ldflags': [
+              '-Wl,--plugin-opt,O1',
+            ],
+          }],
+          ['_toolset=="target" and _type!="static_library"', {
+            'xcode_settings':  {
+              'OTHER_LDFLAGS': [
+                '-Wl,-mllvm,-O1',
+              ],
+            },
+          }],
+        ],
+      },
+    }],
+    ['use_lto==1 and clang==1 and target_arch=="arm"', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            # Without this flag, LTO produces a .text section that is larger
+            # than the maximum call displacement, preventing the linker from
+            # relocating calls (http://llvm.org/PR22999).
+            'ldflags': [
+              '-Wl,-plugin-opt,-function-sections',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['(use_lto==1 or use_lto_o2==1) and clang==0', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'ldflags': [
+              '-flto=32',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['(use_lto==1 or use_lto_o2==1) and clang==1', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'ldflags': [
+              '-flto',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['cfi_vptr==1', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'cflags': [
+              '-fsanitize=cfi-vcall',
+              '-fsanitize=cfi-derived-cast',
+              '-fsanitize=cfi-unrelated-cast',
+              '-fsanitize-blacklist=<(cfi_blacklist)',
+            ],
+            'ldflags': [
+              '-fsanitize=cfi-vcall',
+              '-fsanitize=cfi-derived-cast',
+              '-fsanitize=cfi-unrelated-cast',
+            ],
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fsanitize=cfi-vcall',
+                '-fsanitize=cfi-derived-cast',
+                '-fsanitize=cfi-unrelated-cast',
+                '-fsanitize-blacklist=<(cfi_blacklist)',
+              ],
+            },
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '-fsanitize=cfi-vcall',
+                  '-fsanitize=cfi-derived-cast',
+                  '-fsanitize=cfi-unrelated-cast',
+                  '-fsanitize-blacklist=<(cfi_blacklist)',
+                ],
+              },
+            },
+          }],
+          ['_toolset=="target" and _type!="static_library"', {
+            'xcode_settings':  {
+              'OTHER_LDFLAGS': [
+                '-fsanitize=cfi-vcall',
+                '-fsanitize=cfi-derived-cast',
+                '-fsanitize=cfi-unrelated-cast',
+              ],
+            },
+          }],
+        ],
+      },
+    }],
+  ],
+  'xcode_settings': {
+    # DON'T ADD ANYTHING NEW TO THIS BLOCK UNLESS YOU REALLY REALLY NEED IT!
+    # This block adds *project-wide* configuration settings to each project
+    # file.  It's almost always wrong to put things here.  Specify your
+    # custom xcode_settings in target_defaults to add them to targets instead.
+
+    'conditions': [
+      # In an Xcode Project Info window, the "Base SDK for All Configurations"
+      # setting sets the SDK on a project-wide basis. In order to get the
+      # configured SDK to show properly in the Xcode UI, SDKROOT must be set
+      # here at the project level.
+      ['OS=="mac"', {
+        'conditions': [
+          ['mac_sdk_path==""', {
+            'SDKROOT': 'macosx<(mac_sdk)',  # -isysroot
+          }, {
+            'SDKROOT': '<(mac_sdk_path)',  # -isysroot
+          }],
+        ],
+      }],
+      ['OS=="ios"', {
+        'conditions': [
+          ['ios_sdk_path==""', {
+            'conditions': [
+              # TODO(justincohen): Ninja only supports simulator for now.
+              ['"<(GENERATOR)"=="xcode"', {
+                'SDKROOT': 'iphoneos<(ios_sdk)',  # -isysroot
+              }, {
+                'SDKROOT': 'iphonesimulator<(ios_sdk)',  # -isysroot
+              }],
+            ],
+          }, {
+            'SDKROOT': '<(ios_sdk_path)',  # -isysroot
+          }],
+        ],
+      }],
+      ['OS=="ios"', {
+        # Target both iPhone and iPad.
+        'TARGETED_DEVICE_FAMILY': '1,2',
+      }, {  # OS!="ios"
+        'conditions': [
+          ['target_arch=="x64"', {
+            'ARCHS': [
+              'x86_64'
+            ],
+          }],
+          ['target_arch=="ia32"', {
+            'ARCHS': [
+              'i386'
+            ],
+          }],
+        ],
+      }],
+    ],
+
+    # The Xcode generator will look for an xcode_settings section at the root
+    # of each dict and use it to apply settings on a file-wide basis.  Most
+    # settings should not be here, they should be in target-specific
+    # xcode_settings sections, or better yet, should use non-Xcode-specific
+    # settings in target dicts.  SYMROOT is a special case, because many other
+    # Xcode variables depend on it, including variables such as
+    # PROJECT_DERIVED_FILE_DIR.  When a source group corresponding to something
+    # like PROJECT_DERIVED_FILE_DIR is added to a project, in order for the
+    # files to appear (when present) in the UI as actual files and not as red
+    # "missing file" proxies, the correct path to PROJECT_DERIVED_FILE_DIR,
+    # and therefore SYMROOT, needs to be set at the project level.
+    'SYMROOT': '<(DEPTH)/xcodebuild',
+  },
+}
diff --git a/build/common_untrusted.gypi b/build/common_untrusted.gypi
new file mode 100644
index 0000000..bcc3686
--- /dev/null
+++ b/build/common_untrusted.gypi
@@ -0,0 +1,40 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This GYP file should be included for every target in Chromium that is built
+# using the NaCl toolchain.
+{
+  'includes': [
+    '../native_client/build/untrusted.gypi',
+  ],
+  'target_defaults': {
+    'conditions': [
+      # TODO(bradnelson): Drop this once the nacl side does the same.
+      ['target_arch=="x64"', {
+        'variables': {
+          'enable_x86_32': 0,
+        },
+      }],
+      ['target_arch=="ia32" and OS!="win"', {
+        'variables': {
+          'enable_x86_64': 0,
+        },
+      }],
+      ['target_arch=="arm"', {
+        'variables': {
+          'clang': 1,
+        },
+        'defines': [
+          # Needed by build/build_config.h processor architecture detection.
+          '__ARMEL__',
+          # Needed by base/third_party/nspr/prtime.cc.
+          '__arm__',
+          # Disable Valgrind. The assembly code it generates causes the build
+          # to fail.
+          'NVALGRIND',
+        ],
+      }],
+    ],
+  },
+}
diff --git a/build/compiled_action.gni b/build/compiled_action.gni
new file mode 100644
index 0000000..b6d0c4d
--- /dev/null
+++ b/build/compiled_action.gni
@@ -0,0 +1,173 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file introduces two related templates that act like action and
+# action_foreach, but instead of running a Python script they compile a
+# given tool in the host toolchain and run that (either once or once per
+# source file, depending on the variant).
+#
+# Parameters
+#
+#   tool (required)
+#       [label] Label of the tool to run. This should be an executable, and
+#       this label should not include a toolchain (anything in parens). The
+#       host compile of this tool will be used.
+#
+#   outputs (required)
+#       [list of files] Like the outputs of action (if using "compiled_action",
+#       this would be just the list of outputs), or action_foreach (if using
+#       "compiled_action_foreach", this would contain source expansions mapping
+#       input to output files).
+#
+#   args (required)
+#       [list of strings] Same meaning as action/action_foreach.
+#
+#   inputs (optional)
+#       Files the binary takes as input. The step will be re-run whenever any
+#       of these change. If inputs is empty, the step will run only when the
+#       binary itself changes.
+#
+#   visibility
+#   deps   (both optional)
+#       Same meaning as action/action_foreach.
+#
+#
+# Example of usage:
+#
+#   compiled_action("run_my_tool") {
+#     tool = "//tools/something:mytool"
+#     outputs = [
+#       "$target_gen_dir/mysource.cc",
+#       "$target_gen_dir/mysource.h",
+#     ]
+#
+#     # The tool takes this input.
+#     inputs = [ "my_input_file.idl" ]
+#
+#     # In this case, the tool takes as arguments the input file and the output
+#     # build dir (both relative to the "cd" that the script will be run in)
+#     # and will produce the output files listed above.
+#     args = [
+#       rebase_path("my_input_file.idl", root_build_dir),
+#       "--output-dir", rebase_path(target_gen_dir, root_build_dir),
+#     ]
+#   }
+#
+# You would typically declare your tool like this:
+#   if (host_toolchain == current_toolchain) {
+#     executable("mytool") {
+#       ...
+#     }
+#   }
+# The if statement around the executable is optional. It says "I only care
+# about this target in the host toolchain". Usually this is what you want, and
+# it avoids unnecessarily compiling your tool for the target platform. But if
+# you need a target build of your tool as well, just leave off the if
+# statement.
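+#
+# Only compiled_action has an example above; as an illustrative sketch
+# (hypothetical labels and files), the foreach variant runs the tool once per
+# source using the source expansions mentioned earlier:
+#
+#   compiled_action_foreach("convert_files") {
+#     tool = "//tools/something:converter"
+#     sources = [ "data/a.in", "data/b.in" ]
+#     outputs = [ "$target_gen_dir/{{source_name_part}}.out" ]
+#     args = [
+#       "{{source}}",
+#       "--output-dir", rebase_path(target_gen_dir, root_build_dir),
+#     ]
+#   }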
+
+if (host_os == "win") {
+  _host_executable_suffix = ".exe"
+} else {
+  _host_executable_suffix = ""
+}
+
+template("compiled_action") {
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  assert(!defined(invoker.sources),
+         "compiled_action doesn't take a sources arg. Use inputs instead.")
+
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/gn_run_binary.py"
+
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs
+    } else {
+      inputs = []
+    }
+    outputs = invoker.outputs
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
+    host_executable =
+        get_label_info(host_tool, "root_out_dir") + "/" +
+        get_label_info(host_tool, "name") + _host_executable_suffix
+
+    # Add the executable itself as an input.
+    inputs += [ host_executable ]
+
+    deps = [
+      host_tool,
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+  }
+}
+
+template("compiled_action_foreach") {
+  assert(defined(invoker.sources), "sources must be defined for $target_name")
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  action_foreach(target_name) {
+    # This is a standalone action; define visibility if requested.
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/gn_run_binary.py"
+    sources = invoker.sources
+
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs
+    } else {
+      inputs = []
+    }
+    outputs = invoker.outputs
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
+    host_executable =
+        get_label_info(host_tool, "root_out_dir") + "/" +
+        get_label_info(host_tool, "name") + _host_executable_suffix
+
+    # Add the executable itself as an input.
+    inputs += [ host_executable ]
+
+    deps = [
+      host_tool,
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+  }
+}
diff --git a/build/compiler_version.py b/build/compiler_version.py
new file mode 100755
index 0000000..05faf54
--- /dev/null
+++ b/build/compiler_version.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compiler version checking tool for gcc
+
+Print gcc version as XY if you are running gcc X.Y.*.
+This is used to tweak build flags for gcc 4.4.
+"""
+
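+# Example (illustrative): with gcc 4.8.x installed as the target compiler,
+#   $ python compiler_version.py target compiler
+# prints "48".
+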
+import os
+import re
+import subprocess
+import sys
+
+
+compiler_version_cache = {}  # Map from (compiler, tool) -> version.
+
+
+def Usage(program_name):
+  print '%s MODE TOOL' % os.path.basename(program_name)
+  print 'MODE: host or target.'
+  print 'TOOL: assembler or compiler or linker.'
+  return 1
+
+
+def ParseArgs(args):
+  if len(args) != 2:
+    raise Exception('Invalid number of arguments')
+  mode = args[0]
+  tool = args[1]
+  if mode not in ('host', 'target'):
+    raise Exception('Invalid mode: %s' % mode)
+  if tool not in ('assembler', 'compiler', 'linker'):
+    raise Exception('Invalid tool: %s' % tool)
+  return mode, tool
+
+
+def GetEnvironFallback(var_list, default):
+  """Look up an environment variable from a possible list of variable names."""
+  for var in var_list:
+    if var in os.environ:
+      return os.environ[var]
+  return default
+
+
+def GetVersion(compiler, tool):
+  tool_output = tool_error = None
+  cache_key = (compiler, tool)
+  cached_version = compiler_version_cache.get(cache_key)
+  if cached_version:
+    return cached_version
+  try:
+    # Note that compiler could be something tricky like "distcc g++".
+    if tool == "compiler":
+      compiler = compiler + " -dumpversion"
+      # 4.6
+      version_re = re.compile(r"(\d+)\.(\d+)")
+    elif tool == "assembler":
+      compiler = compiler + " -Xassembler --version -x assembler -c /dev/null"
+      # Unmodified: GNU assembler (GNU Binutils) 2.24
+      # Ubuntu: GNU assembler (GNU Binutils for Ubuntu) 2.22
+      # Fedora: GNU assembler version 2.23.2
+      version_re = re.compile(r"^GNU [^ ]+ .* (\d+).(\d+).*?$", re.M)
+    elif tool == "linker":
+      compiler = compiler + " -Xlinker --version"
+      # Using BFD linker
+      # Unmodified: GNU ld (GNU Binutils) 2.24
+      # Ubuntu: GNU ld (GNU Binutils for Ubuntu) 2.22
+      # Fedora: GNU ld version 2.23.2
+      # Using Gold linker
+      # Unmodified: GNU gold (GNU Binutils 2.24) 1.11
+      # Ubuntu: GNU gold (GNU Binutils for Ubuntu 2.22) 1.11
+      # Fedora: GNU gold (version 2.23.2) 1.11
+      version_re = re.compile(r"^GNU [^ ]+ .* (\d+).(\d+).*?$", re.M)
+    else:
+      raise Exception("Unknown tool %s" % tool)
+
+    # Force the locale to C; otherwise the version string could be localized,
+    # making regex matching fail.
+    env = os.environ.copy()
+    env["LC_ALL"] = "C"
+    pipe = subprocess.Popen(compiler, shell=True, env=env,
+                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    tool_output, tool_error = pipe.communicate()
+    if pipe.returncode:
+      raise subprocess.CalledProcessError(pipe.returncode, compiler)
+
+    parsed_output = version_re.match(tool_output)
+    result = parsed_output.group(1) + parsed_output.group(2)
+    compiler_version_cache[cache_key] = result
+    return result
+  except Exception, e:
+    if tool_error:
+      sys.stderr.write(tool_error)
+    print >> sys.stderr, "compiler_version.py failed to execute:", compiler
+    print >> sys.stderr, e
+    return ""
+
+
+def main(args):
+  try:
+    (mode, tool) = ParseArgs(args[1:])
+  except Exception, e:
+    sys.stderr.write(e.message + '\n\n')
+    return Usage(args[0])
+
+  ret_code, result = ExtractVersion(mode, tool)
+  if ret_code == 0:
+    print result
+  return ret_code
+
+
+def DoMain(args):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  (mode, tool) = ParseArgs(args)
+  ret_code, result = ExtractVersion(mode, tool)
+  if ret_code == 0:
+    return result
+  raise Exception("Failed to extract compiler version for args: %s" % args)
+
+
+def ExtractVersion(mode, tool):
+  # Check if various CXX environment variables exist and use them if they
+  # exist. The preference and fallback order is a close approximation of
+  # GenerateOutputForConfig() in GYP's ninja generator. The main difference is
+  # that GYP's make_global_settings is not supported.
+  environments = ['CXX_target', 'CXX']
+  if mode == 'host':
+    environments = ['CXX_host'] + environments
+  compiler = GetEnvironFallback(environments, 'c++')
+
+  if compiler:
+    compiler_version = GetVersion(compiler, tool)
+    if compiler_version != "":
+      return (0, compiler_version)
+  return (1, None)
+
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv))
diff --git a/build/config/BUILD.gn b/build/config/BUILD.gn
new file mode 100644
index 0000000..0af90cd
--- /dev/null
+++ b/build/config/BUILD.gn
@@ -0,0 +1,434 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/allocator.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/crypto.gni")
+import("//build/config/features.gni")
+import("//build/config/ui.gni")
+import("//build/module_args/v8.gni")
+
+declare_args() {
+  # When set, turns off iterator debugging and the related checks that are
+  # normally turned on for Debug builds. These are generally useful for
+  # catching bugs but in some cases may cause conflicts or excessive slowness.
+  disable_iterator_debugging = false
+
+  # Set to true to not store any build metadata, e.g. ifdef out all __DATE__
+  # and __TIME__. Set to false to reenable the use of these macros in the
+  # code base. See http://crbug.com/314403.
+  #
+  # Official builds continue to embed build metadata, basically the time
+  # the build was made.
+  # TODO(maruel): This decision should be revisited because having an
+  # official deterministic build has high value too but MSVC toolset can't
+  # generate anything deterministic with WPO enabled AFAIK.
+  dont_embed_build_metadata = !is_official_build
+
+  # Set to true to enable dcheck in Release builds.
+  dcheck_always_on = false
+
+  # Set to true to compile with the OpenGL ES 2.0 conformance tests.
+  internal_gles2_conform_tests = false
+}
+
+# TODO(brettw) Most of these should be removed. Instead of global feature
+# flags, we should have more modular flags that apply only to a target and its
+# dependents. For example, depending on the "x11" meta-target should define
+# USE_X11 for all dependents so that everything that could use X11 gets the
+# define, but anything that doesn't depend on X11 doesn't see it.
+#
+# For now we define these globally to match the current GYP build.
+config("feature_flags") {
+  # TODO(brettw) this probably needs to be parameterized.
+  defines = [ "V8_DEPRECATION_WARNINGS" ]  # Don't use deprecated V8 APIs anywhere.
+
+  if (cld_version > 0) {
+    defines += [ "CLD_VERSION=$cld_version" ]
+  }
+  if (enable_mdns) {
+    defines += [ "ENABLE_MDNS=1" ]
+  }
+  if (enable_notifications) {
+    defines += [ "ENABLE_NOTIFICATIONS" ]
+  }
+  if (enable_pepper_cdms) {
+    # TODO(brettw) should probably be "=1"
+    defines += [ "ENABLE_PEPPER_CDMS" ]
+  }
+  if (enable_browser_cdms) {
+    # TODO(brettw) should probably be "=1"
+    defines += [ "ENABLE_BROWSER_CDMS" ]
+  }
+  if (enable_plugins) {
+    defines += [ "ENABLE_PLUGINS=1" ]
+  }
+  if (enable_basic_printing || enable_print_preview) {
+    # Convenience define for ENABLE_BASIC_PRINTING || ENABLE_PRINT_PREVIEW.
+    defines += [ "ENABLE_PRINTING=1" ]
+    if (enable_basic_printing) {
+      # Enable basic printing support and UI.
+      defines += [ "ENABLE_BASIC_PRINTING=1" ]
+    }
+    if (enable_print_preview) {
+      # Enable printing with print preview.
+      # Can be defined without ENABLE_BASIC_PRINTING.
+      defines += [ "ENABLE_PRINT_PREVIEW=1" ]
+    }
+  }
+  if (enable_spellcheck) {
+    defines += [ "ENABLE_SPELLCHECK=1" ]
+  }
+  if (use_platform_spellchecker) {
+    defines += [ "USE_PLATFORM_SPELLCHECKER=1" ]
+  }
+  if (dont_embed_build_metadata) {
+    defines += [ "DONT_EMBED_BUILD_METADATA" ]
+  }
+  if (dcheck_always_on) {
+    defines += [ "DCHECK_ALWAYS_ON=1" ]
+  }
+  if (use_udev) {
+    # TODO(brettw) should probably be "=1".
+    defines += [ "USE_UDEV" ]
+  }
+  if (ui_compositor_image_transport) {
+    # TODO(brettw) should probably be "=1".
+    defines += [ "UI_COMPOSITOR_IMAGE_TRANSPORT" ]
+  }
+  if (use_ash) {
+    defines += [ "USE_ASH=1" ]
+  }
+  if (use_aura) {
+    defines += [ "USE_AURA=1" ]
+  }
+  if (use_glfw) {
+    defines += [ "USE_GLFW=1" ]
+  }
+  if (use_pango) {
+    defines += [ "USE_PANGO=1" ]
+  }
+  if (use_cairo) {
+    defines += [ "USE_CAIRO=1" ]
+  }
+  if (use_clipboard_aurax11) {
+    defines += [ "USE_CLIPBOARD_AURAX11=1" ]
+  }
+  if (use_default_render_theme) {
+    defines += [ "USE_DEFAULT_RENDER_THEME=1" ]
+  }
+  if (use_openssl) {
+    defines += [ "USE_OPENSSL=1" ]
+  }
+  if (use_openssl_certs) {
+    defines += [ "USE_OPENSSL_CERTS=1" ]
+  }
+  if (use_nss_certs) {
+    defines += [ "USE_NSS_CERTS=1" ]
+  }
+  if (use_ozone) {
+    defines += [ "USE_OZONE=1" ]
+  }
+  if (use_x11) {
+    defines += [ "USE_X11=1" ]
+  }
+  if (use_allocator != "tcmalloc") {
+    defines += [ "NO_TCMALLOC" ]
+  }
+  if (is_asan || is_lsan || is_tsan || is_msan || is_ios) {
+    defines += [
+      "MEMORY_TOOL_REPLACES_ALLOCATOR",
+      "MEMORY_SANITIZER_INITIAL_SIZE",
+    ]
+  }
+  if (is_asan) {
+    defines += [ "ADDRESS_SANITIZER" ]
+  }
+  if (is_lsan) {
+    defines += [
+      "LEAK_SANITIZER",
+      "WTF_USE_LEAK_SANITIZER=1",
+    ]
+  }
+  if (is_tsan) {
+    defines += [
+      "THREAD_SANITIZER",
+      "DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1",
+      "WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1",
+    ]
+  }
+  if (is_msan) {
+    defines += [ "MEMORY_SANITIZER" ]
+  }
+  if (enable_webrtc) {
+    defines += [ "ENABLE_WEBRTC=1" ]
+  }
+  if (disable_ftp_support) {
+    defines += [ "DISABLE_FTP_SUPPORT=1" ]
+  }
+  if (!enable_nacl) {
+    defines += [ "DISABLE_NACL" ]
+  }
+  if (enable_extensions) {
+    defines += [ "ENABLE_EXTENSIONS=1" ]
+  }
+  if (enable_configuration_policy) {
+    defines += [ "ENABLE_CONFIGURATION_POLICY" ]
+  }
+  if (enable_task_manager) {
+    defines += [ "ENABLE_TASK_MANAGER=1" ]
+  }
+  if (enable_themes) {
+    defines += [ "ENABLE_THEMES=1" ]
+  }
+  if (enable_captive_portal_detection) {
+    defines += [ "ENABLE_CAPTIVE_PORTAL_DETECTION=1" ]
+  }
+  if (enable_session_service) {
+    defines += [ "ENABLE_SESSION_SERVICE=1" ]
+  }
+  if (enable_rlz) {
+    defines += [ "ENABLE_RLZ" ]
+  }
+  if (enable_plugin_installation) {
+    defines += [ "ENABLE_PLUGIN_INSTALLATION=1" ]
+  }
+  if (enable_app_list) {
+    defines += [ "ENABLE_APP_LIST=1" ]
+  }
+  if (enable_settings_app) {
+    defines += [ "ENABLE_SETTINGS_APP=1" ]
+  }
+  if (enable_supervised_users) {
+    defines += [ "ENABLE_SUPERVISED_USERS=1" ]
+  }
+  if (enable_service_discovery) {
+    defines += [ "ENABLE_SERVICE_DISCOVERY=1" ]
+  }
+  if (enable_autofill_dialog) {
+    defines += [ "ENABLE_AUTOFILL_DIALOG=1" ]
+  }
+  if (enable_wifi_bootstrapping) {
+    defines += [ "ENABLE_WIFI_BOOTSTRAPPING=1" ]
+  }
+  if (enable_image_loader_extension) {
+    defines += [ "IMAGE_LOADER_EXTENSION=1" ]
+  }
+  if (enable_remoting) {
+    defines += [ "ENABLE_REMOTING=1" ]
+  }
+  if (enable_google_now) {
+    defines += [ "ENABLE_GOOGLE_NOW=1" ]
+  }
+  if (enable_one_click_signin) {
+    defines += [ "ENABLE_ONE_CLICK_SIGNIN" ]
+  }
+  if (enable_hidpi) {
+    defines += [ "ENABLE_HIDPI=1" ]
+  }
+  if (enable_topchrome_md) {
+    defines += [ "ENABLE_TOPCHROME_MD=1" ]
+  }
+  if (proprietary_codecs) {
+    defines += [ "USE_PROPRIETARY_CODECS" ]
+  }
+  if (enable_hangout_services_extension) {
+    defines += [ "ENABLE_HANGOUT_SERVICES_EXTENSION=1" ]
+  }
+  if (v8_use_external_startup_data) {
+    defines += [ "V8_USE_EXTERNAL_STARTUP_DATA" ]
+  }
+  if (enable_background) {
+    defines += [ "ENABLE_BACKGROUND=1" ]
+  }
+  if (enable_pre_sync_backup) {
+    defines += [ "ENABLE_PRE_SYNC_BACKUP" ]
+  }
+  if (enable_video_hole) {
+    defines += [ "VIDEO_HOLE=1" ]
+  }
+  if (safe_browsing_mode == 1) {
+    defines += [ "FULL_SAFE_BROWSING" ]
+    defines += [ "SAFE_BROWSING_CSD" ]
+    defines += [ "SAFE_BROWSING_DB_LOCAL" ]
+    defines += [ "SAFE_BROWSING_SERVICE" ]
+  } else if (safe_browsing_mode == 2) {
+    defines += [ "MOBILE_SAFE_BROWSING" ]
+    defines += [ "SAFE_BROWSING_SERVICE" ]
+  } else if (safe_browsing_mode == 3) {
+    defines += [ "MOBILE_SAFE_BROWSING" ]
+    defines += [ "SAFE_BROWSING_DB_REMOTE" ]
+    defines += [ "SAFE_BROWSING_SERVICE" ]
+  }
+  if (is_official_build) {
+    defines += [ "OFFICIAL_BUILD" ]
+  }
+  if (is_chrome_branded) {
+    defines += [ "GOOGLE_CHROME_BUILD" ]
+  } else {
+    defines += [ "CHROMIUM_BUILD" ]
+  }
+  if (enable_media_router) {
+    defines += [ "ENABLE_MEDIA_ROUTER=1" ]
+  }
+  if (enable_webvr) {
+    defines += [ "ENABLE_WEBVR" ]
+  }
+  if (is_fnl) {
+    defines += [ "HAVE_SYS_QUEUE_H_=0" ]
+  }
+}
+
+# Debug/release ----------------------------------------------------------------
+
+config("debug") {
+  defines = [
+    "_DEBUG",
+    "DYNAMIC_ANNOTATIONS_ENABLED=1",
+    "WTF_USE_DYNAMIC_ANNOTATIONS=1",
+  ]
+
+  if (is_nacl) {
+    defines += [ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_" ]
+  }
+
+  if (is_win) {
+    if (disable_iterator_debugging) {
+      # Iterator debugging is enabled by the compiler on debug builds, and we
+      # have to tell it to turn it off.
+      defines += [ "_HAS_ITERATOR_DEBUGGING=0" ]
+    }
+  } else if (is_linux && !is_android && current_cpu == "x64" &&
+             !disable_iterator_debugging) {
+    # Enable libstdc++ debugging facilities to help catch problems early, see
+    # http://crbug.com/65151 .
+    # TODO(phajdan.jr): Should we enable this for all of POSIX?
+    defines += [ "_GLIBCXX_DEBUG=1" ]
+  }
+}
+
+config("release") {
+  defines = [ "NDEBUG" ]
+
+  # Sanitizers.
+  # TODO(GYP) The GYP build has "release_valgrind_build == 0" for this
+  # condition. When Valgrind is set up, we need to do the same here.
+  if (is_tsan) {
+    defines += [
+      "DYNAMIC_ANNOTATIONS_ENABLED=1",
+      "WTF_USE_DYNAMIC_ANNOTATIONS=1",
+    ]
+  } else {
+    defines += [ "NVALGRIND" ]
+    if (!is_nacl) {
+      # NaCl always enables dynamic annotations. Currently this value is set to
+      # 1 for all .nexes.
+      defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ]
+    }
+  }
+}
+
+# Default libraries ------------------------------------------------------------
+
+# This config defines the default libraries applied to all targets.
+config("default_libs") {
+  if (is_win) {
+    # TODO(brettw) this list of defaults should probably be smaller, and
+    # instead the targets that use the less common ones (e.g. wininet or
+    # winspool) should include those explicitly.
+    libs = [
+      "advapi32.lib",
+      "comdlg32.lib",
+      "dbghelp.lib",
+      "delayimp.lib",
+      "dnsapi.lib",
+      "gdi32.lib",
+      "kernel32.lib",
+      "msimg32.lib",
+      "odbc32.lib",
+      "odbccp32.lib",
+      "ole32.lib",
+      "oleaut32.lib",
+      "psapi.lib",
+      "shell32.lib",
+      "shlwapi.lib",
+      "user32.lib",
+      "usp10.lib",
+      "uuid.lib",
+      "version.lib",
+      "wininet.lib",
+      "winmm.lib",
+      "winspool.lib",
+      "ws2_32.lib",
+
+      # Please don't add more stuff here. We should actually be making this
+      # list smaller, since all common things should be covered. If you need
+      # some extra libraries, please just add a libs = [ "foo.lib" ] to your
+      # target that needs it.
+    ]
+  } else if (is_android) {
+    # Android uses -nostdlib so we need to add even libc here.
+    libs = [
+      # TODO(brettw) write a version of this, hopefully we can express this
+      # without forking out to GCC just to get the library name. The android
+      # toolchain directory should probably be extracted into a .gni file that
+      # this file and the android toolchain .gn file can share.
+      #   # Manually link the libgcc.a that the cross compiler uses.
+      #   '<!(<(android_toolchain)/*-gcc -print-libgcc-file-name)',
+      "c",
+      "dl",
+      "m",
+    ]
+  } else if (is_mac) {
+    libs = [
+      "AppKit.framework",
+      "ApplicationServices.framework",
+      "Carbon.framework",
+      "CoreFoundation.framework",
+      "Foundation.framework",
+      "IOKit.framework",
+      "Security.framework",
+      "OpenGL.framework",
+    ]
+  } else if (is_ios) {
+    # The libraries listed here will be specified for both the target and the
+    # host. Only the common ones should be listed here.
+    libs = [
+      "CoreFoundation.framework",
+      "CoreGraphics.framework",
+      "CoreText.framework",
+      "Foundation.framework",
+    ]
+  } else if (is_linux) {
+    libs = [ "dl" ]
+  }
+}
+
+# Add this config to your target to enable precompiled headers.
+#
+# On Windows, precompiled headers are done on a per-target basis. If you have
+# just a couple of files, the time it takes to precompile (~2 seconds) can
+# actually be longer than the time saved. On a Z620, a 100 file target compiles
+# about 2 seconds faster with precompiled headers, with greater savings for
+# larger targets.
+#
+# Recommend precompiled headers for targets with more than 50 .cc files.
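+#
+# For example (illustrative target), enabling them looks like:
+#   static_library("my_big_target") {
+#     configs += [ "//build/config:precompiled_headers" ]
+#     ...
+#   }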
+config("precompiled_headers") {
+  # TODO(brettw) enable this when GN support in the binary has been rolled.
+  #if (is_win) {
+  if (false) {
+    # This is a string rather than a file GN knows about. It has to match
+    # exactly what's in the /FI flag below, and what might appear in the source
+    # code in quotes for an #include directive.
+    precompiled_header = "build/precompile.h"
+
+    # This is a file that GN will compile with the above header. It will be
+    # implicitly added to the sources (potentially multiple times, with one
+    # variant for each language used in the target).
+    precompiled_source = "//build/precompile.cc"
+
+    # Force include the header.
+    cflags = [ "/FI$precompiled_header" ]
+  }
+}
diff --git a/build/config/BUILDCONFIG.gn b/build/config/BUILDCONFIG.gn
new file mode 100644
index 0000000..e22b29a
--- /dev/null
+++ b/build/config/BUILDCONFIG.gn
@@ -0,0 +1,827 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================================================
+# PLATFORM SELECTION
+# =============================================================================
+#
+# There are two main things to set: "os" and "cpu". The "toolchain" is the name
+# of the GN thing that encodes combinations of these things.
+#
+# Users typically only set the variables "target_os" and "target_cpu" in "gn
+# args", the rest are set up by our build and internal to GN.
+#
+# There are three different types of each of these things: The "host"
+# represents the computer doing the compile and never changes. The "target"
+# represents the main thing we're trying to build. The "current" represents
+# which configuration is currently being defined, which can be either the
+# host, the target, or something completely different (like nacl). GN will
+# run the same build file multiple times for the different required
+# configurations in the same build.
+#
+# This gives the following variables:
+#  - host_os, host_cpu, host_toolchain
+#  - target_os, target_cpu, default_toolchain
+#  - current_os, current_cpu, current_toolchain.
+#
+# Note the default_toolchain isn't symmetrical (you would expect
+# target_toolchain). This is because the "default" toolchain is a GN built-in
+# concept, and "target" is something our build sets up that's symmetrical with
+# its GYP counterpart. Potentially the built-in default_toolchain variable
+# could be renamed in the future.
+#
+# When writing build files, to do something only for the host:
+#   if (current_toolchain == host_toolchain) { ...
+
+if (target_os == "") {
+  target_os = host_os
+}
+
+if (target_cpu == "") {
+  if (target_os == "android") {
+    # If we're building for Android, we should assume that we want to
+    # build for ARM by default, not the host_cpu (which is likely x64).
+    # This allows us to not have to specify both target_os and target_cpu
+    # on the command line.
+    target_cpu = "arm"
+  } else {
+    target_cpu = host_cpu
+  }
+}
+
+if (current_cpu == "") {
+  current_cpu = target_cpu
+}
+if (current_os == "") {
+  current_os = target_os
+}
+
+# =============================================================================
+# BUILD FLAGS
+# =============================================================================
+#
+# This block lists input arguments to the build, along with their default
+# values.
+#
+# If a value is specified on the command line, it will overwrite the defaults
+# given in a declare_args block, otherwise the default will be used.
+#
+# YOU SHOULD ALMOST NEVER NEED TO ADD FLAGS TO THIS FILE. GN allows any file in
+# the build to declare build flags. If you need a flag for a single component,
+# you can just declare it in the corresponding BUILD.gn file. If you need a
+# flag in multiple components, there are a few options:
+#
+# - If your feature is a single target, say //components/foo, and the targets
+#   depending on foo need to have some define set if foo is enabled: (1) Write
+#   a declare_args block in foo's BUILD.gn file listing your enable_foo build
+#   flag. (2) Write a config in that file listing the define, and list that
+#   config in foo's public_configs. This will propagate that define to all the
+#   targets depending on foo. (3) When foo is not enabled, just make it expand
+#   to an empty group (or whatever's appropriate for the "off" state of your
+#   feature).
+#
+# - If a semi-random set of targets need to know about a define: (1) In the
+#   lowest level of the build that knows about this feature, add a declare_args
+#   block in the build file for your enable flag. (2) Write a config that adds
+#   a define conditionally based on that build flag. (3) Manually add that
+#   config to the "configs" applying to the targets that need the define.
+#
+# - If a semi-random set of targets need to know about the build flag (to do
+#   file inclusion or exclusion, more than just defines): (1) Write a .gni file
+#   in the lowest-level directory that knows about the feature. (2) Put the
+#   declare_args block with your build flag in that .gni file. (3) Import that
+#   .gni file from the BUILD.gn files that need the flag.
+#
+# Other advice:
+#
+# - Use boolean values when possible. If you need a default value that expands
+#   to some complex thing in the default case (like the location of the
+#   compiler which would be computed by a script), use a default value of -1 or
+#   the empty string. Outside of the declare_args block, conditionally expand
+#   the default value as necessary.
+#
+# - Use a name like "use_foo" or "is_foo" (whatever is more appropriate for
+#   your feature) rather than just "foo".
+#
+# - Write good comments directly above the declaration with no blank line.
+#   These comments will appear as documentation in "gn args --list".
+#
+# - Don't call exec_script inside declare_args. This will execute the script
+#   even if the value is overridden, which is wasteful. See first bullet.
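+#
+# As an illustrative sketch of the first option above (all names
+# hypothetical), in //components/foo/BUILD.gn:
+#
+#   declare_args() {
+#     # Set to true to enable the foo feature.
+#     enable_foo = false
+#   }
+#
+#   config("foo_defines") {
+#     defines = [ "ENABLE_FOO=1" ]
+#   }
+#
+#   component("foo") {
+#     ...
+#     public_configs = [ ":foo_defines" ]
+#   }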
+
+declare_args() {
+  # How many symbols to include in the build. This affects the performance of
+  # the build since the symbols are large and dealing with them is slow.
+  #   2 means regular build with symbols.
+  #   1 means minimal symbols, usually enough for backtraces only.
+  #   0 means no symbols.
+  #   -1 means auto-set (off in release, regular in debug).
+  symbol_level = -1
+
+  # Component build.
+  is_component_build = false
+
+  # Debug build.
+  is_debug = true
+
+  # Whether we're a traditional desktop unix.
+  is_desktop_linux = current_os == "linux" && current_os != "chromeos"
+
+  # Set to true when compiling with the Clang compiler. Typically this is used
+  # to configure warnings.
+  is_clang = current_os == "mac" || current_os == "ios" ||
+             current_os == "linux" || current_os == "chromeos"
+
+  # Compile for Address Sanitizer to find memory bugs.
+  is_asan = false
+
+  # Compile for Leak Sanitizer to find leaks.
+  is_lsan = false
+
+  # Compile for Memory Sanitizer to find uninitialized reads.
+  is_msan = false
+
+  # Compile for Thread Sanitizer to find threading bugs.
+  is_tsan = false
+
+  if (current_os == "chromeos") {
+    # Allows the target toolchain to be injected as arguments. This is needed
+    # to support the CrOS build system which supports per-build-configuration
+    # toolchains.
+    cros_use_custom_toolchain = false
+  }
+
+  # DON'T ADD MORE FLAGS HERE. Read the comment above.
+}
+
+# =============================================================================
+# OS DEFINITIONS
+# =============================================================================
+#
+# We set these various is_FOO booleans for convenience in writing OS-based
+# conditions.
+#
+# - is_android, is_chromeos, is_ios, and is_win should be obvious.
+# - is_mac is set only for desktop Mac. It is not set on iOS.
+# - is_posix is true for mac and any Unix-like system (basically everything
+#   except Windows).
+# - is_linux is true for desktop Linux and ChromeOS, but not Android (which is
+#   generally too different despite being based on the Linux kernel).
+#
+# Do not add more is_* variants here for random lesser-used Unix systems like
+# aix or one of the BSDs. If you need to check these, just check the
+# current_os value directly.
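+#
+# For example (illustrative), prefer a direct check such as:
+#   if (current_os == "freebsd") { ... }
+# over adding a new is_freebsd variable.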
+
+if (current_os == "win") {
+  is_android = false
+  is_chromeos = false
+  is_fnl = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = false
+  is_win = true
+} else if (current_os == "mac") {
+  is_android = false
+  is_chromeos = false
+  is_fnl = false
+  is_ios = false
+  is_linux = false
+  is_mac = true
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "android") {
+  is_android = true
+  is_chromeos = false
+  is_fnl = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "chromeos") {
+  is_android = false
+  is_chromeos = true
+  is_fnl = false
+  is_ios = false
+  is_linux = true
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "nacl") {
+  # current_os == "nacl" will be passed by the nacl toolchain definition.
+  # It is not set by default or on the command line. We treat it as a
+  # Posix variant.
+  is_android = false
+  is_chromeos = false
+  is_fnl = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = true
+  is_posix = true
+  is_win = false
+} else if (current_os == "ios") {
+  is_android = false
+  is_chromeos = false
+  is_fnl = false
+  is_ios = true
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "linux") {
+  is_android = false
+  is_chromeos = false
+  is_fnl = false
+  is_ios = false
+  is_linux = true
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "fnl") {
+  is_android = false
+  is_chromeos = false
+  is_fnl = true
+  is_ios = false
+  is_linux = true
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+}
+
+# =============================================================================
+# SOURCES FILTERS
+# =============================================================================
+#
+# These patterns filter out platform-specific files when assigning to the
+# sources variable. The magic variable |sources_assignment_filter| is applied
+# to each assignment or append to the sources variable, and matches are
+# automatically removed.
+#
+# Note that the patterns are NOT regular expressions. Only "*" and "\b" (path
+# boundary = end of string or slash) are supported, and the entire string
+# must match the pattern (so you need "*.cc" to match all .cc files, for
+# example).
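+#
+# For example (illustrative), the pattern "*\bwin/*" matches "foo/win/x.cc"
+# and "win/y.cc", but not "darwin/z.cc", because "\b" only matches at a path
+# boundary.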
+
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+sources_assignment_filter = []
+if (!is_posix) {
+  sources_assignment_filter += [
+    "*_posix.h",
+    "*_posix.cc",
+    "*_posix_unittest.h",
+    "*_posix_unittest.cc",
+    "*\bposix/*",
+  ]
+}
+if (!is_win) {
+  sources_assignment_filter += [
+    "*_win.cc",
+    "*_win.h",
+    "*_win_unittest.cc",
+    "*\bwin/*",
+    "*.def",
+    "*.rc",
+  ]
+}
+if (!is_mac) {
+  sources_assignment_filter += [
+    "*_mac.h",
+    "*_mac.cc",
+    "*_mac.mm",
+    "*_mac_unittest.h",
+    "*_mac_unittest.cc",
+    "*_mac_unittest.mm",
+    "*\bmac/*",
+    "*_cocoa.h",
+    "*_cocoa.cc",
+    "*_cocoa.mm",
+    "*_cocoa_unittest.h",
+    "*_cocoa_unittest.cc",
+    "*_cocoa_unittest.mm",
+    "*\bcocoa/*",
+  ]
+}
+if (!is_ios) {
+  sources_assignment_filter += [
+    "*_ios.h",
+    "*_ios.cc",
+    "*_ios.mm",
+    "*_ios_unittest.h",
+    "*_ios_unittest.cc",
+    "*_ios_unittest.mm",
+    "*\bios/*",
+  ]
+}
+if (!is_mac && !is_ios) {
+  sources_assignment_filter += [ "*.mm" ]
+}
+if (!is_linux) {
+  sources_assignment_filter += [
+    "*_linux.h",
+    "*_linux.cc",
+    "*_linux_unittest.h",
+    "*_linux_unittest.cc",
+    "*\blinux/*",
+  ]
+}
+if (!is_android) {
+  sources_assignment_filter += [
+    "*_android.h",
+    "*_android.cc",
+    "*_android_unittest.h",
+    "*_android_unittest.cc",
+    "*\bandroid/*",
+  ]
+}
+if (!is_chromeos) {
+  sources_assignment_filter += [
+    "*_chromeos.h",
+    "*_chromeos.cc",
+    "*_chromeos_unittest.h",
+    "*_chromeos_unittest.cc",
+    "*\bchromeos/*",
+  ]
+}
+
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+
+# Actually save this list.
+#
+# These patterns are executed for every file in the source tree of every run.
+# Therefore, adding more patterns slows down the build for everybody. We should
+# only add automatic patterns for configurations affecting hundreds of files
+# across many projects in the tree.
+#
+# Therefore, we only add rules to this list corresponding to platforms on the
+# Chromium waterfall.  This is not for non-officially-supported platforms
+# (FreeBSD, etc.), toolkits (X11, GTK, etc.), or features. For these cases,
+# write a conditional in the target to remove the file(s) from the list when
+# your platform/toolkit/feature doesn't apply.
+set_sources_assignment_filter(sources_assignment_filter)
+
+# =============================================================================
+# BUILD OPTIONS
+# =============================================================================
+
+# These Sanitizers all imply using the Clang compiler. On Windows they either
+# don't work or work differently.
+if (!is_clang && (is_asan || is_lsan || is_tsan || is_msan)) {
+  is_clang = true
+}
+
+# =============================================================================
+# TARGET DEFAULTS
+# =============================================================================
+#
+# Set up the default configuration for every build target of the given type.
+# The values configured here will be automatically set on the scope of the
+# corresponding target. Target definitions can add to or remove from these
+# settings as needed.
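+#
+# For example (illustrative), an individual target can drop one of the
+# default configs set up below:
+#
+#   executable("foo") {
+#     configs -= [ "//build/config/compiler:chromium_code" ]
+#     ...
+#   }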
+
+# Holds all configs used for making native executables and libraries, to avoid
+# duplication in each target below.
+_native_compiler_configs = [
+  "//build/config:feature_flags",
+  "//build/config/compiler:compiler",
+  "//build/config/compiler:compiler_arm_fpu",
+  "//build/config/compiler:chromium_code",
+  "//build/config/compiler:default_include_dirs",
+  "//build/config/compiler:no_rtti",
+  "//build/config/compiler:runtime_library",
+]
+if (is_win) {
+  _native_compiler_configs += [
+    "//build/config/win:lean_and_mean",
+    "//build/config/win:nominmax",
+    "//build/config/win:sdk",
+    "//build/config/win:unicode",
+    "//build/config/win:winver",
+  ]
+}
+if (is_posix) {
+  _native_compiler_configs += [
+    "//build/config/gcc:no_exceptions",
+    "//build/config/gcc:symbol_visibility_hidden",
+  ]
+}
+
+if (is_fnl) {
+  _native_compiler_configs += [ "//build/config/fnl:sdk" ]
+} else if (is_linux) {
+  _native_compiler_configs += [ "//build/config/linux:sdk" ]
+} else if (is_mac) {
+  _native_compiler_configs += [ "//build/config/mac:sdk" ]
+} else if (is_ios) {
+  _native_compiler_configs += [ "//build/config/ios:sdk" ]
+} else if (is_android) {
+  _native_compiler_configs += [ "//build/config/android:sdk" ]
+}
+
+if (is_clang) {
+  _native_compiler_configs += [
+    "//build/config/clang:find_bad_constructs",
+    "//build/config/clang:extra_warnings",
+  ]
+}
+
+# Optimizations and debug checking.
+if (is_debug) {
+  _native_compiler_configs += [ "//build/config:debug" ]
+  _default_optimization_config = "//build/config/compiler:no_optimize"
+} else {
+  _native_compiler_configs += [ "//build/config:release" ]
+  _default_optimization_config = "//build/config/compiler:optimize"
+}
+_native_compiler_configs += [ _default_optimization_config ]
+
+# If it wasn't manually set, set to an appropriate default.
+if (symbol_level == -1) {
+  # Linux is slowed by having symbols as part of the target binary, whereas
+  # Mac and Windows have them separate, so in Release Linux, default them off.
+  if (is_debug || !is_linux) {
+    symbol_level = 2
+  } else if (is_asan || is_lsan || is_tsan || is_msan) {
+    # Sanitizers require symbols for filename suppressions to work.
+    symbol_level = 1
+  } else {
+    symbol_level = 0
+  }
+}
+
+# Symbol setup.
+if (symbol_level == 2) {
+  _default_symbols_config = "//build/config/compiler:symbols"
+} else if (symbol_level == 1) {
+  _default_symbols_config = "//build/config/compiler:minimal_symbols"
+} else if (symbol_level == 0) {
+  _default_symbols_config = "//build/config/compiler:no_symbols"
+} else {
+  assert(false, "Bad value for symbol_level.")
+}
+_native_compiler_configs += [ _default_symbols_config ]
+
+# Windows linker setup for EXEs and DLLs.
+if (is_win) {
+  _windows_linker_configs = [
+    "//build/config/win:default_incremental_linking",
+    "//build/config/win:sdk_link",
+    "//build/config/win:common_linker_setup",
+
+    # Default to console-mode apps. Most of our targets are tests and such
+    # that shouldn't use the windows subsystem.
+    "//build/config/win:console",
+  ]
+}
+
+# Executable defaults.
+_executable_configs =
+    _native_compiler_configs + [ "//build/config:default_libs" ]
+if (is_win) {
+  _executable_configs += _windows_linker_configs
+} else if (is_mac) {
+  _executable_configs += [
+    "//build/config/mac:mac_dynamic_flags",
+    "//build/config/mac:mac_executable_flags",
+  ]
+} else if (is_linux || is_android) {
+  _executable_configs += [ "//build/config/gcc:executable_ldconfig" ]
+  if (is_android) {
+    _executable_configs += [ "//build/config/android:executable_config" ]
+  }
+}
+set_defaults("executable") {
+  configs = _executable_configs
+}
+
+# Static library defaults.
+set_defaults("static_library") {
+  configs = _native_compiler_configs
+}
+
+# Shared library defaults (also for components in component mode).
+_shared_library_configs =
+    _native_compiler_configs + [ "//build/config:default_libs" ]
+if (is_win) {
+  _shared_library_configs += _windows_linker_configs
+} else if (is_mac) {
+  _shared_library_configs += [ "//build/config/mac:mac_dynamic_flags" ]
+} else if (is_android) {
+  # Strip native JNI exports from shared libraries by default. Binaries that
+  # want this can remove this config.
+  _shared_library_configs +=
+      [ "//build/config/android:hide_native_jni_exports" ]
+}
+set_defaults("shared_library") {
+  configs = _shared_library_configs
+}
+if (is_component_build) {
+  set_defaults("component") {
+    configs = _shared_library_configs
+  }
+}
+
+# Source set defaults (also for components in non-component mode).
+set_defaults("source_set") {
+  configs = _native_compiler_configs
+}
+if (!is_component_build) {
+  set_defaults("component") {
+    configs = _native_compiler_configs
+  }
+}
+
+# Test defaults.
+set_defaults("test") {
+  if (is_android) {
+    configs = _shared_library_configs
+  } else {
+    configs = _executable_configs
+  }
+}
+
+# ==============================================================================
+# TOOLCHAIN SETUP
+# ==============================================================================
+#
+# Here we set the default toolchain, as well as the variable host_toolchain
+# which will identify the toolchain corresponding to the local system when
+# doing cross-compiles. When not cross-compiling, this will be the same as the
+# default toolchain.
+
+if (is_win) {
+  # On Windows we use the same toolchain for host and target by default.
+  if (is_clang) {
+    host_toolchain = "//build/toolchain/win:clang_$current_cpu"
+  } else {
+    host_toolchain = "//build/toolchain/win:$current_cpu"
+  }
+  set_default_toolchain("$host_toolchain")
+} else if (is_android) {
+  if (host_os == "linux") {
+    # Use clang for the x86/64 Linux host builds.
+    if (host_cpu == "x86" || host_cpu == "x64") {
+      host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+    } else {
+      host_toolchain = "//build/toolchain/linux:$host_cpu"
+    }
+  } else if (host_os == "mac") {
+    host_toolchain = "//build/toolchain/mac:clang_$host_cpu"
+  } else {
+    assert(false, "Unknown host for android cross compile")
+  }
+  if (is_clang) {
+    set_default_toolchain("//build/toolchain/android:clang_$current_cpu")
+  } else {
+    set_default_toolchain("//build/toolchain/android:$current_cpu")
+  }
+} else if (is_linux) {
+  if (is_clang) {
+    host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+    set_default_toolchain("//build/toolchain/linux:clang_$current_cpu")
+  } else {
+    host_toolchain = "//build/toolchain/linux:$host_cpu"
+    set_default_toolchain("//build/toolchain/linux:$current_cpu")
+  }
+  if (is_chromeos && cros_use_custom_toolchain) {
+    set_default_toolchain("//build/toolchain/cros:target")
+  }
+  if (is_fnl) {
+    set_default_toolchain("//build/toolchain/fnl:target")
+  }
+} else if (is_mac) {
+  host_toolchain = "//build/toolchain/mac:clang_x64"
+  set_default_toolchain(host_toolchain)
+} else if (is_ios) {
+  host_toolchain = "//build/toolchain/mac:clang_x64"
+  if (use_ios_simulator) {
+    set_default_toolchain("//build/toolchain/mac:ios_clang_x64")
+  } else {
+    set_default_toolchain("//build/toolchain/mac:ios_clang_arm")
+  }
+} else if (is_nacl) {
+  # TODO(GYP): This will need to change when we get NaCl working
+  # on multiple platforms, but this whole block of code (how we define
+  # host_toolchain) needs to be reworked regardless to key off of host_os
+  # and host_cpu rather than the is_* variables.
+  host_toolchain = "//build/toolchain/linux:clang_x64"
+}
+
+# ==============================================================================
+# COMPONENT SETUP
+# ==============================================================================
+
+# TODO(brettw) erase this once the built-in "component" function is removed.
+if (is_component_build) {
+  component_mode = "shared_library"
+} else {
+  component_mode = "source_set"
+}
+
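+# Usage of component() matches shared_library()/source_set(); an illustrative
+# sketch with a hypothetical target:
+#
+#   component("foo") {
+#     sources = [ "foo.cc" ]
+#     defines = [ "FOO_IMPLEMENTATION" ]
+#   }
+#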
+template("component") {
+  if (is_component_build) {
+    shared_library(target_name) {
+      # Configs will always be defined since we set_defaults for a component
+      # above. We want to use those rather than whatever came with the nested
+      # shared/static library inside the component.
+      configs = []  # Prevent list overwriting warning.
+      configs = invoker.configs
+
+      # The sources assignment filter will have already been applied when the
+      # code was originally executed. We don't want to apply it again, since
+      # the original target may have overridden it for some assignments.
+      set_sources_assignment_filter([])
+
+      if (defined(invoker.all_dependent_configs)) {
+        all_dependent_configs = invoker.all_dependent_configs
+      }
+      if (defined(invoker.allow_circular_includes_from)) {
+        allow_circular_includes_from = invoker.allow_circular_includes_from
+      }
+      if (defined(invoker.cflags)) {
+        cflags = invoker.cflags
+      }
+      if (defined(invoker.cflags_c)) {
+        cflags_c = invoker.cflags_c
+      }
+      if (defined(invoker.cflags_cc)) {
+        cflags_cc = invoker.cflags_cc
+      }
+      if (defined(invoker.cflags_objc)) {
+        cflags_objc = invoker.cflags_objc
+      }
+      if (defined(invoker.cflags_objcc)) {
+        cflags_objcc = invoker.cflags_objcc
+      }
+      if (defined(invoker.check_includes)) {
+        check_includes = invoker.check_includes
+      }
+      if (defined(invoker.data)) {
+        data = invoker.data
+      }
+      if (defined(invoker.data_deps)) {
+        data_deps = invoker.data_deps
+      }
+      if (defined(invoker.datadeps)) {
+        datadeps = invoker.datadeps
+      }
+      if (defined(invoker.defines)) {
+        defines = invoker.defines
+      }
+
+      # All shared libraries must have the sanitizer deps to properly link in
+      # asan mode (this target will be empty in other cases).
+      if (defined(invoker.deps)) {
+        deps = invoker.deps + [ "//build/config/sanitizers:deps" ]
+      } else {
+        deps = [
+          "//build/config/sanitizers:deps",
+        ]
+      }
+      if (defined(invoker.direct_dependent_configs)) {
+        direct_dependent_configs = invoker.direct_dependent_configs
+      }
+      if (defined(invoker.forward_dependent_configs_from)) {
+        forward_dependent_configs_from = invoker.forward_dependent_configs_from
+      }
+      if (defined(invoker.include_dirs)) {
+        include_dirs = invoker.include_dirs
+      }
+      if (defined(invoker.ldflags)) {
+        ldflags = invoker.ldflags
+      }
+      if (defined(invoker.lib_dirs)) {
+        lib_dirs = invoker.lib_dirs
+      }
+      if (defined(invoker.libs)) {
+        libs = invoker.libs
+      }
+      if (defined(invoker.output_extension)) {
+        output_extension = invoker.output_extension
+      }
+      if (defined(invoker.output_name)) {
+        output_name = invoker.output_name
+      }
+      if (defined(invoker.public)) {
+        public = invoker.public
+      }
+      if (defined(invoker.public_configs)) {
+        public_configs = invoker.public_configs
+      }
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+      if (defined(invoker.sources)) {
+        sources = invoker.sources
+      }
+      if (defined(invoker.testonly)) {
+        testonly = invoker.testonly
+      }
+      if (defined(invoker.visibility)) {
+        visibility = invoker.visibility
+      }
+    }
+  } else {
+    source_set(target_name) {
+      # See above.
+      configs = []  # Prevent list overwriting warning.
+      configs = invoker.configs
+
+      # See above call.
+      set_sources_assignment_filter([])
+
+      if (defined(invoker.all_dependent_configs)) {
+        all_dependent_configs = invoker.all_dependent_configs
+      }
+      if (defined(invoker.allow_circular_includes_from)) {
+        allow_circular_includes_from = invoker.allow_circular_includes_from
+      }
+      if (defined(invoker.cflags)) {
+        cflags = invoker.cflags
+      }
+      if (defined(invoker.cflags_c)) {
+        cflags_c = invoker.cflags_c
+      }
+      if (defined(invoker.cflags_cc)) {
+        cflags_cc = invoker.cflags_cc
+      }
+      if (defined(invoker.cflags_objc)) {
+        cflags_objc = invoker.cflags_objc
+      }
+      if (defined(invoker.cflags_objcc)) {
+        cflags_objcc = invoker.cflags_objcc
+      }
+      if (defined(invoker.check_includes)) {
+        check_includes = invoker.check_includes
+      }
+      if (defined(invoker.data)) {
+        data = invoker.data
+      }
+      if (defined(invoker.data_deps)) {
+        data_deps = invoker.data_deps
+      }
+      if (defined(invoker.datadeps)) {
+        datadeps = invoker.datadeps
+      }
+      if (defined(invoker.defines)) {
+        defines = invoker.defines
+      }
+      if (defined(invoker.deps)) {
+        deps = invoker.deps
+      }
+      if (defined(invoker.direct_dependent_configs)) {
+        direct_dependent_configs = invoker.direct_dependent_configs
+      }
+      if (defined(invoker.forward_dependent_configs_from)) {
+        forward_dependent_configs_from = invoker.forward_dependent_configs_from
+      }
+      if (defined(invoker.include_dirs)) {
+        include_dirs = invoker.include_dirs
+      }
+      if (defined(invoker.ldflags)) {
+        ldflags = invoker.ldflags
+      }
+      if (defined(invoker.lib_dirs)) {
+        lib_dirs = invoker.lib_dirs
+      }
+      if (defined(invoker.libs)) {
+        libs = invoker.libs
+      }
+      if (defined(invoker.output_extension)) {
+        output_extension = invoker.output_extension
+      }
+      if (defined(invoker.output_name)) {
+        output_name = invoker.output_name
+      }
+      if (defined(invoker.public)) {
+        public = invoker.public
+      }
+      if (defined(invoker.public_configs)) {
+        public_configs = invoker.public_configs
+      }
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+      if (defined(invoker.sources)) {
+        sources = invoker.sources
+      }
+      if (defined(invoker.testonly)) {
+        testonly = invoker.testonly
+      }
+      if (defined(invoker.visibility)) {
+        visibility = invoker.visibility
+      }
+    }
+  }
+}
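+
+# Example (an illustrative sketch; the target and dep names below are
+# hypothetical):
+#
+#   component("base") {
+#     sources = [ "base.cc" ]
+#     deps = [ "//third_party/icu" ]
+#   }
+#
+# builds as a shared_library in component builds and as a source_set
+# otherwise.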
diff --git a/build/config/OWNERS b/build/config/OWNERS
new file mode 100644
index 0000000..bd53091
--- /dev/null
+++ b/build/config/OWNERS
@@ -0,0 +1,6 @@
+brettw@chromium.org
+dpranke@chromium.org
+scottmg@chromium.org
+
+per-file BUILDCONFIG.gn=brettw@chromium.org
+per-file BUILDCONFIG.gn=set noparent
diff --git a/build/config/allocator.gni b/build/config/allocator.gni
new file mode 100644
index 0000000..71418a8
--- /dev/null
+++ b/build/config/allocator.gni
@@ -0,0 +1,16 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(GYP): Make tcmalloc work on win.
+if (is_android || current_cpu == "mipsel" || is_mac || is_ios || is_asan ||
+    is_lsan || is_tsan || is_msan || is_win) {
+  _default_allocator = "none"
+} else {
+  _default_allocator = "tcmalloc"
+}
+
+declare_args() {
+  # Memory allocator to use. Set to "none" to use the default allocator.
+  use_allocator = _default_allocator
+}
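+
+# Example override (illustrative): like any declared build arg, this can be
+# set on the command line, e.g.
+#   gn gen out/Default --args='use_allocator="none"'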
diff --git a/build/config/android/BUILD.gn b/build/config/android/BUILD.gn
new file mode 100644
index 0000000..5492693
--- /dev/null
+++ b/build/config/android/BUILD.gn
@@ -0,0 +1,32 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/sysroot.gni")
+
+config("sdk") {
+  if (sysroot != "") {
+    cflags = [ "--sysroot=" + sysroot ]
+    ldflags = [ "--sysroot=" + sysroot ]
+
+    # Need to get some linker flags out of the sysroot.
+    sysroot_ld_path = rebase_path("//build/config/linux/sysroot_ld_path.py")
+    ldflags += [ exec_script(sysroot_ld_path,
+                             [
+                               rebase_path("//build/linux/sysroot_ld_path.sh"),
+                               sysroot,
+                             ],
+                             "value") ]
+  }
+}
+
+config("executable_config") {
+  cflags = [ "-fPIE" ]
+  ldflags = [ "-pie" ]
+}
+
+config("hide_native_jni_exports") {
+  ldflags = [ "-Wl,--version-script=" +
+              rebase_path("//build/android/android_no_jni_exports.lst") ]
+}
diff --git a/build/config/android/OWNERS b/build/config/android/OWNERS
new file mode 100644
index 0000000..3759e93
--- /dev/null
+++ b/build/config/android/OWNERS
@@ -0,0 +1 @@
+cjhopman@chromium.org
diff --git a/build/config/android/config.gni b/build/config/android/config.gni
new file mode 100644
index 0000000..5457b88
--- /dev/null
+++ b/build/config/android/config.gni
@@ -0,0 +1,206 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains common system config stuff for the Android build.
+
+if (is_android) {
+  has_chrome_android_internal =
+      exec_script("//build/dir_exists.py",
+                  [ rebase_path("//clank", root_build_dir) ],
+                  "string") == "True"
+
+  if (has_chrome_android_internal) {
+    import("//clank/config.gni")
+  }
+
+  if (!defined(default_android_sdk_root)) {
+    default_android_sdk_root = "//third_party/android_tools/sdk"
+    default_android_sdk_version = "22"
+    default_android_sdk_build_tools_version = "22.0.1"
+  }
+
+  if (!defined(google_play_services_library)) {
+    google_play_services_library =
+        "//third_party/android_tools:google_play_services_default_java"
+  }
+
+  declare_args() {
+    android_sdk_root = default_android_sdk_root
+    android_sdk_version = default_android_sdk_version
+    android_sdk_build_tools_version = default_android_sdk_build_tools_version
+
+    android_default_keystore_path =
+        "//build/android/ant/chromium-debug.keystore"
+    android_default_keystore_name = "chromiumdebugkey"
+    android_default_keystore_password = "chromium"
+
+    # This is a unique identifier for a given build. It's used for
+    # identifying various build artifacts corresponding to a particular build of
+    # Chrome (e.g. where to find archived symbols).
+    android_chrome_build_id = "\"\""
+
+    # Set to true to run findbugs on JAR targets.
+    run_findbugs = false
+
+    # Set to true to enable the Errorprone compiler.
+    use_errorprone_java_compiler = false
+  }
+
+  # Host stuff -----------------------------------------------------------------
+
+  # Defines the name the Android build gives to the current host CPU
+  # architecture, which is different from the names GN uses.
+  if (host_cpu == "x64") {
+    android_host_arch = "x86_64"
+  } else if (host_cpu == "x86") {
+    android_host_arch = "x86"
+  } else {
+    assert(false, "Need Android toolchain support for your build CPU arch.")
+  }
+
+  # Defines the name the Android build gives to the current host operating
+  # system, which is different from the names GN uses.
+  if (host_os == "linux") {
+    android_host_os = "linux"
+  } else if (host_os == "mac") {
+    android_host_os = "darwin"
+  } else {
+    assert(false, "Need Android toolchain support for your build OS.")
+  }
+
+  # Directories and files ------------------------------------------------------
+  #
+  # We define many of the directory strings here for each output architecture
+  # (rather than just the current one) since these are needed by the Android
+  # toolchain file to define toolchains for all possible targets in one pass.
+
+  # Path to the Android NDK and SDK.
+  android_ndk_root = "//third_party/android_tools/ndk"
+  android_ndk_include_dir = "$android_ndk_root/usr/include"
+
+  android_sdk = "${android_sdk_root}/platforms/android-${android_sdk_version}"
+
+  android_sdk_tools = "${android_sdk_root}/tools"
+  android_sdk_build_tools =
+      "${android_sdk_root}/build-tools/$android_sdk_build_tools_version"
+
+  # Path to the SDK's android.jar
+  android_sdk_jar = "$android_sdk/android.jar"
+
+  zipalign_path = "$android_sdk_build_tools/zipalign"
+
+  # Subdirectories inside android_ndk_root that contain the sysroot for the
+  # associated platform.
+  _android_api_level = 16
+  x86_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-x86"
+  arm_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-arm"
+  mips_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-mips"
+  _android64_api_level = 21
+  x86_64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-x86_64"
+  arm64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-arm64"
+  mips64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-mips64"
+
+  # Toolchain root directory for each build. The actual binaries are inside
+  # a "bin" directory under each of these roots.
+  _android_toolchain_version = "4.9"
+  x86_android_toolchain_root = "$android_ndk_root/toolchains/x86-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  arm_android_toolchain_root = "$android_ndk_root/toolchains/arm-linux-androideabi-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  mips_android_toolchain_root = "$android_ndk_root/toolchains/mipsel-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  x86_64_android_toolchain_root = "$android_ndk_root/toolchains/x86_64-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  arm64_android_toolchain_root = "$android_ndk_root/toolchains/aarch64-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  mips64_android_toolchain_root = "$android_ndk_root/toolchains/mips64el-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+
+  # Location of libgcc. This is only needed for the current GN toolchain, so we
+  # only need to define the current one, rather than one for every platform
+  # like the toolchain roots.
+  if (current_cpu == "x86") {
+    android_prebuilt_arch = "android-x86"
+    _binary_prefix = "i686-linux-android"
+    android_toolchain_root = "$x86_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/i686-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else if (current_cpu == "arm") {
+    android_prebuilt_arch = "android-arm"
+    _binary_prefix = "arm-linux-androideabi"
+    android_toolchain_root = "$arm_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/arm-linux-androideabi/${_android_toolchain_version}/libgcc.a"
+  } else if (current_cpu == "mipsel") {
+    android_prebuilt_arch = "android-mips"
+    _binary_prefix = "mipsel-linux-android"
+    android_toolchain_root = "$mips_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/mipsel-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else if (current_cpu == "x64") {
+    android_prebuilt_arch = "android-x86_64"
+    _binary_prefix = "x86_64-linux-android"
+    android_toolchain_root = "$x86_64_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/x86_64-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else if (current_cpu == "arm64") {
+    android_prebuilt_arch = "android-arm64"
+    _binary_prefix = "aarch64-linux-android"
+    android_toolchain_root = "$arm64_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/aarch64-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else if (current_cpu == "mips64el") {
+    android_prebuilt_arch = "android-mips64"
+    _binary_prefix = "mips64el-linux-android"
+    android_toolchain_root = "$mips64_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/mips64el-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else {
+    assert(false, "Need android libgcc support for your target arch.")
+  }
+
+  android_tool_prefix = "$android_toolchain_root/bin/$_binary_prefix-"
+  android_readelf = "${android_tool_prefix}readelf"
+  android_objcopy = "${android_tool_prefix}objcopy"
+  android_gdbserver =
+      "$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver"
+
+  # Toolchain stuff ------------------------------------------------------------
+
+  android_libcpp_root = "$android_ndk_root/sources/cxx-stl/llvm-libc++"
+  if (component_mode == "shared_library") {
+    android_libcpp_library = "c++_shared"
+  } else {
+    android_libcpp_library = "c++_static"
+  }
+
+  if (component_mode == "shared_library") {
+    # By appending .cr, we prevent name collisions with libraries already
+    # loaded by the Android zygote.
+    android_product_extension = ".cr.so"
+  } else {
+    android_product_extension = ".so"
+  }
+
+  # ABI ------------------------------------------------------------------------
+
+  if (current_cpu == "x86") {
+    android_app_abi = "x86"
+  } else if (current_cpu == "arm") {
+    import("//build/config/arm.gni")
+    if (arm_version < 7) {
+      android_app_abi = "armeabi"
+    } else {
+      android_app_abi = "armeabi-v7a"
+    }
+  } else if (current_cpu == "mipsel") {
+    android_app_abi = "mips"
+  } else if (current_cpu == "x64") {
+    android_app_abi = "x86_64"
+  } else if (current_cpu == "arm64") {
+    android_app_abi = "arm64-v8a"
+  } else if (current_cpu == "mips64el") {
+    android_app_abi = "mips64"
+  } else {
+    assert(false, "Unknown Android ABI: " + current_cpu)
+  }
+
+  android_log_tag = "\"flutter\""
+}
diff --git a/build/config/android/internal_rules.gni b/build/config/android/internal_rules.gni
new file mode 100644
index 0000000..ab154b4
--- /dev/null
+++ b/build/config/android/internal_rules.gni
@@ -0,0 +1,1596 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+
+assert(is_android)
+
+rebased_android_sdk = rebase_path(android_sdk, root_build_dir)
+rebased_android_sdk_root = rebase_path(android_sdk_root, root_build_dir)
+rebased_android_sdk_build_tools =
+    rebase_path(android_sdk_build_tools, root_build_dir)
+
+android_sdk_jar = "$android_sdk/android.jar"
+rebased_android_sdk_jar = rebase_path(android_sdk_jar, root_build_dir)
+android_aapt_path = "$rebased_android_sdk_build_tools/aapt"
+
+template("android_lint") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  jar_path = invoker.jar_path
+  android_manifest = invoker.android_manifest
+  java_files = invoker.java_files
+  base_path = "$target_gen_dir/$target_name"
+
+  action(target_name) {
+    script = "//build/android/gyp/lint.py"
+    result_path = base_path + "/result.xml"
+    config_path = base_path + "/config.xml"
+    suppressions_file = "//build/android/lint/suppressions.xml"
+    inputs = [
+               suppressions_file,
+               android_manifest,
+               jar_path,
+             ] + java_files
+
+    outputs = [
+      config_path,
+      result_path,
+    ]
+
+    rebased_java_files = rebase_path(java_files, root_build_dir)
+
+    args = [
+      "--lint-path=$rebased_android_sdk_root/tools/lint",
+      "--config-path",
+      rebase_path(suppressions_file, root_build_dir),
+      "--manifest-path",
+      rebase_path(android_manifest, root_build_dir),
+      "--product-dir=.",
+      "--jar-path",
+      rebase_path(jar_path, root_build_dir),
+      "--processed-config-path",
+      rebase_path(config_path, root_build_dir),
+      "--result-path",
+      rebase_path(result_path, root_build_dir),
+      "--java-files=$rebased_java_files",
+      "--enable",
+    ]
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+  }
+}
+
+template("findbugs") {
+  jar_path = invoker.jar_path
+
+  build_config = invoker.build_config
+
+  action(target_name) {
+    script = "//build/android/findbugs_diff.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    result_path = "$target_gen_dir/$target_name/result.xml"
+    exclusions_file = "//build/android/findbugs_filter/findbugs_exclude.xml"
+
+    rebased_build_config = rebase_path(build_config, root_build_dir)
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+
+    inputs = [
+      "//build/android/pylib/utils/findbugs.py",
+      exclusions_file,
+      jar_path,
+    ]
+
+    outputs = [
+      depfile,
+      result_path,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--exclude",
+      rebase_path(exclusions_file, root_build_dir),
+      "--auxclasspath-gyp",
+      "@FileArg($rebased_build_config:javac:classpath)",
+      "--output-file",
+      rebase_path(result_path, root_build_dir),
+      rebase_path(jar_path, root_build_dir),
+    ]
+  }
+}
+
+template("dex") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.output))
+  action(target_name) {
+    script = "//build/android/gyp/dex.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    if (defined(invoker.sources)) {
+      sources = invoker.sources
+    }
+    outputs = [
+      depfile,
+      invoker.output,
+    ]
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs
+    }
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+
+    rebased_output = rebase_path(invoker.output, root_build_dir)
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--android-sdk-tools",
+      rebased_android_sdk_build_tools,
+      "--dex-path",
+      rebased_output,
+    ]
+
+    if (defined(invoker.no_locals) && invoker.no_locals) {
+      args += [ "--no-locals=1" ]
+    }
+
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+
+    if (defined(invoker.sources)) {
+      args += rebase_path(invoker.sources, root_build_dir)
+    }
+  }
+}
+
+# Creates a zip archive of the inputs.
+# If base_dir is provided, the archive paths will be relative to it.
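+#
+# Example (an illustrative sketch; the target name and paths are
+# hypothetical):
+#
+#   zip("doc_bundle") {
+#     inputs = [
+#       "$target_gen_dir/a.txt",
+#       "$target_gen_dir/b.txt",
+#     ]
+#     output = "$target_gen_dir/bundle.zip"
+#     base_dir = "$target_gen_dir"
+#   }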
+template("zip") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.inputs))
+  assert(defined(invoker.output))
+
+  rebase_inputs = rebase_path(invoker.inputs, root_build_dir)
+  rebase_output = rebase_path(invoker.output, root_build_dir)
+  action(target_name) {
+    script = "//build/android/gn/zip.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = invoker.inputs
+    outputs = [
+      depfile,
+      invoker.output,
+    ]
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--inputs=$rebase_inputs",
+      "--output=$rebase_output",
+    ]
+    if (defined(invoker.base_dir)) {
+      args += [
+        "--base-dir",
+        rebase_path(invoker.base_dir, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+  }
+}
+
+# Write the target's .build_config file. This is a JSON file that contains a
+# dictionary of information about how to build this target (things that
+# require knowledge of this target's dependencies and cannot be calculated
+# at gn-time). There is a special syntax for pulling a value out of that
+# dictionary into an action/action_foreach's args:
+#   --python-arg=@FileArg($rebased_build_config_path:key0:key1)
+# At runtime, such an arg will be replaced by the value in the build_config.
+# See build/android/gyp/write_build_config.py and
+# build/android/gyp/util/build_utils.py:ExpandFileArgs
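+#
+# For example (mirroring actual usage later in this file), a javac action can
+# pull its classpath out of the generated build_config with:
+#   "--classpath=@FileArg($_rebased_build_config:javac:classpath)"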
+template("write_build_config") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.type))
+  assert(defined(invoker.build_config))
+
+  type = invoker.type
+  build_config = invoker.build_config
+
+  assert(type == "android_apk" || type == "java_library" ||
+         type == "android_resources" || type == "deps_dex")
+
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/android/gyp/write_build_config.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = []
+
+    deps = []
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    possible_deps_configs = []
+    foreach(d, deps) {
+      dep_gen_dir = get_label_info(d, "target_gen_dir")
+      dep_name = get_label_info(d, "name")
+      possible_deps_configs += [ "$dep_gen_dir/$dep_name.build_config" ]
+    }
+    rebase_possible_deps_configs =
+        rebase_path(possible_deps_configs, root_build_dir)
+
+    outputs = [
+      depfile,
+      build_config,
+    ]
+
+    args = [
+      "--type",
+      type,
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--possible-deps-configs=$rebase_possible_deps_configs",
+      "--build-config",
+      rebase_path(build_config, root_build_dir),
+    ]
+
+    is_java_library = type == "java_library"
+    is_apk = type == "android_apk"
+    is_android_resources = type == "android_resources"
+    is_deps_dex = type == "deps_dex"
+
+    supports_android = is_apk || is_android_resources || is_deps_dex ||
+                       (is_java_library && defined(invoker.supports_android) &&
+                        invoker.supports_android)
+    requires_android = is_apk || is_android_resources || is_deps_dex ||
+                       (is_java_library && defined(invoker.requires_android) &&
+                        invoker.requires_android)
+
+    assert(!requires_android || supports_android,
+           "requires_android requires" + " supports_android")
+
+    # Mark these variables as used.
+    assert(is_java_library || true)
+    assert(is_apk || true)
+    assert(is_android_resources || true)
+    assert(is_deps_dex || true)
+
+    if (is_java_library || is_apk) {
+      args += [
+        "--jar-path",
+        rebase_path(invoker.jar_path, root_build_dir),
+      ]
+    }
+
+    if (is_apk || is_deps_dex || (is_java_library && supports_android)) {
+      args += [
+        "--dex-path",
+        rebase_path(invoker.dex_path, root_build_dir),
+      ]
+    }
+    if (supports_android) {
+      args += [ "--supports-android" ]
+    }
+    if (requires_android) {
+      args += [ "--requires-android" ]
+    }
+    if (defined(invoker.bypass_platform_checks) &&
+        invoker.bypass_platform_checks) {
+      args += [ "--bypass-platform-checks" ]
+    }
+
+    if (defined(invoker.apk_under_test)) {
+      deps += [ invoker.apk_under_test ]
+      apk_under_test_gen_dir =
+          get_label_info(invoker.apk_under_test, "target_gen_dir")
+      apk_under_test_name = get_label_info(invoker.apk_under_test, "name")
+      apk_under_test_config =
+          "$apk_under_test_gen_dir/$apk_under_test_name.build_config"
+      args += [
+        "--tested-apk-config",
+        rebase_path(apk_under_test_config, root_build_dir),
+      ]
+    }
+
+    if (is_android_resources || is_apk) {
+      assert(defined(invoker.resources_zip))
+      args += [
+        "--resources-zip",
+        rebase_path(invoker.resources_zip, root_build_dir),
+      ]
+      if (defined(invoker.android_manifest)) {
+        inputs += [ invoker.android_manifest ]
+        args += [
+          "--android-manifest",
+          rebase_path(invoker.android_manifest, root_build_dir),
+        ]
+      } else {
+        assert(!is_apk, "apk build configs require an android_manifest")
+      }
+      if (defined(invoker.custom_package)) {
+        args += [
+          "--package-name",
+          invoker.custom_package,
+        ]
+      }
+      if (defined(invoker.r_text)) {
+        args += [
+          "--r-text",
+          rebase_path(invoker.r_text, root_build_dir),
+        ]
+      }
+    }
+
+    if (is_apk) {
+      if (defined(invoker.native_libs)) {
+        inputs += invoker.native_libs
+        rebased_native_libs = rebase_path(invoker.native_libs, root_build_dir)
+        rebased_android_readelf = rebase_path(android_readelf, root_build_dir)
+        args += [
+          "--native-libs=$rebased_native_libs",
+          "--readelf-path=$rebased_android_readelf",
+        ]
+      }
+    }
+
+    if (defined(invoker.srcjar)) {
+      args += [
+        "--srcjar",
+        rebase_path(invoker.srcjar, root_build_dir),
+      ]
+    }
+  }
+}
+
+template("process_java_prebuilt") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  _input_jar_path = invoker.input_jar_path
+  _output_jar_path = invoker.output_jar_path
+  _jar_toc_path = _output_jar_path + ".TOC"
+
+  assert(invoker.build_config != "")
+
+  if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+    _proguard_jar_path = "$android_sdk_root/tools/proguard/lib/proguard.jar"
+    _proguard_config_path = invoker.proguard_config
+    _build_config = invoker.build_config
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+    _output_jar_target = "${target_name}__proguard_process"
+    action(_output_jar_target) {
+      script = "//build/android/gyp/proguard.py"
+      inputs = [
+        android_sdk_jar,
+        _proguard_jar_path,
+        _build_config,
+        _input_jar_path,
+        _proguard_config_path,
+      ]
+      depfile = "${target_gen_dir}/${target_name}.d"
+      outputs = [
+        depfile,
+        _output_jar_path,
+      ]
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--proguard-path",
+        rebase_path(_proguard_jar_path, root_build_dir),
+        "--input-path",
+        rebase_path(_input_jar_path, root_build_dir),
+        "--output-path",
+        rebase_path(_output_jar_path, root_build_dir),
+        "--proguard-config",
+        rebase_path(_proguard_config_path, root_build_dir),
+        "--classpath",
+        rebased_android_sdk_jar,
+        "--classpath=@FileArg($_rebased_build_config:javac:classpath)",
+      ]
+
+      if (defined(invoker.deps)) {
+        deps = invoker.deps
+      }
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+      if (defined(invoker.data_deps)) {
+        data_deps = invoker.data_deps
+      }
+    }
+  } else {
+    _output_jar_target = "${target_name}__copy_jar"
+    copy(_output_jar_target) {
+      sources = [
+        _input_jar_path,
+      ]
+      outputs = [
+        _output_jar_path,
+      ]
+
+      if (defined(invoker.deps)) {
+        deps = invoker.deps
+      }
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+      if (defined(invoker.data_deps)) {
+        data_deps = invoker.data_deps
+      }
+    }
+  }
+
+  action("${target_name}__jar_toc") {
+    script = "//build/android/gyp/jar_toc.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      _jar_toc_path,
+      _jar_toc_path + ".md5.stamp",
+    ]
+    inputs = [
+      _output_jar_path,
+    ]
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--jar-path",
+      rebase_path(_output_jar_path, root_build_dir),
+      "--toc-path",
+      rebase_path(_jar_toc_path, root_build_dir),
+    ]
+    public_deps = [
+      ":$_output_jar_target",
+    ]
+  }
+
+  group(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    public_deps = [
+      ":${target_name}__jar_toc",
+      ":$_output_jar_target",
+    ]
+  }
+}
+
+template("finalize_apk") {
+  action(target_name) {
+    script = "//build/android/gyp/finalize_apk.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+
+    sources = [
+      invoker.input_apk_path,
+    ]
+    inputs = [
+      invoker.keystore_path,
+    ]
+    outputs = [
+      depfile,
+      invoker.output_apk_path,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--zipalign-path",
+      rebase_path(zipalign_path, root_build_dir),
+      "--unsigned-apk-path",
+      rebase_path(invoker.input_apk_path, root_build_dir),
+      "--final-apk-path",
+      rebase_path(invoker.output_apk_path, root_build_dir),
+      "--key-path",
+      rebase_path(invoker.keystore_path, root_build_dir),
+      "--key-name",
+      invoker.keystore_name,
+      "--key-passwd",
+      invoker.keystore_password,
+    ]
+    if (defined(invoker.rezip_apk) && invoker.rezip_apk) {
+      _rezip_jar_path = "$root_build_dir/lib.java/rezip_apk.jar"
+      inputs += [ _rezip_jar_path ]
+      args += [
+        "--load-library-from-zip=1",
+        "--rezip-apk-jar-path",
+        rebase_path(_rezip_jar_path, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+  }
+}
+
+# Packages resources, assets, dex, and native libraries into an apk. Signs and
+# zipaligns the apk.
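+#
+# Example (an illustrative sketch; all values below are hypothetical):
+#
+#   create_apk("foo_apk") {
+#     apk_path = "$root_build_dir/apks/Foo.apk"
+#     android_manifest = "AndroidManifest.xml"
+#     base_path = "$target_gen_dir/foo"
+#     dex_path = "$target_gen_dir/foo.dex.jar"
+#     resources_zip = "$target_gen_dir/foo.resources.zip"
+#     keystore_path = android_default_keystore_path
+#     keystore_name = android_default_keystore_name
+#     keystore_password = android_default_keystore_password
+#     load_library_from_apk = false
+#     version_code = "1"
+#     version_name = "1.0"
+#   }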
+template("create_apk") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  _android_manifest = invoker.android_manifest
+  _base_path = invoker.base_path
+  _final_apk_path = invoker.apk_path
+
+  if (defined(invoker.resources_zip)) {
+    _resources_zip = invoker.resources_zip
+  }
+  if (defined(invoker.dex_path)) {
+    _dex_path = invoker.dex_path
+  }
+  _load_library_from_apk = invoker.load_library_from_apk
+
+  _package_deps = []
+  if (defined(invoker.deps)) {
+    _package_deps = invoker.deps
+  }
+
+  _native_libs_dir = "//build/android/empty/res"
+  if (defined(invoker.native_libs_dir)) {
+    _native_libs_dir = invoker.native_libs_dir
+  }
+
+  if (defined(invoker.asset_location)) {
+    _asset_location = invoker.asset_location
+  }
+
+  _version_code = invoker.version_code
+  _version_name = invoker.version_name
+
+  _base_apk_path = _base_path + ".apk_intermediates"
+
+  _resource_packaged_apk_path = _base_apk_path + ".ap_"
+  _packaged_apk_path = _base_apk_path + ".unfinished.apk"
+  _shared_resources =
+      defined(invoker.shared_resources) && invoker.shared_resources
+
+  _configuration_name = "Release"
+  if (is_debug) {
+    _configuration_name = "Debug"
+  }
+
+  _keystore_path = invoker.keystore_path
+  _keystore_name = invoker.keystore_name
+  _keystore_password = invoker.keystore_password
+
+  _split_densities = []
+  if (defined(invoker.create_density_splits) && invoker.create_density_splits) {
+    _split_densities = [
+      "hdpi",
+      "xhdpi",
+      "xxhdpi",
+      "xxxhdpi",
+      "tvdpi",
+    ]
+  }
+
+  _split_languages = []
+  if (defined(invoker.language_splits)) {
+    _split_languages = invoker.language_splits
+  }
+
+  _package_resources_target_name = "${target_name}__package_resources"
+  action(_package_resources_target_name) {
+    deps = _package_deps
+
+    script = "//build/android/gyp/package_resources.py"
+    depfile = "${target_gen_dir}/${target_name}.d"
+    inputs = [
+      _android_manifest,
+    ]
+    if (defined(_resources_zip)) {
+      inputs += [ _resources_zip ]
+    }
+    outputs = [
+      depfile,
+      _resource_packaged_apk_path,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--android-sdk",
+      rebased_android_sdk,
+      "--aapt-path",
+      android_aapt_path,
+      "--configuration-name=$_configuration_name",
+      "--android-manifest",
+      rebase_path(_android_manifest, root_build_dir),
+      "--version-code",
+      _version_code,
+      "--version-name",
+      _version_name,
+      "--apk-path",
+      rebase_path(_resource_packaged_apk_path, root_build_dir),
+    ]
+
+    if (defined(_asset_location)) {
+      args += [
+        "--asset-dir",
+        rebase_path(_asset_location, root_build_dir),
+      ]
+    }
+    if (defined(_resources_zip)) {
+      args += [
+        "--resource-zips",
+        rebase_path(_resources_zip, root_build_dir),
+      ]
+    }
+    if (_shared_resources) {
+      args += [ "--shared-resources" ]
+    }
+    if (_split_densities != []) {
+      args += [ "--create-density-splits" ]
+      foreach(_density, _split_densities) {
+        outputs += [ "${_resource_packaged_apk_path}_${_density}" ]
+      }
+    }
+    if (_split_languages != []) {
+      args += [ "--language-splits=$_split_languages" ]
+      foreach(_language, _split_languages) {
+        outputs += [ "${_resource_packaged_apk_path}_${_language}" ]
+      }
+    }
+    if (defined(invoker.extensions_to_not_compress)) {
+      args += [
+        "--no-compress",
+        invoker.extensions_to_not_compress,
+      ]
+    }
+  }
+
+  package_target = "${target_name}__package"
+  action(package_target) {
+    script = "//build/android/gyp/ant.py"
+    _ant_script = "//build/android/ant/apk-package.xml"
+
+    deps = [
+      ":${_package_resources_target_name}",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    depfile = "$target_gen_dir/$target_name.d"
+
+    inputs = [
+      _resource_packaged_apk_path,
+      _ant_script,
+    ]
+    if (defined(_dex_path)) {
+      inputs += [ _dex_path ]
+    }
+
+    outputs = [
+      depfile,
+      _packaged_apk_path,
+    ]
+
+    _rebased_emma_jar = ""
+    _rebased_resource_packaged_apk_path =
+        rebase_path(_resource_packaged_apk_path, root_build_dir)
+    _rebased_packaged_apk_path = rebase_path(_packaged_apk_path, root_build_dir)
+    _rebased_native_libs_dir = rebase_path(_native_libs_dir, root_build_dir)
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--",
+      "-quiet",
+      "-DANDROID_SDK_ROOT=$rebased_android_sdk_root",
+      "-DANDROID_SDK_TOOLS=$rebased_android_sdk_build_tools",
+      "-DRESOURCE_PACKAGED_APK_NAME=$_rebased_resource_packaged_apk_path",
+      "-DCONFIGURATION_NAME=$_configuration_name",
+      "-DNATIVE_LIBS_DIR=$_rebased_native_libs_dir",
+      "-DOUT_DIR=",
+      "-DUNSIGNED_APK_PATH=$_rebased_packaged_apk_path",
+      "-DEMMA_INSTRUMENT=0",
+      "-DEMMA_DEVICE_JAR=$_rebased_emma_jar",
+      "-Dbasedir=.",
+      "-buildfile",
+      rebase_path(_ant_script, root_build_dir),
+    ]
+    if (defined(_dex_path)) {
+      _rebased_dex_path = rebase_path(_dex_path, root_build_dir)
+      args += [
+        "-DDEX_FILE_PATH=$_rebased_dex_path",
+        "-DHAS_CODE=true",
+      ]
+    } else {
+      args += [ "-DHAS_CODE=false" ]
+    }
+  }
+
+  _finalize_apk_rule_name = "${target_name}__finalize"
+  finalize_apk(_finalize_apk_rule_name) {
+    input_apk_path = _packaged_apk_path
+    output_apk_path = _final_apk_path
+    keystore_path = _keystore_path
+    keystore_name = _keystore_name
+    keystore_password = _keystore_password
+    rezip_apk = _load_library_from_apk
+
+    public_deps = [
+      # Generator of the _packaged_apk_path this target takes as input.
+      ":$package_target",
+    ]
+  }
+
+  _final_deps = [ ":${_finalize_apk_rule_name}" ]
+
+  template("finalize_split") {
+    finalize_apk(target_name) {
+      _config = invoker.split_config
+      _type = invoker.split_type
+      input_apk_path = "${_resource_packaged_apk_path}_${_config}"
+      _output_paths = process_file_template(
+              [ _final_apk_path ],
+              "{{source_dir}}/{{source_name_part}}-${_type}-${_config}.apk")
+      output_apk_path = _output_paths[0]
+      keystore_path = _keystore_path
+      keystore_name = _keystore_name
+      keystore_password = _keystore_password
+      deps = [
+        ":${_package_resources_target_name}",
+      ]
+    }
+  }
+
+  foreach(_split, _split_densities) {
+    _split_rule = "${target_name}__finalize_${_split}_split"
+    finalize_split(_split_rule) {
+      split_type = "density"
+      split_config = _split
+    }
+    _final_deps += [ ":$_split_rule" ]
+  }
+  foreach(_split, _split_languages) {
+    _split_rule = "${target_name}__finalize_${_split}_split"
+    finalize_split(_split_rule) {
+      split_type = "lang"
+      split_config = _split
+    }
+    _final_deps += [ ":$_split_rule" ]
+  }
+
+  group(target_name) {
+    public_deps = _final_deps
+  }
+}
+
+template("java_prebuilt_impl") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+  _supports_android =
+      defined(invoker.supports_android) && invoker.supports_android
+
+  assert(defined(invoker.jar_path))
+  _base_path = "${target_gen_dir}/$target_name"
+  _jar_path = _base_path + ".jar"
+  _build_config = _base_path + ".build_config"
+
+  if (_supports_android) {
+    _dex_path = _base_path + ".dex.jar"
+  }
+  _deps = []
+  if (defined(invoker.deps)) {
+    _deps = invoker.deps
+  }
+  _jar_deps = []
+  if (defined(invoker.jar_dep)) {
+    _jar_deps = [ invoker.jar_dep ]
+  }
+
+  _template_name = target_name
+
+  build_config_target_name = "${_template_name}__build_config"
+  process_jar_target_name = "${_template_name}__process_jar"
+  if (_supports_android) {
+    dex_target_name = "${_template_name}__dex"
+  }
+
+  write_build_config(build_config_target_name) {
+    type = "java_library"
+    supports_android = _supports_android
+    requires_android =
+        defined(invoker.requires_android) && invoker.requires_android
+
+    deps = _deps
+    build_config = _build_config
+    jar_path = _jar_path
+    if (_supports_android) {
+      dex_path = _dex_path
+    }
+  }
+
+  process_java_prebuilt(process_jar_target_name) {
+    visibility = [ ":$_template_name" ]
+    if (_supports_android) {
+      visibility += [ ":$dex_target_name" ]
+    }
+
+    if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+      proguard_preprocess = true
+      proguard_config = invoker.proguard_config
+    }
+
+    build_config = _build_config
+    input_jar_path = invoker.jar_path
+    output_jar_path = _jar_path
+
+    deps = [ ":$build_config_target_name" ] + _deps + _jar_deps
+  }
+
+  if (_supports_android) {
+    dex(dex_target_name) {
+      sources = [
+        _jar_path,
+      ]
+      output = _dex_path
+      deps = [ ":$process_jar_target_name" ] + _deps + _jar_deps
+    }
+  }
+
+  group(target_name) {
+    deps = [
+      ":$process_jar_target_name",
+    ]
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+    if (_supports_android) {
+      deps += [ ":$dex_target_name" ]
+    }
+  }
+}
+
+# Compiles and jars a set of java files.
+#
+# Outputs:
+#  $jar_path.jar
+#  $jar_path.jar.TOC
+#
+# Variables
+#   java_files: List of .java files to compile.
+#   java_deps: List of java dependencies. These should all have a .jar output
+#     at "${target_gen_dir}/${target_name}.jar".
+#   chromium_code: If true, enable extra warnings.
+#   srcjar_deps: List of srcjar dependencies. The .java files contained in the
+#     dependencies' srcjar outputs will be compiled and added to the output jar.
+#   jar_path: Use this to explicitly set the output jar path. Defaults to
+#     "${target_gen_dir}/${target_name}.jar".
+template("compile_java") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.java_files))
+  assert(defined(invoker.build_config))
+  assert(defined(invoker.jar_path))
+
+  _java_files = invoker.java_files
+  _final_jar_path = invoker.jar_path
+  _intermediate_jar_path = "$target_gen_dir/$target_name.initial.jar"
+
+  _build_config = invoker.build_config
+
+  _jar_excluded_patterns = []
+  if (defined(invoker.jar_excluded_patterns)) {
+    _jar_excluded_patterns += invoker.jar_excluded_patterns
+  }
+
+  _chromium_code = false
+  if (defined(invoker.chromium_code)) {
+    _chromium_code = invoker.chromium_code
+  }
+
+  _supports_android = true
+  if (defined(invoker.supports_android)) {
+    _supports_android = invoker.supports_android
+  }
+
+  _enable_errorprone = use_errorprone_java_compiler
+  if (defined(invoker.enable_errorprone)) {
+    _enable_errorprone = invoker.enable_errorprone
+  }
+
+  _manifest_entries = []
+  if (defined(invoker.manifest_entries)) {
+    _manifest_entries = invoker.manifest_entries
+  }
+
+  _srcjar_deps = []
+  if (defined(invoker.srcjar_deps)) {
+    _srcjar_deps += invoker.srcjar_deps
+  }
+
+  _java_srcjars = []
+  if (defined(invoker.srcjars)) {
+    _java_srcjars = invoker.srcjars
+  }
+  foreach(dep, _srcjar_deps) {
+    _dep_gen_dir = get_label_info(dep, "target_gen_dir")
+    _dep_name = get_label_info(dep, "name")
+    _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+  }
+
+  # Mark srcjar_deps as used.
+  assert(_srcjar_deps == [] || true)
+
+  _system_jars = []
+  if (defined(invoker.android) && invoker.android) {
+    _system_jars += [ android_sdk_jar ]
+  }
+
+  _rebased_build_config = rebase_path(_build_config, root_build_dir)
+  _rebased_jar_path = rebase_path(_intermediate_jar_path, root_build_dir)
+
+  javac_target_name = "${target_name}__javac"
+  finish_target_name = "${target_name}__finish"
+  final_target_name = target_name
+
+  action(javac_target_name) {
+    script = "//build/android/gyp/javac.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    deps = _srcjar_deps
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    outputs = [
+      depfile,
+      _intermediate_jar_path,
+      _intermediate_jar_path + ".md5.stamp",
+    ]
+    sources = _java_files + _java_srcjars
+    inputs = _system_jars + [ _build_config ]
+
+    _rebased_system_jars = rebase_path(_system_jars, root_build_dir)
+    _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir)
+    _rebased_depfile = rebase_path(depfile, root_build_dir)
+    args = [
+      "--depfile=$_rebased_depfile",
+      "--classpath=$_rebased_system_jars",
+      "--classpath=@FileArg($_rebased_build_config:javac:classpath)",
+      "--jar-path=$_rebased_jar_path",
+      "--java-srcjars=$_rebased_java_srcjars",
+      "--java-srcjars=@FileArg($_rebased_build_config:javac:srcjars)",
+      "--jar-excluded-classes=$_jar_excluded_patterns",
+    ]
+    if (_supports_android) {
+      _rebased_android_sdk_jar = rebase_path(android_sdk_jar, root_build_dir)
+      args += [ "--bootclasspath=$_rebased_android_sdk_jar" ]
+    }
+    foreach(e, _manifest_entries) {
+      args += [ "--manifest-entry=" + e ]
+    }
+    if (_chromium_code) {
+      args += [ "--chromium-code=1" ]
+    }
+    if (_enable_errorprone) {
+      deps += [ "//third_party/errorprone:chromium_errorprone" ]
+      args += [
+        "--use-errorprone-path",
+        "bin/chromium_errorprone",
+      ]
+    }
+    args += rebase_path(_java_files, root_build_dir)
+  }
+
+  process_java_prebuilt(finish_target_name) {
+    visibility = [ ":$final_target_name" ]
+
+    build_config = _build_config
+    input_jar_path = _intermediate_jar_path
+    output_jar_path = _final_jar_path
+    if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+      proguard_preprocess = invoker.proguard_preprocess
+      proguard_config = invoker.proguard_config
+    }
+    deps = [
+      ":$javac_target_name",
+    ]
+  }
+
+  group(final_target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    public_deps = [
+      ":$finish_target_name",
+    ]
+  }
+}
+
+template("java_library_impl") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(
+      defined(invoker.java_files) || defined(invoker.DEPRECATED_java_in_dir) ||
+      defined(invoker.srcjars) || defined(invoker.srcjar_deps))
+  _base_path = "$target_gen_dir/$target_name"
+  _jar_path = _base_path + ".jar"
+  if (defined(invoker.jar_path)) {
+    _jar_path = invoker.jar_path
+  }
+  _template_name = target_name
+
+  _final_deps = []
+  _final_datadeps = []
+  if (defined(invoker.datadeps)) {
+    _final_datadeps = invoker.datadeps
+  }
+
+  _supports_android =
+      defined(invoker.supports_android) && invoker.supports_android
+  _requires_android =
+      defined(invoker.requires_android) && invoker.requires_android
+
+  if (_supports_android) {
+    _dex_path = _base_path + ".dex.jar"
+    if (defined(invoker.dex_path)) {
+      _dex_path = invoker.dex_path
+    }
+  }
+
+  # Define build_config_deps which will be a list of targets required to
+  # build the _build_config.
+  if (defined(invoker.override_build_config)) {
+    _build_config = invoker.override_build_config
+
+    # When a custom build config file is specified, we need to use the deps
+    # supplied by the invoker any time we reference the build config file.
+    assert(defined(invoker.deps),
+           "If you specify a build config file for " +
+               "java_library_impl($target_name), you must " +
+               "also list the target that generates it in deps")
+    build_config_deps = invoker.deps
+  } else {
+    _build_config = _base_path + ".build_config"
+    build_config_target_name = "${_template_name}__build_config"
+    build_config_deps = [ ":$build_config_target_name" ]
+
+    write_build_config(build_config_target_name) {
+      type = "java_library"
+      supports_android = _supports_android
+      requires_android = _requires_android
+      bypass_platform_checks = defined(invoker.bypass_platform_checks) &&
+                               invoker.bypass_platform_checks
+
+      deps = []
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+
+      build_config = _build_config
+      jar_path = _jar_path
+      if (_supports_android) {
+        dex_path = _dex_path
+      }
+    }
+  }
+
+  _chromium_code = true
+  if (defined(invoker.chromium_code)) {
+    _chromium_code = invoker.chromium_code
+  }
+
+  _srcjar_deps = []
+  if (defined(invoker.srcjar_deps)) {
+    _srcjar_deps = invoker.srcjar_deps
+  }
+
+  _srcjars = []
+  if (defined(invoker.srcjars)) {
+    _srcjars = invoker.srcjars
+  }
+
+  _java_files = []
+  if (defined(invoker.java_files)) {
+    _java_files = invoker.java_files
+  } else if (defined(invoker.DEPRECATED_java_in_dir)) {
+    _src_dir = invoker.DEPRECATED_java_in_dir + "/src"
+    _src_dir_exists = exec_script("//build/dir_exists.py",
+                                  [ rebase_path(_src_dir, root_build_dir) ],
+                                  "string")
+    assert(_src_dir_exists == "False",
+           "In GN, java_in_dir should be the fully specified java directory " +
+               "(i.e. including the trailing \"/src\")")
+
+    _java_files_build_rel = exec_script(
+            "//build/android/gyp/find.py",
+            [
+              "--pattern",
+              "*.java",
+              rebase_path(invoker.DEPRECATED_java_in_dir, root_build_dir),
+            ],
+            "list lines")
+    _java_files = rebase_path(_java_files_build_rel, ".", root_build_dir)
+  }
+  assert(_java_files != [] || _srcjar_deps != [] || _srcjars != [])
+
+  _compile_java_target = "${_template_name}__compile_java"
+  _final_deps += [ ":$_compile_java_target" ]
+  compile_java(_compile_java_target) {
+    jar_path = _jar_path
+    build_config = _build_config
+    java_files = _java_files
+    srcjar_deps = _srcjar_deps
+    srcjars = _srcjars
+    chromium_code = _chromium_code
+    android = _requires_android
+
+    if (defined(invoker.enable_errorprone)) {
+      enable_errorprone = invoker.enable_errorprone
+    }
+    if (defined(invoker.jar_excluded_patterns)) {
+      jar_excluded_patterns = invoker.jar_excluded_patterns
+    }
+    if (defined(invoker.proguard_preprocess)) {
+      proguard_preprocess = invoker.proguard_preprocess
+    }
+    if (defined(invoker.proguard_config)) {
+      proguard_config = invoker.proguard_config
+    }
+    if (defined(invoker.dist_jar_path)) {
+      dist_jar_path = invoker.dist_jar_path
+    }
+    if (defined(invoker.manifest_entries)) {
+      manifest_entries = invoker.manifest_entries
+    }
+
+    supports_android = _supports_android
+    deps = build_config_deps
+  }
+
+  if (defined(invoker.main_class)) {
+    _final_deps += [ ":${_template_name}__binary_script" ]
+    action("${_template_name}__binary_script") {
+      script = "//build/android/gyp/create_java_binary_script.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      java_script = "$root_build_dir/bin/$_template_name"
+      inputs = [
+        _build_config,
+      ]
+      outputs = [
+        depfile,
+        java_script,
+      ]
+      _rebased_build_config = rebase_path(_build_config, root_build_dir)
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--output",
+        rebase_path(java_script, root_build_dir),
+        "--classpath=@FileArg($_rebased_build_config:java:full_classpath)",
+        "--jar-path",
+        rebase_path(_jar_path, root_build_dir),
+        "--main-class",
+        invoker.main_class,
+      ]
+
+      deps = build_config_deps
+    }
+  }
+
+  if (_supports_android) {
+    if (defined(invoker.chromium_code) && invoker.chromium_code) {
+      _android_manifest = "//build/android/AndroidManifest.xml"
+      if (defined(invoker.android_manifest)) {
+        _android_manifest = invoker.android_manifest
+      }
+
+      _final_datadeps += [ ":${_template_name}__lint" ]
+      android_lint("${_template_name}__lint") {
+        android_manifest = _android_manifest
+        jar_path = _jar_path
+        java_files = _java_files
+        deps = [
+          ":$_compile_java_target",
+        ]
+        if (defined(invoker.deps)) {
+          deps += invoker.deps
+        }
+      }
+
+      if (run_findbugs) {
+        _final_datadeps += [ ":${_template_name}__findbugs" ]
+        findbugs("${_template_name}__findbugs") {
+          build_config = _build_config
+          jar_path = _jar_path
+          deps = build_config_deps
+        }
+      }
+    }
+
+    _final_deps += [ ":${_template_name}__dex" ]
+    dex("${_template_name}__dex") {
+      sources = [
+        _jar_path,
+      ]
+      output = _dex_path
+      deps = [
+        ":$_compile_java_target",
+      ]
+    }
+  }
+
+  group(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    public_deps = _final_deps
+    data_deps = _final_datadeps
+  }
+}
+
+# Runs process_resources.py
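+#
+# Example (an illustrative sketch; the target name and paths are
+# hypothetical):
+#
+#   process_resources("foo_resources") {
+#     android_manifest = "AndroidManifest.xml"
+#     resource_dirs = [ "res" ]
+#     build_config = "$target_gen_dir/foo.build_config"
+#     zip_path = "$target_gen_dir/foo.resources.zip"
+#     srcjar_path = "$target_gen_dir/foo.srcjar"
+#     r_text_path = "$target_gen_dir/foo_R.txt"
+#   }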
+template("process_resources") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  zip_path = invoker.zip_path
+  srcjar_path = invoker.srcjar_path
+  r_text_path = invoker.r_text_path
+  build_config = invoker.build_config
+  resource_dirs = invoker.resource_dirs
+  android_manifest = invoker.android_manifest
+
+  non_constant_id = true
+  if (defined(invoker.generate_constant_ids) && invoker.generate_constant_ids) {
+    non_constant_id = false
+  }
+
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/android/gyp/process_resources.py"
+
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      zip_path,
+      srcjar_path,
+      r_text_path,
+    ]
+
+    sources_build_rel = exec_script("//build/android/gyp/find.py",
+                                    rebase_path(resource_dirs, root_build_dir),
+                                    "list lines")
+    sources = rebase_path(sources_build_rel, ".", root_build_dir)
+
+    inputs = [
+      build_config,
+      android_manifest,
+    ]
+
+    rebase_resource_dirs = rebase_path(resource_dirs, root_build_dir)
+    rebase_build_config = rebase_path(build_config, root_build_dir)
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--android-sdk",
+      rebase_path(android_sdk, root_build_dir),
+      "--aapt-path",
+      android_aapt_path,
+      "--android-manifest",
+      rebase_path(android_manifest, root_build_dir),
+      "--resource-dirs=$rebase_resource_dirs",
+      "--srcjar-out",
+      rebase_path(srcjar_path, root_build_dir),
+      "--resource-zip-out",
+      rebase_path(zip_path, root_build_dir),
+      "--r-text-out",
+      rebase_path(r_text_path, root_build_dir),
+      "--dependencies-res-zips=@FileArg($rebase_build_config:resources:dependency_zips)",
+      "--extra-res-packages=@FileArg($rebase_build_config:resources:extra_package_names)",
+      "--extra-r-text-files=@FileArg($rebase_build_config:resources:extra_r_text_files)",
+    ]
+
+    if (non_constant_id) {
+      args += [ "--non-constant-id" ]
+    }
+
+    if (defined(invoker.custom_package)) {
+      args += [
+        "--custom-package",
+        invoker.custom_package,
+      ]
+    }
+
+    if (defined(invoker.v14_skip) && invoker.v14_skip) {
+      args += [ "--v14-skip" ]
+    }
+
+    if (defined(invoker.shared_resources) && invoker.shared_resources) {
+      args += [ "--shared-resources" ]
+    }
+
+    if (defined(invoker.include_all_resources) &&
+        invoker.include_all_resources) {
+      args += [ "--include-all-resources" ]
+    }
+
+    if (defined(invoker.all_resources_zip_path)) {
+      all_resources_zip = invoker.all_resources_zip_path
+      outputs += [ all_resources_zip ]
+      args += [
+        "--all-resources-zip-out",
+        rebase_path(all_resources_zip, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
+
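+# Copies files into a destination directory via //build/android/gyp/copy_ex.py.
+#
+# Variables (as consumed by the template below)
+#   dest: Destination directory for the copied files.
+#   sources: Files to copy.
+#   clear_dir: If true, clear the destination directory before copying.
+#
+# Example (a sketch only; the target and file names are hypothetical)
+#   copy_ex("copy_foo_data") {
+#     dest = "$root_build_dir/foo"
+#     sources = [ "data/foo.dat" ]
+#   }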
+template("copy_ex") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/android/gyp/copy_ex.py"
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+
+    sources = []
+    if (defined(invoker.sources)) {
+      sources += invoker.sources
+    }
+
+    inputs = []
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--dest",
+      rebase_path(invoker.dest, root_build_dir),
+    ]
+    rebased_sources = rebase_path(sources, root_build_dir)
+    args += [ "--files=$rebased_sources" ]
+
+    if (defined(invoker.clear_dir) && invoker.clear_dir) {
+      args += [ "--clear" ]
+    }
+
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+  }
+}
+
+# Produces a single .dex.jar out of a set of Java dependencies.
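+#
+# Variables (as consumed by the template below)
+#   deps: Dependencies whose dex files are packaged into the output.
+#   dex_path: Path for the output .dex.jar.
+#   excluded_jars: Optional list of jars to exclude from the output.
+#
+# Example (a sketch only; the target and path names are hypothetical)
+#   deps_dex("foo_deps_dex") {
+#     deps = [ ":foo_java" ]
+#     dex_path = "$root_build_dir/foo.dex.jar"
+#   }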
+template("deps_dex") {
+  set_sources_assignment_filter([])
+  build_config = "$target_gen_dir/${target_name}.build_config"
+  build_config_target_name = "${target_name}__build_config"
+
+  write_build_config(build_config_target_name) {
+    type = "deps_dex"
+    deps = invoker.deps
+
+    build_config = build_config
+    dex_path = invoker.dex_path
+  }
+
+  rebased_build_config = rebase_path(build_config, root_build_dir)
+  dex(target_name) {
+    inputs = [
+      build_config,
+    ]
+    output = invoker.dex_path
+    dex_arg_key = "${rebased_build_config}:final_dex:dependency_dex_files"
+    args = [ "--inputs=@FileArg($dex_arg_key)" ]
+    if (defined(invoker.excluded_jars)) {
+      excluded_jars = rebase_path(invoker.excluded_jars, root_build_dir)
+      args += [ "--excluded-paths=${excluded_jars}" ]
+    }
+    deps = [
+      ":$build_config_target_name",
+    ]
+  }
+}
+
+# Creates an AndroidManifest.xml for an APK split.
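+#
+# Variables (per the assertions and arguments below)
+#   main_manifest: The main AndroidManifest.xml to derive the split from.
+#   out_manifest: Path for the generated split manifest.
+#   split_name: Name of the split.
+#   version_code, version_name, has_code: Optional values forwarded to the
+#     script.
+#
+# Example (a sketch only; the target and path names are hypothetical)
+#   generate_split_manifest("foo_split_manifest") {
+#     main_manifest = "AndroidManifest.xml"
+#     out_manifest = "$target_gen_dir/foo_split/AndroidManifest.xml"
+#     split_name = "foo"
+#   }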
+template("generate_split_manifest") {
+  assert(defined(invoker.main_manifest))
+  assert(defined(invoker.out_manifest))
+  assert(defined(invoker.split_name))
+
+  action(target_name) {
+    depfile = "$target_gen_dir/$target_name.d"
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    args = [
+      "--main-manifest",
+      rebase_path(invoker.main_manifest, root_build_dir),
+      "--out-manifest",
+      rebase_path(invoker.out_manifest, root_build_dir),
+      "--split",
+      invoker.split_name,
+    ]
+    if (defined(invoker.version_code)) {
+      args += [
+        "--version-code",
+        invoker.version_code,
+      ]
+    }
+    if (defined(invoker.version_name)) {
+      args += [
+        "--version-name",
+        invoker.version_name,
+      ]
+    }
+    if (defined(invoker.has_code)) {
+      args += [
+        "--has-code",
+        invoker.has_code,
+      ]
+    }
+    args += [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+    ]
+
+    script = "//build/android/gyp/generate_split_manifest.py"
+    outputs = [
+      depfile,
+      invoker.out_manifest,
+    ]
+    inputs = [
+      invoker.main_manifest,
+    ]
+  }
+}
+
+# Generates a script in the output bin directory which runs the test
+# target using the test runner script in build/android/test_runner.py.
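+#
+# Variables (per the assertions below)
+#   test_name: Name used for the generated bin/run_* script.
+#   test_type: Either "gtest" or "instrumentation".
+#   test_suite: Required when test_type == "gtest".
+#   test_apk: Required when test_type == "instrumentation".
+#
+# Example (a sketch only; the names are hypothetical)
+#   test_runner_script("foo_unittests__test_runner_script") {
+#     test_name = "foo_unittests"
+#     test_type = "gtest"
+#     test_suite = "foo_unittests"
+#   }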
+template("test_runner_script") {
+  testonly = true
+  _test_name = invoker.test_name
+  _test_type = invoker.test_type
+
+  action(target_name) {
+    script = "//build/android/gyp/create_test_runner_script.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    test_runner_args = [
+      _test_type,
+      "--output-directory",
+      rebase_path(root_build_dir, root_build_dir),
+    ]
+    if (_test_type == "gtest") {
+      assert(defined(invoker.test_suite))
+      test_runner_args += [
+        "--suite",
+        invoker.test_suite,
+      ]
+    } else if (_test_type == "instrumentation") {
+      assert(defined(invoker.test_apk))
+      test_runner_args += [
+        "--test-apk",
+        invoker.test_apk,
+      ]
+      if (defined(invoker.support_apk_path)) {
+        test_runner_args += [
+          "--support-apk",
+          rebase_path(invoker.support_apk_path, root_build_dir),
+        ]
+      }
+    } else {
+      assert(false, "Invalid test type: $_test_type.")
+    }
+
+    if (defined(invoker.isolate_file)) {
+      test_runner_args += [
+        "--isolate-file-path",
+        rebase_path(invoker.isolate_file, root_build_dir),
+      ]
+    }
+
+    generated_script = "$root_build_dir/bin/run_${_test_name}"
+    outputs = [
+      depfile,
+      generated_script,
+    ]
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--script-output-path",
+      rebase_path(generated_script, root_build_dir),
+    ]
+    args += test_runner_args
+  }
+}
diff --git a/build/config/android/rules.gni b/build/config/android/rules.gni
new file mode 100644
index 0000000..3c8640b
--- /dev/null
+++ b/build/config/android/rules.gni
@@ -0,0 +1,2238 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//base/android/linker/config.gni")
+import("//build/config/android/config.gni")
+import("//build/config/android/internal_rules.gni")
+import("//third_party/android_platform/config.gni")
+import("//tools/grit/grit_rule.gni")
+
+assert(is_android)
+
+# Declare a jni target
+#
+# This target generates the native jni bindings for a set of .java files.
+#
+# See base/android/jni_generator/jni_generator.py for more info about the
+# format of generating JNI bindings.
+#
+# Variables
+#   sources: list of .java files to generate jni for
+#   jni_package: subdirectory path for generated bindings
+#
+# Example
+#   generate_jni("foo_jni") {
+#     sources = [
+#       "android/java/src/org/chromium/foo/Foo.java",
+#       "android/java/src/org/chromium/foo/FooUtil.java",
+#     ]
+#     jni_package = "foo"
+#   }
+template("generate_jni") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.sources))
+  assert(defined(invoker.jni_package))
+  jni_package = invoker.jni_package
+  base_output_dir = "${target_gen_dir}/${target_name}"
+  package_output_dir = "${base_output_dir}/${jni_package}"
+  jni_output_dir = "${package_output_dir}/jni"
+
+  jni_generator_include = "//base/android/jni_generator/jni_generator_helper.h"
+
+  foreach_target_name = "${target_name}__jni_gen"
+  action_foreach(foreach_target_name) {
+    script = "//base/android/jni_generator/jni_generator.py"
+    depfile = "$target_gen_dir/$target_name.{{source_name_part}}.d"
+    sources = invoker.sources
+    outputs = [
+      depfile,
+      "${jni_output_dir}/{{source_name_part}}_jni.h",
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--input_file={{source}}",
+      "--optimize_generation=1",
+      "--ptr_type=long",
+      "--output_dir",
+      rebase_path(jni_output_dir, root_build_dir),
+      "--includes",
+      rebase_path(jni_generator_include, jni_output_dir),
+      "--native_exports_optional",
+    ]
+    if (defined(invoker.jni_generator_jarjar_file)) {
+      args += [
+        "--jarjar",
+        rebase_path(jni_generator_jarjar_file, root_build_dir),
+      ]
+    }
+  }
+
+  config("jni_includes_${target_name}") {
+    # TODO(cjhopman): #includes should probably all be relative to
+    # base_output_dir. Remove that from this config once the includes are
+    # updated.
+    include_dirs = [
+      base_output_dir,
+      package_output_dir,
+    ]
+  }
+
+  group(target_name) {
+    deps = [
+      ":$foreach_target_name",
+    ]
+    public_configs = [ ":jni_includes_${target_name}" ]
+
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+  }
+}
+
+# Declare a jni target for a prebuilt jar
+#
+# This target generates the native jni bindings for a set of classes in a .jar.
+#
+# See base/android/jni_generator/jni_generator.py for more info about the
+# format of generating JNI bindings.
+#
+# Variables
+#   classes: list of .class files in the jar to generate jni for. These should
+#     include the full path to the .class file.
+#   jni_package: subdirectory path for generated bindings
+#   jar_file: the path to the .jar. If not provided, will default to the sdk's
+#     android.jar
+#
+#   deps, public_deps: As normal
+#
+# Example
+#   generate_jar_jni("foo_jni") {
+#     classes = [
+#       "android/view/Foo.class",
+#     ]
+#     jni_package = "foo"
+#   }
+template("generate_jar_jni") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.classes))
+  assert(defined(invoker.jni_package))
+
+  if (defined(invoker.jar_file)) {
+    jar_file = invoker.jar_file
+  } else {
+    jar_file = android_sdk_jar
+  }
+
+  jni_package = invoker.jni_package
+  base_output_dir = "${root_gen_dir}/${target_name}/${jni_package}"
+  jni_output_dir = "${base_output_dir}/jni"
+
+  jni_generator_include = "//base/android/jni_generator/jni_generator_helper.h"
+
+  # TODO(cjhopman): make jni_generator.py support generating jni for multiple
+  # .class files from a .jar.
+  jni_actions = []
+  foreach(class, invoker.classes) {
+    _classname_list = []
+    _classname_list = process_file_template([ class ], "{{source_name_part}}")
+    classname = _classname_list[0]
+    jni_target_name = "${target_name}__jni_${classname}"
+    jni_actions += [ ":$jni_target_name" ]
+    action(jni_target_name) {
+      # The sources aren't compiled so don't check their dependencies.
+      check_includes = false
+      depfile = "$target_gen_dir/$target_name.d"
+      script = "//base/android/jni_generator/jni_generator.py"
+      sources = [
+        jar_file,
+      ]
+      outputs = [
+        depfile,
+        "${jni_output_dir}/${classname}_jni.h",
+      ]
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--jar_file",
+        rebase_path(jar_file, root_build_dir),
+        "--input_file",
+        class,
+        "--optimize_generation=1",
+        "--ptr_type=long",
+        "--output_dir",
+        rebase_path(jni_output_dir, root_build_dir),
+        "--includes",
+        rebase_path(jni_generator_include, jni_output_dir),
+        "--native_exports_optional",
+      ]
+    }
+  }
+
+  config("jni_includes_${target_name}") {
+    include_dirs = [ base_output_dir ]
+  }
+
+  group(target_name) {
+    deps = jni_actions
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+    public_configs = [ ":jni_includes_${target_name}" ]
+  }
+}
+
+# Declare a target for c-preprocessor-generated java files
+#
+# NOTE: For generating Java counterparts to enums, prefer using the java_cpp_enum
+#       rule instead.
+#
+# This target generates java files using the host C pre-processor. Each file in
+# sources will be compiled using the C pre-processor. If include_path is
+# specified, it will be passed (with -I) to the pre-processor.
+#
+# This target will create a single .srcjar. Adding this target to an
+# android_library target's srcjar_deps will cause the generated java files to
+# be included in that library's final outputs.
+#
+# Variables
+#   sources: list of files to be processed by the C pre-processor. For each
+#     file in sources, there will be one .java file in the final .srcjar. For a
+#     file named FooBar.template, a java file will be created with name
+#     FooBar.java.
+#   inputs: additional compile-time dependencies. Any files
+#     `#include`-ed in the templates should be listed here.
+#   package_name: this will be the subdirectory for each .java file in the
+#     .srcjar.
+#
+# Example
+#   java_cpp_template("foo_generated_enum") {
+#     sources = [
+#       "android/java/templates/Foo.template",
+#     ]
+#     inputs = [
+#       "android/java/templates/native_foo_header.h",
+#     ]
+#
+#     package_name = "org/chromium/base/library_loader"
+#     include_path = "android/java/templates"
+#   }
+template("java_cpp_template") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.sources))
+  package_name = invoker.package_name + ""
+
+  if (defined(invoker.include_path)) {
+    include_path = invoker.include_path + ""
+  } else {
+    include_path = "//"
+  }
+
+  apply_gcc_target_name = "${target_name}__apply_gcc"
+  zip_srcjar_target_name = "${target_name}__zip_srcjar"
+  final_target_name = target_name
+
+  action_foreach(apply_gcc_target_name) {
+    visibility = [ ":$zip_srcjar_target_name" ]
+    script = "//build/android/gyp/gcc_preprocess.py"
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs + []
+    }
+    depfile = "${target_gen_dir}/${target_name}_{{source_name_part}}.d"
+
+    sources = invoker.sources
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+
+    gen_dir =
+        "${target_gen_dir}/${target_name}/java_cpp_template/${package_name}"
+    gcc_template_output_pattern = "${gen_dir}/{{source_name_part}}.java"
+
+    outputs = [
+      depfile,
+      gcc_template_output_pattern,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--include-path",
+      rebase_path(include_path, root_build_dir),
+      "--output",
+      rebase_path(gen_dir, root_build_dir) + "/{{source_name_part}}.java",
+      "--template={{source}}",
+    ]
+
+    if (defined(invoker.defines)) {
+      foreach(def, invoker.defines) {
+        args += [
+          "--defines",
+          def,
+        ]
+      }
+    }
+  }
+
+  apply_gcc_outputs = get_target_outputs(":$apply_gcc_target_name")
+  base_gen_dir = get_label_info(":$apply_gcc_target_name", "target_gen_dir")
+
+  srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+  zip(zip_srcjar_target_name) {
+    visibility = [ ":$final_target_name" ]
+    inputs = apply_gcc_outputs
+    output = srcjar_path
+    base_dir = base_gen_dir
+    deps = [
+      ":$apply_gcc_target_name",
+    ]
+  }
+
+  group(final_target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    deps = [
+      ":$zip_srcjar_target_name",
+    ]
+  }
+}
+
+# Declare a target for generating Java classes from C++ enums.
+#
+# This target generates Java files from C++ enums using a script.
+#
+# This target will create a single .srcjar. Adding this target to an
+# android_library target's srcjar_deps will cause the generated java files to
+# be included in that library's final outputs.
+#
+# Variables
+#   sources: list of files to be processed by the script. For each annotated
+#     enum contained in the sources files the script will generate a .java
+#     file with the same name as the name of the enum.
+#
+#   outputs: list of outputs, relative to the output_dir. These paths are
+#     verified at build time by the script. To get the list programmatically, run:
+#       python build/android/gyp/java_cpp_enum.py \
+#         --print_output_only . path/to/header/file.h
+#
+# Example
+#   java_cpp_enum("foo_generated_enum") {
+#     sources = [
+#       "src/native_foo_header.h",
+#     ]
+#     outputs = [
+#       "org/chromium/FooEnum.java",
+#     ]
+#   }
+template("java_cpp_enum") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.sources))
+  assert(defined(invoker.outputs))
+
+  generate_enum_target_name = "${target_name}__generate_enum"
+  zip_srcjar_target_name = "${target_name}__zip_srcjar"
+  final_target_name = target_name
+
+  action(generate_enum_target_name) {
+    visibility = [ ":$zip_srcjar_target_name" ]
+
+    # The sources aren't compiled so don't check their dependencies.
+    check_includes = false
+
+    sources = invoker.sources
+    script = "//build/android/gyp/java_cpp_enum.py"
+    gen_dir = "${target_gen_dir}/${target_name}/enums"
+    outputs =
+        get_path_info(rebase_path(invoker.outputs, ".", gen_dir), "abspath")
+
+    args = []
+    foreach(output, rebase_path(outputs, root_build_dir)) {
+      args += [
+        "--assert_file",
+        output,
+      ]
+    }
+    args += [ rebase_path(gen_dir, root_build_dir) ]
+    args += rebase_path(invoker.sources, root_build_dir)
+  }
+
+  generate_enum_outputs = get_target_outputs(":$generate_enum_target_name")
+  base_gen_dir = get_label_info(":$generate_enum_target_name", "target_gen_dir")
+
+  srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+  zip(zip_srcjar_target_name) {
+    visibility = [ ":$final_target_name" ]
+    inputs = generate_enum_outputs
+    output = srcjar_path
+    base_dir = base_gen_dir
+    deps = [
+      ":$generate_enum_target_name",
+    ]
+  }
+
+  group(final_target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    deps = [
+      ":$zip_srcjar_target_name",
+    ]
+  }
+}
+
+# Declare a target for processing Android resources as Jinja templates.
+#
+# This takes an Android resource directory where each resource is a Jinja
+# template, processes each template, then packages the results in a zip file
+# which can be consumed by an android resources, library, or apk target.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the resources will be included with that target.
+#
+# Variables
+#   resources: The list of resources files to process.
+#   res_dir: The resource directory containing the resources.
+#   variables: (Optional) A list of variables to make available to the template
+#     processing environment, e.g. ["name=foo", "color=red"].
+#
+# Example
+#   jinja_template_resources("chrome_shell_template_resources") {
+#     res_dir = "shell/res_template"
+#     resources = ["shell/res_template/xml/syncable.xml"]
+#     variables = ["color=red"]
+#   }
+template("jinja_template_resources") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.resources))
+  assert(defined(invoker.res_dir))
+
+  _base_path = "$target_gen_dir/$target_name"
+  _resources_zip = _base_path + ".resources.zip"
+  _build_config = _base_path + ".build_config"
+
+  write_build_config("${target_name}__build_config") {
+    build_config = _build_config
+    resources_zip = _resources_zip
+    type = "android_resources"
+  }
+
+  action("${target_name}__template") {
+    sources = invoker.resources
+    script = "//build/android/gyp/jinja_template.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    outputs = [
+      depfile,
+      _resources_zip,
+    ]
+
+    rebased_resources = rebase_path(invoker.resources, root_build_dir)
+    args = [
+      "--inputs=${rebased_resources}",
+      "--inputs-base-dir",
+      rebase_path(invoker.res_dir, root_build_dir),
+      "--outputs-zip",
+      rebase_path(_resources_zip, root_build_dir),
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+    ]
+    if (defined(invoker.variables)) {
+      variables = invoker.variables
+      args += [ "--variables=${variables}" ]
+    }
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__build_config",
+      ":${target_name}__template",
+    ]
+  }
+}
+
+# Creates a resources.zip with locale.pak files placed into appropriate
+# resource configs (e.g. en-GB.pak -> res/raw-en/en_gb.pak). Also generates
+# a locale_paks TypedArray so that resource files can be enumerated at runtime.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the resources will be included with that target.
+#
+# Variables:
+#   sources: List of .pak files. Names must be of the form "en.pak" or
+#       "en-US.pak".
+#   deps: (optional) List of dependencies that might be needed to generate
+#       the .pak files.
+#
+# Example
+#   locale_pak_resources("locale_paks") {
+#     sources = [ "path/en-US.pak", "path/fr.pak", ... ]
+#   }
+template("locale_pak_resources") {
+  set_sources_assignment_filter([])
+  assert(defined(invoker.sources))
+
+  _base_path = "$target_gen_dir/$target_name"
+  _resources_zip = _base_path + ".resources.zip"
+  _build_config = _base_path + ".build_config"
+
+  write_build_config("${target_name}__build_config") {
+    build_config = _build_config
+    resources_zip = _resources_zip
+    type = "android_resources"
+  }
+
+  action("${target_name}__create_resources_zip") {
+    sources = invoker.sources
+    script = "//build/android/gyp/locale_pak_resources.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    outputs = [
+      depfile,
+      _resources_zip,
+    ]
+
+    _rebased_sources = rebase_path(invoker.sources, root_build_dir)
+    args = [
+      "--locale-paks=${_rebased_sources}",
+      "--resources-zip",
+      rebase_path(_resources_zip, root_build_dir),
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+    ]
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__build_config",
+      ":${target_name}__create_resources_zip",
+    ]
+  }
+}
+
+# Declare an Android resources target
+#
+# This creates a resources zip file that will be used when building an Android
+# library or apk and included into a final apk.
+#
+# To include these resources in a library/apk, this target should be listed in
+# the library's deps. A library/apk will also include any resources used by its
+# own dependencies.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Any Android resources
+#     listed in deps will be included by libraries/apks that depend on this
+#     target.
+#   resource_dirs: List of directories containing resources for this target.
+#   android_manifest: AndroidManifest.xml for this target. Defaults to
+#     //build/android/AndroidManifest.xml.
+#   custom_package: java package for generated .java files.
+#   v14_skip: If true, don't run v14 resource generator on this. Defaults to
+#     false. (see build/android/gyp/generate_v14_compatible_resources.py)
+#
+#   shared_resources: If true, make a resource package that can be loaded by
+#     a different application at runtime to access the package's resources.
+#
+# Example
+#   android_resources("foo_resources") {
+#     deps = [":foo_strings_grd"]
+#     resource_dirs = ["res"]
+#     custom_package = "org.chromium.foo"
+#   }
+template("android_resources") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.resource_dirs))
+  assert(defined(invoker.android_manifest) || defined(invoker.custom_package))
+
+  base_path = "$target_gen_dir/$target_name"
+  zip_path = base_path + ".resources.zip"
+  srcjar_path = base_path + ".srcjar"
+  r_text_path = base_path + "_R.txt"
+  build_config = base_path + ".build_config"
+
+  build_config_target_name = "${target_name}__build_config"
+  process_resources_target_name = "${target_name}__process_resources"
+  final_target_name = target_name
+
+  write_build_config(build_config_target_name) {
+    visibility = [ ":$process_resources_target_name" ]
+
+    type = "android_resources"
+    resources_zip = zip_path
+    srcjar = srcjar_path
+    r_text = r_text_path
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.android_manifest)) {
+      android_manifest = invoker.android_manifest
+    }
+    if (defined(invoker.custom_package)) {
+      custom_package = invoker.custom_package
+    }
+  }
+
+  android_manifest = "//build/android/AndroidManifest.xml"
+  if (defined(invoker.android_manifest)) {
+    android_manifest = invoker.android_manifest
+  }
+
+  process_resources(process_resources_target_name) {
+    visibility = [ ":$final_target_name" ]
+
+    resource_dirs = invoker.resource_dirs
+    if (defined(invoker.custom_package)) {
+      custom_package = invoker.custom_package
+    }
+
+    if (defined(invoker.v14_skip)) {
+      v14_skip = invoker.v14_skip
+    }
+
+    if (defined(invoker.shared_resources)) {
+      shared_resources = invoker.shared_resources
+    }
+
+    deps = [
+      ":$build_config_target_name",
+    ]
+    if (defined(invoker.deps)) {
+      # Invoker may have added deps that generate the input resources.
+      deps += invoker.deps
+    }
+  }
+
+  group(final_target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    deps = [
+      ":${target_name}__process_resources",
+    ]
+  }
+}
+
+# Declare a target that generates localized strings.xml from a .grd file.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the strings.xml will be included with that target.
+#
+# Variables
+#   deps: Specifies the dependencies of this target.
+#   grd_file: Path to the .grd file to generate strings.xml from.
+#   outputs: Expected grit outputs (see grit rule).
+#
+# Example
+#  java_strings_grd("foo_strings_grd") {
+#    grd_file = "foo_strings.grd"
+#  }
+template("java_strings_grd") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  base_path = "$target_gen_dir/$target_name"
+  resources_zip = base_path + ".resources.zip"
+  build_config = base_path + ".build_config"
+
+  write_build_config("${target_name}__build_config") {
+    type = "android_resources"
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+
+  # Put grit files into this subdirectory of target_gen_dir.
+  extra_output_path = target_name + "_grit_output"
+
+  grit_target_name = "${target_name}__grit"
+  grit_output_dir = "$target_gen_dir/$extra_output_path"
+  grit(grit_target_name) {
+    grit_flags = [
+      "-E",
+      "ANDROID_JAVA_TAGGED_ONLY=false",
+    ]
+    output_dir = grit_output_dir
+    resource_ids = ""
+    source = invoker.grd_file
+    outputs = invoker.outputs
+  }
+
+  # This needs to get outputs from grit's internal target, not the final
+  # source_set.
+  generate_strings_outputs = get_target_outputs(":${grit_target_name}_grit")
+
+  zip("${target_name}__zip") {
+    base_dir = grit_output_dir
+    inputs = generate_strings_outputs
+    output = resources_zip
+    deps = [
+      ":$grit_target_name",
+    ]
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__build_config",
+      ":${target_name}__zip",
+    ]
+  }
+}
+
+# Declare a target that packages strings.xml generated from a grd file.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the strings.xml will be included with that target.
+#
+# Variables
+#  grit_output_dir: directory containing grit-generated files.
+#  generated_files: list of android resource files to package.
+#
+# Example
+#  java_strings_grd_prebuilt("foo_strings_grd") {
+#    grit_output_dir = "$root_gen_dir/foo/grit"
+#    generated_files = [
+#      "values/strings.xml"
+#    ]
+#  }
+template("java_strings_grd_prebuilt") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  base_path = "$target_gen_dir/$target_name"
+  resources_zip = base_path + ".resources.zip"
+  build_config = base_path + ".build_config"
+
+  build_config_target_name = "${target_name}__build_config"
+  zip_target_name = "${target_name}__zip"
+  final_target_name = target_name
+
+  write_build_config(build_config_target_name) {
+    visibility = [ ":$zip_target_name" ]
+    type = "android_resources"
+  }
+
+  zip(zip_target_name) {
+    visibility = [ ":$final_target_name" ]
+
+    base_dir = invoker.grit_output_dir
+    inputs = rebase_path(invoker.generated_files, ".", base_dir)
+    output = resources_zip
+    deps = [
+      ":$build_config_target_name",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  group(final_target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    deps = [
+      ":$zip_target_name",
+    ]
+  }
+}
+
+# Declare a Java executable target
+#
+# This target creates an executable from java code and libraries. The executable
+# will be in the output folder's /bin/ directory.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be included in the executable (and the javac classpath).
+#
+#   java_files: List of .java files included in this library.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#     will be added to java_files and be included in this library.
+#   srcjars: List of srcjars to be included in this library, together with the
+#     ones obtained from srcjar_deps.
+#
+#   bypass_platform_checks: Disables checks about cross-platform (Java/Android)
+#     dependencies for this target. This will allow depending on an
+#     android_library target, for example.
+#
+#   chromium_code: If true, extra analysis warning/errors will be enabled.
+#   enable_errorprone: If true, enables the errorprone compiler.
+#
+#   data_deps, testonly
+#
+# Example
+#   java_binary("foo") {
+#     java_files = [ "org/chromium/foo/FooMain.java" ]
+#     deps = [ ":bar_java" ]
+#     main_class = "org.chromium.foo.FooMain"
+#   }
+template("java_binary") {
+  set_sources_assignment_filter([])
+
+  # TODO(cjhopman): This should not act like a java_library for dependents (i.e.
+  # dependents shouldn't get the jar in their classpath, etc.).
+  java_library_impl(target_name) {
+    if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    }
+    if (defined(invoker.chromium_code)) {
+      chromium_code = invoker.chromium_code
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.enable_errorprone)) {
+      enable_errorprone = invoker.enable_errorprone
+    }
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    }
+    if (defined(invoker.srcjar_deps)) {
+      srcjar_deps = invoker.srcjar_deps
+    }
+    if (defined(invoker.srcjars)) {
+      srcjars = invoker.srcjars
+    }
+    if (defined(invoker.bypass_platform_checks)) {
+      bypass_platform_checks = invoker.bypass_platform_checks
+    }
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+
+    supports_android = false
+    main_class = invoker.main_class
+  }
+}
+
+# Declare a Junit executable target
+#
+# This target creates an executable from java code for running as a junit test
+# suite. The executable will be in the output folder's /bin/ directory.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be included in the executable (and the javac classpath).
+#
+#   java_files: List of .java files included in this library.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#     will be added to java_files and be included in this library.
+#   srcjars: List of srcjars to be included in this library, together with the
+#     ones obtained from srcjar_deps.
+#
+#   chromium_code: If true, extra analysis warning/errors will be enabled.
+#
+# Example
+#   junit_binary("foo") {
+#     java_files = [ "org/chromium/foo/FooTest.java" ]
+#     deps = [ ":bar_java" ]
+#   }
+template("junit_binary") {
+  set_sources_assignment_filter([])
+
+  java_binary(target_name) {
+    bypass_platform_checks = true
+    main_class = "org.chromium.testing.local.JunitTestMain"
+    testonly = true
+
+    if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    }
+    if (defined(invoker.chromium_code)) {
+      chromium_code = invoker.chromium_code
+    }
+    deps = [
+      "//testing/android/junit:junit_test_support",
+      "//third_party/junit",
+      "//third_party/mockito:mockito_java",
+      "//third_party/robolectric:robolectric_java",
+      "//third_party/robolectric:android-all-4.3_r2-robolectric-0",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    }
+    if (defined(invoker.srcjar_deps)) {
+      srcjar_deps = invoker.srcjar_deps
+    }
+    if (defined(invoker.srcjars)) {
+      srcjars = invoker.srcjars
+    }
+  }
+}
+
+# Declare a java library target
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be added to the javac classpath.
+#
+#   java_files: List of .java files included in this library.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#     will be added to java_files and be included in this library.
+#   srcjars: List of srcjars to be included in this library, together with the
+#     ones obtained from srcjar_deps.
+#   DEPRECATED_java_in_dir: Directory containing java files. All .java files in
+#     this directory will be included in the library. This is only supported to
+#     ease the gyp->gn conversion and will be removed in the future.
+#
+#   chromium_code: If true, extra analysis warning/errors will be enabled.
+#   enable_errorprone: If true, enables the errorprone compiler.
+#
+#   jar_excluded_patterns: List of patterns of .class files to exclude from the
+#     final jar.
+#
+#   proguard_preprocess: If true, proguard preprocessing will be run. This can
+#     be used to remove unwanted parts of the library.
+#   proguard_config: Path to the proguard config for preprocessing.
+#
+#   supports_android: If true, Android targets (android_library, android_apk)
+#     may depend on this target. Note: if true, this target must only use the
+#     subset of Java available on Android.
+#   bypass_platform_checks: Disables checks about cross-platform (Java/Android)
+#     dependencies for this target. This will allow depending on an
+#     android_library target, for example.
+#
+#   data_deps, testonly
+#
+# Example
+#   java_library("foo_java") {
+#     java_files = [
+#       "org/chromium/foo/Foo.java",
+#       "org/chromium/foo/FooInterface.java",
+#       "org/chromium/foo/FooService.java",
+#     ]
+#     deps = [
+#       ":bar_java"
+#     ]
+#     srcjar_deps = [
+#       ":foo_generated_enum"
+#     ]
+#     jar_excluded_patterns = [
+#       "*/FooService.class", "*/FooService##*.class"
+#     ]
+#   }
+template("java_library") {
+  set_sources_assignment_filter([])
+  java_library_impl(target_name) {
+    if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    }
+    if (defined(invoker.chromium_code)) {
+      chromium_code = invoker.chromium_code
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.enable_errorprone)) {
+      enable_errorprone = invoker.enable_errorprone
+    }
+    if (defined(invoker.jar_excluded_patterns)) {
+      jar_excluded_patterns = invoker.jar_excluded_patterns
+    }
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    }
+    if (defined(invoker.proguard_config)) {
+      proguard_config = invoker.proguard_config
+    }
+    if (defined(invoker.proguard_preprocess)) {
+      proguard_preprocess = invoker.proguard_preprocess
+    }
+    if (defined(invoker.srcjar_deps)) {
+      srcjar_deps = invoker.srcjar_deps
+    }
+    if (defined(invoker.srcjars)) {
+      srcjars = invoker.srcjars
+    }
+    if (defined(invoker.bypass_platform_checks)) {
+      bypass_platform_checks = invoker.bypass_platform_checks
+    }
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+    if (defined(invoker.jar_path)) {
+      jar_path = invoker.jar_path
+    }
+
+    if (defined(invoker.supports_android) && invoker.supports_android) {
+      supports_android = true
+    }
+  }
+}
+
+# Declare a java library target for a prebuilt jar
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be added to the javac classpath.
+#   jar_path: Path to the prebuilt jar.
+#   jar_dep: Target that builds jar_path (optional).
+#   proguard_preprocess: If true, proguard preprocessing will be run. This can
+#     be used to remove unwanted parts of the library.
+#   proguard_config: Path to the proguard config for preprocessing.
+#
+# Example
+#   java_prebuilt("foo_java") {
+#     jar_path = "foo.jar"
+#     deps = [
+#       ":foo_resources",
+#       ":bar_java"
+#     ]
+#   }
+template("java_prebuilt") {
+  set_sources_assignment_filter([])
+  java_prebuilt_impl(target_name) {
+    jar_path = invoker.jar_path
+    if (defined(invoker.jar_dep)) {
+      jar_dep = invoker.jar_dep
+    }
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+    if (defined(invoker.proguard_config)) {
+      proguard_config = invoker.proguard_config
+    }
+    if (defined(invoker.proguard_preprocess)) {
+      proguard_preprocess = invoker.proguard_preprocess
+    }
+  }
+}
+
+# Declare an Android library target
+#
+# This target creates an Android library containing java code and Android
+# resources.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be added to the javac classpath. Android resources in dependencies
+#     will be used when building this library.
+#
+#   java_files: List of .java files included in this library.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#     will be added to java_files and be included in this library.
+#   srcjars: List of srcjars to be included in this library, together with the
+#     ones obtained from srcjar_deps.
+#   DEPRECATED_java_in_dir: Directory containing java files. All .java files in
+#     this directory will be included in the library. This is only supported to
+#     ease the gyp->gn conversion and will be removed in the future.
+#
+#   chromium_code: If true, extra analysis warning/errors will be enabled.
+#   enable_errorprone: If true, enables the errorprone compiler.
+#
+#   jar_excluded_patterns: List of patterns of .class files to exclude from the
+#     final jar.
+#
+#   proguard_preprocess: If true, proguard preprocessing will be run. This can
+#     be used to remove unwanted parts of the library.
+#   proguard_config: Path to the proguard config for preprocessing.
+#
+#   dex_path: If set, the resulting .dex.jar file will be placed under this
+#     path.
+#
+# Example
+#   android_library("foo_java") {
+#     java_files = [
+#       "android/org/chromium/foo/Foo.java",
+#       "android/org/chromium/foo/FooInterface.java",
+#       "android/org/chromium/foo/FooService.java",
+#     ]
+#     deps = [
+#       ":bar_java"
+#     ]
+#     srcjar_deps = [
+#       ":foo_generated_enum"
+#     ]
+#     jar_excluded_patterns = [
+#       "*/FooService.class", "*/FooService##*.class"
+#     ]
+#   }
+template("android_library") {
+  set_sources_assignment_filter([])
+  assert(!defined(invoker.jar_path),
+         "android_library does not support a custom jar path")
+  java_library_impl(target_name) {
+    if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    }
+    if (defined(invoker.chromium_code)) {
+      chromium_code = invoker.chromium_code
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.enable_errorprone)) {
+      enable_errorprone = invoker.enable_errorprone
+    }
+    if (defined(invoker.jar_excluded_patterns)) {
+      jar_excluded_patterns = invoker.jar_excluded_patterns
+    }
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    }
+    if (defined(invoker.proguard_config)) {
+      proguard_config = invoker.proguard_config
+    }
+    if (defined(invoker.proguard_preprocess)) {
+      proguard_preprocess = invoker.proguard_preprocess
+    }
+    if (defined(invoker.srcjar_deps)) {
+      srcjar_deps = invoker.srcjar_deps
+    }
+    if (defined(invoker.srcjars)) {
+      srcjars = invoker.srcjars
+    }
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    if (defined(invoker.dex_path)) {
+      dex_path = invoker.dex_path
+    }
+    if (defined(invoker.manifest_entries)) {
+      manifest_entries = invoker.manifest_entries
+    }
+
+    supports_android = true
+    requires_android = true
+
+    if (!defined(jar_excluded_patterns)) {
+      jar_excluded_patterns = []
+    }
+    jar_excluded_patterns += [
+      "*/R.class",
+      "*/R##*.class",
+      "*/Manifest.class",
+      "*/Manifest##*.class",
+    ]
+  }
+}
+
+# Declare a target that packages a set of Java dependencies into a standalone
+# .dex.jar.
+#
+# Variables
+#   deps: specifies the dependencies of this target. Android libraries in deps
+#     will be packaged into the resulting .dex.jar file.
+#   dex_path: location at which the output file will be put
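+#
+# Example (a sketch only; the target and path names are hypothetical)
+#   android_standalone_library("foo_standalone_dex") {
+#     deps = [ ":foo_java" ]
+#     dex_path = "$root_build_dir/foo.dex.jar"
+#   }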
+template("android_standalone_library") {
+  set_sources_assignment_filter([])
+  deps_dex(target_name) {
+    deps = invoker.deps
+    dex_path = invoker.dex_path
+    if (defined(invoker.excluded_jars)) {
+      excluded_jars = invoker.excluded_jars
+    }
+  }
+}
+
+# Declare an Android library target for a prebuilt jar
+#
+# This target creates an Android library containing java code and Android
+# resources.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be added to the javac classpath. Android resources in dependencies
+#     will be used when building this library.
+#   jar_path: Path to the prebuilt jar.
+#   proguard_preprocess: If true, proguard preprocessing will be run. This can
+#     be used to remove unwanted parts of the library.
+#   proguard_config: Path to the proguard config for preprocessing.
+#
+# Example
+#   android_java_prebuilt("foo_java") {
+#     jar_path = "foo.jar"
+#     deps = [
+#       ":foo_resources",
+#       ":bar_java"
+#     ]
+#   }
+template("android_java_prebuilt") {
+  set_sources_assignment_filter([])
+  java_prebuilt_impl(target_name) {
+    jar_path = invoker.jar_path
+    supports_android = true
+    requires_android = true
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+    if (defined(invoker.proguard_config)) {
+      proguard_config = invoker.proguard_config
+    }
+    if (defined(invoker.proguard_preprocess)) {
+      proguard_preprocess = invoker.proguard_preprocess
+    }
+  }
+}
+
+# Declare an Android apk target
+#
+# This target creates an Android APK containing java code, resources, assets,
+# and (possibly) native libraries.
+#
+# Variables
+#   android_manifest: Path to AndroidManifest.xml.
+#   android_manifest_dep: Target that generates AndroidManifest (if applicable)
+#   data_deps: List of dependencies needed at runtime. These will be built but
+#     won't change the generated .apk in any way (in fact they may be built
+#     after the .apk is).
+#   deps: List of dependencies. All Android java resources and libraries in the
+#     "transitive closure" of these dependencies will be included in the apk.
+#     Note: this "transitive closure" actually only includes such targets if
+#     they are depended on through android_library or android_resources targets
+#     (and so not through builtin targets like 'action', 'group', etc.).
+#   java_files: List of .java files to include in the apk.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#      will be added to java_files and be included in this apk.
+#   apk_name: Name for final apk.
+#   final_apk_path: Path to final built apk. Default is
+#     $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
+#   native_libs: List of paths of native libraries to include in this apk. If
+#     these libraries depend on other shared_library targets, those
+#     dependencies will also be included in the apk.
+#   apk_under_test: For an instrumentation test apk, this is the target of the
+#     tested apk.
+#   include_all_resources: If true, include all resource IDs in all generated
+#     R.java files.
+#   testonly: Marks this target as "test-only".
+#
+#   DEPRECATED_java_in_dir: Directory containing java files. All .java files in
+#     this directory will be included in the library. This is only supported to
+#     ease the gyp->gn conversion and will be removed in the future.
+#
+# Example
+#   android_apk("foo_apk") {
+#     android_manifest = "AndroidManifest.xml"
+#     java_files = [
+#       "android/org/chromium/foo/FooApplication.java",
+#       "android/org/chromium/foo/FooActivity.java",
+#     ]
+#     deps = [
+#       ":foo_support_java"
+#       ":foo_resources"
+#     ]
+#     srcjar_deps = [
+#       ":foo_generated_enum"
+#     ]
+#     native_libs = [
+#       native_lib_path
+#     ]
+#   }
+template("android_apk") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.final_apk_path) || defined(invoker.apk_name))
+  assert(defined(invoker.android_manifest))
+  gen_dir = "$target_gen_dir/$target_name"
+  base_path = "$gen_dir/$target_name"
+  _build_config = "$target_gen_dir/$target_name.build_config"
+  resources_zip_path = "$base_path.resources.zip"
+  _all_resources_zip_path = "$base_path.resources.all.zip"
+  jar_path = "$base_path.jar"
+  _template_name = target_name
+
+  final_dex_path = "$gen_dir/classes.dex"
+  final_dex_target_name = "${_template_name}__final_dex"
+
+  _final_apk_path = ""
+  if (defined(invoker.final_apk_path)) {
+    _final_apk_path = invoker.final_apk_path
+  } else if (defined(invoker.apk_name)) {
+    _final_apk_path = "$root_build_dir/apks/" + invoker.apk_name + ".apk"
+  }
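+
+  # Derives a jar path under test.lib.java/ from the final apk name. The
+  # __create_dist_jar step below fills this jar with the dependency jars so
+  # that the list of tests can be extracted from it.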
+  _dist_jar_path_list =
+      process_file_template(
+          [ _final_apk_path ],
+          "$root_build_dir/test.lib.java/{{source_name_part}}.jar")
+  _dist_jar_path = _dist_jar_path_list[0]
+
+  _native_libs = []
+
+  _version_code = "1"
+  if (defined(invoker.version_code)) {
+    _version_code = invoker.version_code
+  }
+
+  _version_name = "Developer Build"
+  if (defined(invoker.version_name)) {
+    _version_name = invoker.version_name
+  }
+  _keystore_path = android_default_keystore_path
+  _keystore_name = android_default_keystore_name
+  _keystore_password = android_default_keystore_password
+
+  if (defined(invoker.keystore_path)) {
+    _keystore_path = invoker.keystore_path
+    _keystore_name = invoker.keystore_name
+    _keystore_password = invoker.keystore_password
+  }
+
+  _srcjar_deps = []
+  if (defined(invoker.srcjar_deps)) {
+    _srcjar_deps += invoker.srcjar_deps
+  }
+
+  _load_library_from_apk = false
+
+  # The dependency that makes the chromium linker, if any is needed.
+  _chromium_linker_dep = []
+
+  if (defined(invoker.native_libs)) {
+    _use_chromium_linker = false
+    if (defined(invoker.use_chromium_linker)) {
+      _use_chromium_linker =
+          invoker.use_chromium_linker && chromium_linker_supported
+      _chromium_linker_dep = [ "//base/android/linker:chromium_android_linker" ]
+    }
+
+    if (defined(invoker.load_library_from_apk) &&
+        invoker.load_library_from_apk) {
+      _load_library_from_apk = true
+      assert(_use_chromium_linker,
+             "Loading library from the apk requires use" +
+                 " of the Chromium linker.")
+    }
+
+    _enable_relocation_packing = false
+    if (defined(invoker.enable_relocation_packing) &&
+        invoker.enable_relocation_packing) {
+      _enable_relocation_packing = relocation_packing_supported
+      assert(_use_chromium_linker,
+             "Relocation packing requires use of the" + " Chromium linker.")
+    }
+
+    if (is_component_build) {
+      _native_libs += [ "$root_out_dir/lib.stripped/libc++_shared.so" ]
+      _chromium_linker_dep += [ "//build/android:cpplib_stripped" ]
+    }
+
+    # Allow native_libs to be in the form "foo.so" or "foo.cr.so"
+    _first_ext_removed =
+        process_file_template(invoker.native_libs, "{{source_name_part}}")
+    _native_libs += process_file_template(
+            _first_ext_removed,
+            "$root_build_dir/lib.stripped/{{source_name_part}}$android_product_extension")
+
+    _native_libs_dir = base_path + "/libs"
+
+    if (_use_chromium_linker) {
+      _native_libs += [ "$root_build_dir/lib.stripped/libchromium_android_linker$android_product_extension" ]
+    }
+
+    _enable_relocation_packing = false
+    if (_use_chromium_linker && defined(invoker.enable_relocation_packing) &&
+        invoker.enable_relocation_packing) {
+      _enable_relocation_packing = true
+    }
+
+    _native_lib_version_rule = ""
+    if (defined(invoker.native_lib_version_rule)) {
+      _native_lib_version_rule = invoker.native_lib_version_rule
+    }
+    _native_lib_version_arg = "\"\""
+    if (defined(invoker.native_lib_version_arg)) {
+      _native_lib_version_arg = invoker.native_lib_version_arg
+    }
+  }
+
+  _android_manifest_deps = []
+  if (defined(invoker.android_manifest_dep)) {
+    _android_manifest_deps = [ invoker.android_manifest_dep ]
+  }
+  _android_manifest = invoker.android_manifest
+
+  _rebased_build_config = rebase_path(_build_config, root_build_dir)
+  _create_abi_split =
+      defined(invoker.create_abi_split) && invoker.create_abi_split
+  _create_density_splits =
+      defined(invoker.create_density_splits) && invoker.create_density_splits
+
+  # Help GN understand that _create_abi_split is not unused (bug in GN).
+  assert(_create_abi_split || true)
+
+  build_config_target = "${_template_name}__build_config"
+  write_build_config(build_config_target) {
+    type = "android_apk"
+    dex_path = final_dex_path
+    resources_zip = resources_zip_path
+    build_config = _build_config
+    android_manifest = _android_manifest
+
+    deps = _chromium_linker_dep + _android_manifest_deps
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    if (defined(invoker.apk_under_test)) {
+      apk_under_test = invoker.apk_under_test
+    }
+
+    native_libs = _native_libs
+  }
+
+  final_deps = []
+
+  process_resources_target = "${_template_name}__process_resources"
+  final_deps += [ ":$process_resources_target" ]
+  process_resources(process_resources_target) {
+    srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+    r_text_path = "${target_gen_dir}/${target_name}_R.txt"
+    android_manifest = _android_manifest
+    resource_dirs = [ "//build/android/ant/empty/res" ]
+    zip_path = resources_zip_path
+    all_resources_zip_path = _all_resources_zip_path
+    generate_constant_ids = true
+
+    if (defined(invoker.include_all_resources)) {
+      include_all_resources = invoker.include_all_resources
+    }
+
+    build_config = _build_config
+    deps = _android_manifest_deps + [ ":$build_config_target" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+  _srcjar_deps += [ ":$process_resources_target" ]
+
+  if (_native_libs != []) {
+    _enable_chromium_linker_tests = false
+    if (defined(invoker.enable_chromium_linker_tests)) {
+      _enable_chromium_linker_tests = invoker.enable_chromium_linker_tests
+    }
+
+    java_cpp_template("${_template_name}__native_libraries_java") {
+      package_name = "org/chromium/base/library_loader"
+      sources = [
+        "//base/android/java/templates/NativeLibraries.template",
+      ]
+      inputs = [
+        _build_config,
+      ]
+      deps = [
+        ":$build_config_target",
+      ]
+      if (_native_lib_version_rule != "") {
+        deps += [ _native_lib_version_rule ]
+      }
+
+      defines = [
+        "NATIVE_LIBRARIES_LIST=" +
+            "@FileArg($_rebased_build_config:native:java_libraries_list)",
+        "NATIVE_LIBRARIES_VERSION_NUMBER=$_native_lib_version_arg",
+      ]
+      if (_use_chromium_linker) {
+        defines += [ "ENABLE_CHROMIUM_LINKER" ]
+      }
+      if (_load_library_from_apk) {
+        defines += [ "ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE" ]
+      }
+      if (_enable_chromium_linker_tests) {
+        defines += [ "ENABLE_CHROMIUM_LINKER_TESTS" ]
+      }
+    }
+    _srcjar_deps += [ ":${_template_name}__native_libraries_java" ]
+  }
+
+  java_target = "${_template_name}__java"
+  final_deps += [ ":$java_target" ]
+  java_library_impl(java_target) {
+    supports_android = true
+    requires_android = true
+    override_build_config = _build_config
+    deps = _android_manifest_deps + [ ":$build_config_target" ]
+
+    android_manifest = _android_manifest
+    chromium_code = true
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    } else if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    } else {
+      java_files = []
+    }
+    srcjar_deps = _srcjar_deps
+    dex_path = base_path + ".dex.jar"
+
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  if (_dist_jar_path != "") {
+    create_dist_target = "${_template_name}__create_dist_jar"
+    final_deps += [ ":$create_dist_target" ]
+
+    # TODO(cjhopman): This is only ever needed to calculate the list of tests to
+    # run. See build/android/pylib/instrumentation/test_jar.py. We should be
+    # able to just do that calculation at build time instead.
+    action(create_dist_target) {
+      script = "//build/android/gyp/create_dist_jar.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      inputs = [
+        _build_config,
+      ]
+      outputs = [
+        depfile,
+        _dist_jar_path,
+      ]
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--output",
+        rebase_path(_dist_jar_path, root_build_dir),
+        "--inputs=@FileArg($_rebased_build_config:dist_jar:dependency_jars)",
+      ]
+      inputs += [ jar_path ]
+      _rebased_jar_path = rebase_path([ jar_path ], root_build_dir)
+      args += [ "--inputs=$_rebased_jar_path" ]
+      deps = [
+        ":$build_config_target",  # Generates the build config file.
+        ":$java_target",  # Generates the jar file.
+      ]
+    }
+  }
+
+  final_deps += [ ":$final_dex_target_name" ]
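+  # Dexing happens in two steps: the first dex() below combines this target's
+  # jar with its dependencies' dex files into an intermediate .dex.jar, and
+  # the second dex() converts that archive into the final dex file.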
+  dex("${final_dex_target_name}_jar") {
+    deps = [
+      ":$build_config_target",
+      ":$java_target",
+    ]
+    sources = [
+      jar_path,
+    ]
+    inputs = [
+      _build_config,
+    ]
+    output = "${final_dex_path}.jar"
+    dex_arg_key = "${_rebased_build_config}:final_dex:dependency_dex_files"
+    args = [ "--inputs=@FileArg($dex_arg_key)" ]
+  }
+
+  dex("$final_dex_target_name") {
+    deps = [
+      ":${final_dex_target_name}_jar",
+    ]
+    sources = [
+      "${final_dex_path}.jar",
+    ]
+    output = final_dex_path
+  }
+
+  if (_native_libs != []) {
+    action("${_template_name}__prepare_native") {
+      script = "//build/android/gyp/pack_relocations.py"
+      packed_libraries_dir = "$_native_libs_dir/$android_app_abi"
+      depfile = "$target_gen_dir/$target_name.d"
+      outputs = [
+        depfile,
+      ]
+
+      inputs = _native_libs
+      deps = _chromium_linker_dep
+
+      inputs += [ _build_config ]
+      deps += [ ":$build_config_target" ]
+
+      skip_packing_list = [
+        "gdbserver",
+        "libchromium_android_linker$android_product_extension",
+      ]
+
+      enable_packing_arg = 0
+      if (_enable_relocation_packing) {
+        enable_packing_arg = 1
+        deps += [ relocation_packer_target ]
+      }
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--enable-packing=$enable_packing_arg",
+        "--exclude-packing-list=$skip_packing_list",
+        "--android-pack-relocations",
+        rebase_path(relocation_packer_exe, root_build_dir),
+        "--stripped-libraries-dir",
+        rebase_path(root_build_dir, root_build_dir),
+        "--packed-libraries-dir",
+        rebase_path(packed_libraries_dir, root_build_dir),
+        "--libraries=@FileArg(${_rebased_build_config}:native:libraries)",
+        "--clear-dir",
+      ]
+
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+      if (defined(invoker.data_deps)) {
+        data_deps = invoker.data_deps
+      }
+
+      if (is_debug) {
+        rebased_gdbserver = rebase_path([ android_gdbserver ], root_build_dir)
+        inputs += [ android_gdbserver ]
+        args += [ "--libraries=$rebased_gdbserver" ]
+      }
+    }
+  }
+
+  final_deps += [ ":${_template_name}__create" ]
+  create_apk("${_template_name}__create") {
+    apk_path = _final_apk_path
+    android_manifest = _android_manifest
+    resources_zip = _all_resources_zip_path
+    dex_path = final_dex_path
+    load_library_from_apk = _load_library_from_apk
+    create_density_splits = _create_density_splits
+    if (defined(invoker.language_splits)) {
+      language_splits = invoker.language_splits
+    }
+    if (defined(invoker.extensions_to_not_compress)) {
+      extensions_to_not_compress = invoker.extensions_to_not_compress
+    } else {
+      # Allow icu data, v8 snapshots, and pak files to be loaded directly from
+      # the .apk.
+      # Note: These are actually suffix matches, not necessarily extensions.
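+      # For example, a hypothetical entry like "snapshot_blob.bin" would match
+      # any file ending in that whole suffix, not just the ".bin" extension.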
+      extensions_to_not_compress = ".dat,.bin,.pak"
+    }
+
+    version_code = _version_code
+    version_name = _version_name
+
+    keystore_name = _keystore_name
+    keystore_path = _keystore_path
+    keystore_password = _keystore_password
+
+    # This target generates the input file _all_resources_zip_path.
+    deps = _android_manifest_deps + [
+             ":$process_resources_target",
+             ":$final_dex_target_name",
+           ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    if (defined(invoker.asset_location)) {
+      asset_location = invoker.asset_location
+
+      # We don't know the exact dependencies that create the assets in
+      # |asset_location|; we depend on all caller deps until a better solution
+      # is figured out (http://crbug.com/433330).
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+    }
+
+    if (_native_libs != [] && !_create_abi_split) {
+      native_libs_dir = _native_libs_dir
+      deps += [ ":${_template_name}__prepare_native" ]
+    }
+  }
+
+  if (_native_libs != [] && _create_abi_split) {
+    _manifest_rule = "${_template_name}__split_manifest_abi_${android_app_abi}"
+    generate_split_manifest(_manifest_rule) {
+      main_manifest = _android_manifest
+      out_manifest =
+          "$gen_dir/split-manifests/${android_app_abi}/AndroidManifest.xml"
+      split_name = "abi_${android_app_abi}"
+      deps = _android_manifest_deps
+    }
+
+    _apk_rule = "${_template_name}__split_apk_abi_${android_app_abi}"
+    final_deps += [ ":$_apk_rule" ]
+    create_apk(_apk_rule) {
+      _split_paths = process_file_template(
+              [ _final_apk_path ],
+              "{{source_dir}}/{{source_name_part}}-abi-${android_app_abi}.apk")
+      apk_path = _split_paths[0]
+      base_path = "$gen_dir/$_apk_rule"
+
+      manifest_outputs = get_target_outputs(":${_manifest_rule}")
+      android_manifest = manifest_outputs[1]
+      load_library_from_apk = _load_library_from_apk
+
+      version_code = _version_code
+      version_name = _version_name
+
+      keystore_name = _keystore_name
+      keystore_path = _keystore_path
+      keystore_password = _keystore_password
+
+      native_libs_dir = _native_libs_dir
+      deps = [
+        ":${_template_name}__prepare_native",
+        ":${_manifest_rule}",
+      ]
+    }
+  }
+
+  if (defined(invoker.flutter_dist_jar)) {
+    flutter_jar_target = "${_template_name}__create_flutter_jar"
+    final_deps += [ ":$flutter_jar_target" ]
+
+    action(flutter_jar_target) {
+      script = "//build/android/gyp/create_flutter_jar.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      inputs = [
+        _dist_jar_path,
+      ]
+      outputs = [
+        invoker.flutter_dist_jar,
+      ]
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--output",
+        rebase_path(invoker.flutter_dist_jar, root_build_dir),
+        "--dist_jar",
+        rebase_path(_dist_jar_path, root_build_dir),
+        "--android_abi",
+        "$android_app_abi",
+        "--asset_dir",
+        rebase_path(invoker.asset_location, root_build_dir),
+      ]
+      foreach(native_lib, rebase_path(_native_libs, root_build_dir)) {
+        args += [
+          "--native_lib",
+          native_lib,
+        ]
+      }
+      deps = [
+        ":$create_dist_target",
+        ":${_template_name}__prepare_native",
+      ]
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+    }
+  }
+
+  group(target_name) {
+    deps = final_deps
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+  }
+}
+
+# Declare an Android instrumentation test apk
+#
+# This target creates an Android instrumentation test apk.
+#
+# Variables
+#   android_manifest: Path to AndroidManifest.xml.
+#   data_deps: List of dependencies needed at runtime. These will be built but
+#     won't change the generated .apk in any way (in fact they may be built
+#     after the .apk is).
+#   deps: List of dependencies. All Android java resources and libraries in the
+#     "transitive closure" of these dependencies will be included in the apk.
+#     Note: this "transitive closure" actually only includes such targets if
+#     they are depended on through android_library or android_resources targets
+#     (and so not through builtin targets like 'action', 'group', etc).
+#   java_files: List of .java files to include in the apk.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#      will be added to java_files and be included in this apk.
+#   apk_name: Name for final apk.
+#   support_apk_path: Path to a support apk. If present, the test runner script
+#      will install it on the device before running the instrumentation tests.
+#      Should be a path relative to the src root.
+#   final_apk_path: Path to final built apk. Default is
+#     $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
+#   native_libs: List of paths of native libraries to include in this apk. If
+#     these libraries depend on other shared_library targets, those
+#     dependencies will also be included in the apk.
+#   apk_under_test: The apk being tested.
+#   isolate_file: Isolate file containing the list of test data dependencies.
+#
+#   DEPRECATED_java_in_dir: Directory containing java files. All .java files in
+#     this directory will be included in the library. This is only supported to
+#     ease the gyp->gn conversion and will be removed in the future.
+#
+# Example
+#   instrumentation_test_apk("foo_test_apk") {
+#     android_manifest = "AndroidManifest.xml"
+#     apk_name = "FooTest"
+#     apk_under_test = "Foo"
+#     java_files = [
+#       "android/org/chromium/foo/FooTestCase.java",
+#       "android/org/chromium/foo/FooExampleTest.java",
+#     ]
+#     deps = [
+#       ":foo_test_support_java"
+#     ]
+#   }
+template("instrumentation_test_apk") {
+  set_sources_assignment_filter([])
+  testonly = true
+  _template_name = target_name
+
+  if (defined(invoker.apk_name)) {
+    test_runner_data_dep = [ ":${_template_name}__test_runner_script" ]
+    test_runner_script("${_template_name}__test_runner_script") {
+      test_name = invoker.target_name
+      test_type = "instrumentation"
+      test_apk = invoker.apk_name
+      if (defined(invoker.isolate_file)) {
+        isolate_file = invoker.isolate_file
+      }
+      if (defined(invoker.support_apk_path)) {
+        support_apk_path = invoker.support_apk_path
+      }
+    }
+  }
+
+  android_apk(target_name) {
+    if (defined(invoker.android_manifest)) {
+      android_manifest = invoker.android_manifest
+    }
+    data_deps = [
+      "//testing/android/driver:driver_apk",
+      "//tools/android/forwarder2",
+      "//tools/android/md5sum",
+    ]
+    if (defined(test_runner_data_dep)) {
+      data_deps += test_runner_data_dep
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+    deps = [
+      "//testing/android/broker:broker_java",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    }
+    if (defined(invoker.srcjar_deps)) {
+      srcjar_deps = invoker.srcjar_deps
+    }
+    if (defined(invoker.apk_name)) {
+      apk_name = invoker.apk_name
+    }
+    if (defined(invoker.final_apk_path)) {
+      final_apk_path = invoker.final_apk_path
+    }
+    if (defined(invoker.native_libs)) {
+      native_libs = invoker.native_libs
+    }
+    if (defined(invoker.apk_under_test)) {
+      apk_under_test = invoker.apk_under_test
+    }
+    if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    }
+  }
+}
+
+# Declare an Android gtest apk
+#
+# This target creates an Android apk for running gtest-based unittests.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. These will be passed to
+#     the underlying android_apk invocation and should include the java and
+#     resource dependencies of the apk.
+#   unittests_dep: This should be the label of the gtest native target. This
+#     target must be defined previously in the same file.
+#   unittests_binary: The basename of the library produced by the unittests_dep
+#     target. If unspecified, it assumes the name of the unittests_dep target
+#      (which will be correct unless that target specifies an "output_name").
+#     TODO(brettw) make this automatic by allowing get_target_outputs to
+#     support executables.
+#   apk_name: The name of the produced apk. If unspecified, it uses the name
+#     of the unittests_dep target postfixed with "_apk".
+#
+# Example
+#   unittest_apk("foo_unittests_apk") {
+#     deps = [ ":foo_java", ":foo_resources" ]
+#     unittests_dep = ":foo_unittests"
+#   }
+template("unittest_apk") {
+  set_sources_assignment_filter([])
+  testonly = true
+
+  assert(defined(invoker.unittests_dep), "Need unittests_dep for $target_name")
+
+  test_suite_name = get_label_info(invoker.unittests_dep, "name")
+
+  # This trivial assert is needed in case both unittests_binary and apk_name
+  # are defined, as otherwise test_suite_name would not be used.
+  assert(test_suite_name != "")
+
+  if (defined(invoker.unittests_binary)) {
+    unittests_binary = invoker.unittests_binary
+  } else {
+    unittests_binary = "lib${test_suite_name}${android_product_extension}"
+  }
+
+  if (defined(invoker.apk_name)) {
+    apk_name = invoker.apk_name
+  } else {
+    apk_name = test_suite_name
+  }
+
+  android_apk(target_name) {
+    final_apk_path = "$root_build_dir/${apk_name}_apk/${apk_name}-debug.apk"
+    java_files = [
+      "//testing/android/native_test/java/src/org/chromium/native_test/NativeBrowserTestActivity.java",
+      "//testing/android/native_test/java/src/org/chromium/native_test/NativeTestActivity.java",
+      "//testing/android/native_test/java/src/org/chromium/native_test/NativeUnitTestActivity.java",
+      "//testing/android/native_test/java/src/org/chromium/native_test/NativeTestInstrumentationTestRunner.java",
+    ]
+    android_manifest = "//testing/android/native_test/java/AndroidManifest.xml"
+    native_libs = [ unittests_binary ]
+    if (defined(invoker.asset_location)) {
+      asset_location = invoker.asset_location
+    }
+    deps = [
+      "//base:base_java",
+      "//build/android/pylib/remote/device/dummy:remote_device_dummy_apk",
+      "//testing/android/appurify_support:appurify_support_java",
+      "//testing/android/reporter:reporter_java",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    data_deps = [ "//tools/android/md5sum" ]
+    if (host_os == "linux") {
+      data_deps += [ "//tools/android/forwarder2" ]
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+  }
+}
+
+# Generate .java files from .aidl files.
+#
+# This target will store the .java files in a srcjar and should be included in
+# an android_library or android_apk's srcjar_deps.
+#
+# Variables
+#   sources: Paths to .aidl files to compile.
+#   import_include: Path to directory containing .java files imported by the
+#     .aidl files.
+#   interface_file: Preprocessed aidl file to import.
+#
+# Example
+#   android_aidl("foo_aidl") {
+#     import_include = "java/src"
+#     sources = [
+#       "java/src/com/foo/bar/FooBarService.aidl",
+#       "java/src/com/foo/bar/FooBarServiceCallback.aidl",
+#     ]
+#   }
+template("android_aidl") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+  aidl_path = "${android_sdk_build_tools}/aidl"
+  framework_aidl = "$android_sdk/framework.aidl"
+
+  action(target_name) {
+    script = "//build/android/gyp/aidl.py"
+    sources = invoker.sources
+
+    imports = [ framework_aidl ]
+    if (defined(invoker.interface_file)) {
+      assert(invoker.interface_file != "")
+      imports += [ invoker.interface_file ]
+    }
+
+    inputs = [ aidl_path ] + imports
+
+    depfile = "${target_gen_dir}/${target_name}.d"
+    outputs = [
+      depfile,
+      srcjar_path,
+    ]
+    rebased_imports = rebase_path(imports, root_build_dir)
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--aidl-path",
+      rebase_path(aidl_path, root_build_dir),
+      "--imports=$rebased_imports",
+      "--srcjar",
+      rebase_path(srcjar_path, root_build_dir),
+    ]
+    if (defined(invoker.import_include) && invoker.import_include != "") {
+      # TODO(cjhopman): aidl supports creating a depfile. We should be able to
+      # switch to constructing a depfile for the overall action from that
+      # instead of having all the .java files in the include paths as inputs.
+      rebased_import_includes =
+          rebase_path([ invoker.import_include ], root_build_dir)
+      args += [ "--includes=$rebased_import_includes" ]
+
+      _java_files_build_rel =
+          exec_script("//build/android/gyp/find.py",
+                      rebase_path([ invoker.import_include ], root_build_dir),
+                      "list lines")
+      _java_files = rebase_path(_java_files_build_rel, ".", root_build_dir)
+      inputs += _java_files
+    }
+    args += rebase_path(sources, root_build_dir)
+  }
+}
+
+# Creates a dist directory for a native executable.
+#
+# Running a native executable on a device requires all the shared library
+# dependencies of that executable. To make it easier to install and run such an
+# executable, this will create a directory containing the native exe and all
+# its library dependencies.
+#
+# Note: It's usually better to package things as an APK than as a native
+# executable.
+#
+# Variables
+#   dist_dir: Directory for the exe and libraries. Everything in this directory
+#     will be deleted before copying in the exe and libraries.
+#   binary: Path to (stripped) executable.
+#
+# Example
+#   create_native_executable_dist("foo_dist") {
+#     dist_dir = "$root_build_dir/foo_dist"
+#     binary = "$root_build_dir/exe.stripped/foo"
+#     deps = [ ":the_thing_that_makes_foo" ]
+#   }
+template("create_native_executable_dist") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  dist_dir = invoker.dist_dir
+  binary = invoker.binary
+  template_name = target_name
+
+  libraries_list =
+      "${target_gen_dir}/${template_name}_library_dependencies.list"
+
+  find_deps_target_name = "${template_name}__find_library_dependencies"
+  copy_target_name = "${template_name}__copy_libraries_and_exe"
+
+  stripped_libraries_dir = "$root_build_dir/lib.stripped"
+  action(find_deps_target_name) {
+    visibility = [ ":$copy_target_name" ]
+
+    script = "//build/android/gyp/write_ordered_libraries.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = [
+      binary,
+      android_readelf,
+    ]
+    outputs = [
+      depfile,
+      libraries_list,
+    ]
+    rebased_binaries = rebase_path([ binary ], root_build_dir)
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--input-libraries=$rebased_binaries",
+      "--libraries-dir",
+      rebase_path(stripped_libraries_dir, root_build_dir),
+      "--output",
+      rebase_path(libraries_list, root_build_dir),
+      "--readelf",
+      rebase_path(android_readelf, root_build_dir),
+    ]
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+
+  copy_ex(copy_target_name) {
+    visibility = [ ":$template_name" ]
+
+    clear_dir = true
+    inputs = [
+      binary,
+      libraries_list,
+    ]
+    dest = dist_dir
+    rebased_binaries_list = rebase_path([ binary ], root_build_dir)
+    rebased_libraries_list = rebase_path(libraries_list, root_build_dir)
+    args = [
+      "--files=$rebased_binaries_list",
+      "--files=@FileArg($rebased_libraries_list:lib_paths)",
+    ]
+
+    deps = [
+      ":$find_deps_target_name",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  group(template_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    deps = [
+      ":$copy_target_name",
+    ]
+  }
+}
+
+# Compile a protocol buffer to java.
+#
+# This generates java files from protocol buffers and creates an Android library
+# containing the classes.
+#
+# Variables
+#   sources: Paths to .proto files to compile.
+#   proto_path: Root directory of .proto files.
+#
+# Example
+#   proto_java_library("foo_proto_java") {
+#     proto_path = "src/foo"
+#     sources = [ "$proto_path/foo.proto" ]
+#   }
+template("proto_java_library") {
+  set_sources_assignment_filter([])
+  _protoc_dep = "//third_party/android_protobuf:android_protoc($host_toolchain)"
+  _protoc_out_dir = get_label_info(_protoc_dep, "root_out_dir")
+  _protoc_bin = "$_protoc_out_dir/android_protoc"
+  _proto_path = invoker.proto_path
+
+  _template_name = target_name
+
+  action("${_template_name}__protoc_java") {
+    srcjar_path = "$target_gen_dir/$target_name.srcjar"
+    script = "//build/protoc_java.py"
+    deps = [
+      _protoc_dep,
+    ]
+    sources = invoker.sources
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      srcjar_path,
+    ]
+    args = [
+             "--depfile",
+             rebase_path(depfile, root_build_dir),
+             "--protoc",
+             rebase_path(_protoc_bin, root_build_dir),
+             "--proto-path",
+             rebase_path(_proto_path, root_build_dir),
+             "--srcjar",
+             rebase_path(srcjar_path, root_build_dir),
+           ] + rebase_path(sources, root_build_dir)
+  }
+
+  android_library(target_name) {
+    java_files = []
+    srcjar_deps = [ ":${_template_name}__protoc_java" ]
+    deps = [
+      "//third_party/android_protobuf:protobuf_nano_javalib",
+    ]
+  }
+}
+
+# TODO(GYP): implement this.
+template("uiautomator_test") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
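+  # The asserts below reference target_name and invoker.deps only so that GN
+  # does not complain about unused variables while this is unimplemented.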
+  assert(target_name != "")
+  assert(invoker.deps != [] || true)
+  group(target_name) {
+  }
+}
diff --git a/build/config/arm.gni b/build/config/arm.gni
new file mode 100644
index 0000000..429a250
--- /dev/null
+++ b/build/config/arm.gni
@@ -0,0 +1,78 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (current_cpu == "arm" || current_cpu == "arm64") {
+  declare_args() {
+    # Version of the ARM processor when compiling on ARM. Ignored on non-ARM
+    # platforms.
+    if (current_cpu == "arm") {
+      arm_version = 7
+  } else if (current_cpu == "arm64") {
+      arm_version = 8
+    } else {
+      assert(false, "Unconfigured arm version")
+    }
+
+    # The ARM floating point mode. This is either the string "hard", "soft", or
+    # "softfp". An empty string means to use the default one for the
+    # arm_version.
+    arm_float_abi = ""
+
+    # The ARM variant-specific tuning mode. This will be a string like "armv6"
+    # or "cortex-a15". An empty string means to use the default for the
+    # arm_version.
+    arm_tune = ""
+
+    # Whether to use the neon FPU instruction set or not.
+    arm_use_neon = true
+
+    # Whether to enable optional NEON code paths.
+    arm_optionally_use_neon = false
+
+    if (is_android) {
+      arm_use_neon = false
+      arm_optionally_use_neon = true
+    }
+
+    if (is_ios) {
+      arm_use_neon = false
+      arm_optionally_use_neon = false
+    }
+  }
+
+  assert(arm_float_abi == "" || arm_float_abi == "hard" ||
+         arm_float_abi == "soft" || arm_float_abi == "softfp")
+
+  if (arm_version == 6) {
+    arm_arch = "armv6"
+    if (arm_tune != "") {
+      arm_tune = ""
+    }
+    if (arm_float_abi == "") {
+      arm_float_abi = "softfp"
+    }
+    arm_fpu = "vfp"
+
+    # Thumb is a reduced instruction set available on some ARM processors that
+    # has increased code density.
+    arm_use_thumb = false
+  } else if (arm_version == 7) {
+    arm_arch = "armv7-a"
+    if (arm_tune == "") {
+      arm_tune = "generic-armv7-a"
+    }
+
+    if (arm_float_abi == "") {
+      arm_float_abi = "softfp"
+    }
+
+    arm_use_thumb = true
+
+    if (arm_use_neon) {
+      arm_fpu = "neon"
+    } else {
+      arm_fpu = "vfpv3-d16"
+    }
+  }
+}
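+
+# For illustration only: a hypothetical args.gn overriding these defaults for
+# a Cortex-A15 softfp build might contain (values are examples, not
+# recommendations):
+#
+#   target_cpu = "arm"
+#   arm_version = 7
+#   arm_float_abi = "softfp"
+#   arm_tune = "cortex-a15"
+#   arm_use_neon = true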
diff --git a/build/config/chrome_build.gni b/build/config/chrome_build.gni
new file mode 100644
index 0000000..c2132c4
--- /dev/null
+++ b/build/config/chrome_build.gni
@@ -0,0 +1,19 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Selects the desired build flavor. Official builds get additional
+  # processing to prepare for release. Normally you will want to develop and
+  # test with this flag off.
+  is_official_build = false
+
+  # Select the desired branding flavor. False means normal Chromium branding,
+  # true means official Google Chrome branding (requires extra Google-internal
+  # resources).
+  is_chrome_branded = false
+
+  # Break chrome.dll into multiple pieces based on process type. Only available
+  # on Windows.
+  is_multi_dll_chrome = is_win && !is_component_build
+}
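+
+# For illustration only: an official, Chrome-branded build would typically be
+# configured from args.gn with (hypothetical):
+#
+#   is_official_build = true
+#   is_chrome_branded = true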
diff --git a/build/config/clang/BUILD.gn b/build/config/clang/BUILD.gn
new file mode 100644
index 0000000..6dff486
--- /dev/null
+++ b/build/config/clang/BUILD.gn
@@ -0,0 +1,62 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("clang.gni")
+
+config("find_bad_constructs") {
+  if (clang_use_chrome_plugins) {
+    cflags = []
+
+    # On Windows, the plugin is built directly into clang, so there's
+    # no need to load it dynamically.
+
+    if (is_mac || is_ios) {
+      cflags += [
+        "-Xclang",
+        "-load",
+        "-Xclang",
+        rebase_path(
+            "//third_party/llvm-build/Release+Asserts/lib/libFindBadConstructs.dylib",
+            root_build_dir),
+      ]
+    } else if (is_linux) {
+      cflags += [
+        "-Xclang",
+        "-load",
+        "-Xclang",
+        rebase_path(
+            "//third_party/llvm-build/Release+Asserts/lib/libFindBadConstructs.so",
+            root_build_dir),
+      ]
+    }
+
+    # https://crbug.com/441916
+    if (is_android || is_linux || is_mac) {
+      cflags += [
+        "-Xclang",
+        "-plugin-arg-find-bad-constructs",
+        "-Xclang",
+        "check-templates",
+      ]
+    }
+
+    cflags += [
+      "-Xclang",
+      "-add-plugin",
+      "-Xclang",
+      "find-bad-constructs",
+    ]
+  }
+}
+
+# Enables some extra Clang-specific warnings. Some third-party code won't
+# compile with these, so such targets may want to remove this config.
+config("extra_warnings") {
+  cflags = [
+    "-Wheader-hygiene",
+
+    # Warns when a const char[] is converted to bool.
+    "-Wstring-conversion",
+  ]
+}
diff --git a/build/config/clang/clang.gni b/build/config/clang/clang.gni
new file mode 100644
index 0000000..cb84879
--- /dev/null
+++ b/build/config/clang/clang.gni
@@ -0,0 +1,9 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Indicates if the build should use the Chrome-specific plugins for enforcing
+  # coding guidelines, etc. Only used when compiling with Clang.
+  clang_use_chrome_plugins = is_clang && !is_nacl
+}
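+
+# For illustration only: the plugins can be turned off from args.gn with:
+#
+#   clang_use_chrome_plugins = false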
diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn
new file mode 100644
index 0000000..04d4bd9
--- /dev/null
+++ b/build/config/compiler/BUILD.gn
@@ -0,0 +1,1243 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/chrome_build.gni")
+if (current_cpu == "arm") {
+  import("//build/config/arm.gni")
+}
+if (current_cpu == "mipsel" || current_cpu == "mips64el") {
+  import("//build/config/mips.gni")
+}
+if (is_posix) {
+  import("//build/config/gcc/gcc_version.gni")
+}
+if (is_win) {
+  import("//build/config/win/visual_studio_version.gni")
+}
+
+import("//build/toolchain/ccache.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+  # Normally, Android builds are lightly optimized, even for debug builds, to
+  # keep binary size down. Setting this flag to true disables such optimization.
+  android_full_debug = false
+
+  # Whether to use the binary binutils checked into third_party/binutils.
+  # These are not multi-arch so cannot be used except on x86 and x86-64 (the
+  # only two architectures that are currently checked in). Turn this off when
+  # you are using a custom toolchain and need to control -B in cflags.
+  linux_use_bundled_binutils = is_linux && current_cpu == "x64"
+
+  # Compile in such a way as to enable profiling of the generated code. For
+  # example, don't omit the frame pointer and leave in symbols.
+  enable_profiling = false
+
+  # Compile in such a way as to make it possible for the profiler to unwind full
+  # stack frames. Setting this flag has a larger effect on the performance of
+  # the generated code than setting profiling alone, but gives the profiler more
+  # information to analyze.
+  # Requires profiling to be set to true.
+  enable_full_stack_frames_for_profiling = false
+
+  # Use gold for linking on 64-bit Linux only (on 32-bit it runs out of
+  # address space, and it doesn't support cross-compiling).
+  use_gold = is_linux && current_cpu == "x64"
+
+  # use_debug_fission: whether to use split DWARF debug info
+  # files. This can reduce link time significantly, but is incompatible
+  # with some utilities such as icecc and ccache. Requires gold and
+  # gcc >= 4.8 or clang.
+  # http://gcc.gnu.org/wiki/DebugFission
+  use_debug_fission = is_debug && !is_win && use_gold &&
+                      linux_use_bundled_binutils && !use_ccache
+
+  if (is_win) {
+    # Whether the VS xtree header has been patched to disable warning 4702. If
+    # it has, then we don't need to disable 4702 (unreachable code warning).
+    # The patch is preapplied to the internal toolchain and hence all bots.
+    msvs_xtree_patched = false
+  }
+}
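+
+# For illustration only: profiling-friendly code generation could be requested
+# from args.gn with (hypothetical):
+#
+#   enable_profiling = true
+#   enable_full_stack_frames_for_profiling = true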
+
+# default_include_dirs ---------------------------------------------------------
+#
+# This is a separate config so that third_party code (which would not use the
+# source root and might have conflicting versions of some headers) can remove
+# this and specify their own include paths.
+config("default_include_dirs") {
+  include_dirs = [
+    "//",
+    root_gen_dir,
+  ]
+}
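+
+# For illustration only: a third_party target that needs its own headers could
+# drop the config above with something like:
+#
+#   configs -= [ "//build/config/compiler:default_include_dirs" ]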
+
+# TODO(GYP): is_ubsan, is_ubsan_vptr
+if (!is_win) {
+  using_sanitizer = is_asan || is_lsan || is_tsan || is_msan
+}
+
+# compiler ---------------------------------------------------------------------
+#
+# Base compiler configuration.
+#
+# See also "runtime_library" below for related stuff and a discussion about
+# where stuff should go. Put warning related stuff in the "warnings" config.
+
+config("compiler") {
+  cflags = []
+  cflags_c = []
+  cflags_cc = []
+  cflags_objcc = []
+  ldflags = []
+  defines = []
+
+  # In general, Windows is totally different, but all the other builds share
+  # some common GCC configuration. This section sets up Windows and the common
+  # GCC flags, and then we handle the other non-Windows platforms specifically
+  # below.
+  if (is_win) {
+    # Windows compiler flags setup.
+    # -----------------------------
+    cflags += [
+      "/Gy",  # Enable function-level linking.
+      "/GS",  # Enable buffer security checking.
+      "/FS",  # Preserve previous PDB behavior.
+    ]
+
+    # Building with Clang on Windows is a work in progress and very
+    # experimental. See crbug.com/82385.
+    # Keep this in sync with the similar block in build/common.gypi
+    if (is_clang) {
+      cflags += [
+        # Many files use intrinsics without including this header.
+        # TODO(hans): Fix those files, or move this to sub-GYPs.
+        "/FIIntrin.h",
+      ]
+
+      if (visual_studio_version == "2013") {
+        cflags += [ "-fmsc-version=1800" ]
+      } else if (visual_studio_version == "2015") {
+        cflags += [ "-fmsc-version=1900" ]
+      }
+
+      if (current_cpu == "x86") {
+        cflags += [
+          "/fallback",
+          "-m32",
+        ]
+      } else {
+        cflags += [ "-m64" ]
+      }
+      if (exec_script("//build/win/use_ansi_codes.py", [], "trim string") ==
+          "True") {
+        cflags += [
+          # cmd.exe doesn't understand ANSI escape codes by default,
+          # so only enable them if something emulating them is around.
+          "-fansi-escape-codes",
+        ]
+      }
+    }
+  } else {
+    # Common GCC compiler flags setup.
+    # --------------------------------
+    cflags += [ "-fno-strict-aliasing" ]  # See http://crbug.com/32204
+    common_flags = [
+      # Not exporting C++ inline functions can generally be applied anywhere
+      # so we do so here. Normal function visibility is controlled by
+      # //build/config/gcc:symbol_visibility_hidden.
+      "-fvisibility-inlines-hidden",
+    ]
+    cflags_cc += common_flags
+    cflags_objcc += common_flags
+
+    # Stack protection.
+    if (is_mac) {
+      cflags += [ "-fstack-protector-all" ]
+    } else if (is_linux) {
+      cflags += [
+        "-fstack-protector",
+        "--param=ssp-buffer-size=4",
+      ]
+    }
+
+    # Linker warnings.
+    if (!(is_chromeos && current_cpu == "arm") && !is_mac && !is_ios) {
+      # TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
+      ldflags += [ "-Wl,--fatal-warnings" ]
+    }
+
+    # Common options for AddressSanitizer, LeakSanitizer, ThreadSanitizer and
+    # MemorySanitizer
+    if (using_sanitizer) {
+      cflags += [
+        "-fno-omit-frame-pointer",
+        "-gline-tables-only",
+      ]
+    }
+    if (is_asan) {
+      asan_blacklist_path =
+          rebase_path("//tools/memory/asan/blacklist.txt", root_build_dir)
+      cflags += [
+        "-fsanitize=address",
+        "-fsanitize-blacklist=$asan_blacklist_path",
+      ]
+      if (is_mac) {
+        cflags += [ "-mllvm -asan-globals=0" ]  # http://crbug.com/352073
+        # TODO(GYP): deal with mac_bundles.
+      }
+    }
+    if (is_lsan) {
+      cflags += [ "-fsanitize=leak" ]
+    }
+    if (is_tsan) {
+      tsan_blacklist_path =
+          rebase_path("//tools/memory/tsan_v2/ignores.txt", root_build_dir)
+      cflags += [
+        "-fsanitize=thread",
+        "-fsanitize-blacklist=$tsan_blacklist_path",
+      ]
+    }
+    if (is_msan) {
+      msan_blacklist_path =
+          rebase_path("//tools/msan/blacklist.txt", root_build_dir)
+      cflags += [
+        "-fsanitize=memory",
+        "-fsanitize-memory-track-origins=$msan_track_origins",
+        "-fsanitize-blacklist=$msan_blacklist_path",
+      ]
+    }
+
+    if (use_custom_libcxx) {
+      cflags_cc += [ "-nostdinc++" ]
+      include_dirs = [
+        "//buildtools/third_party/libc++/trunk/include",
+        "//buildtools/third_party/libc++abi/trunk/include",
+      ]
+    }
+
+    if (is_fnl) {
+      # TODO(kulakowski) remove when fnl no longer uses gcc
+      cflags += [ "-Wno-maybe-uninitialized" ]
+    }
+  }
+
+  if (is_clang && is_debug) {
+    # Allow comparing the address of references and 'this' against 0
+    # in debug builds. Technically, these can never be null in
+    # well-defined C/C++ and Clang can optimize such checks away in
+    # release builds, but they may be used in asserts in debug builds.
+    extra_flags = [
+      "-Wno-undefined-bool-conversion",
+      "-Wno-tautological-undefined-compare",
+    ]
+    cflags_cc += extra_flags
+    cflags_objcc += extra_flags
+  }
+
+  if (is_clang && !is_nacl) {
+    # This is here so that all files get recompiled after a clang roll and
+    # when turning clang on or off. (defines are passed via the command line,
+    # and build systems rebuild things when their command line changes). Nothing
+    # should ever read this define.
+    defines +=
+        [ "CR_CLANG_REVISION=" + exec_script("//tools/clang/scripts/update.py",
+                                             [ "--print-revision" ],
+                                             "trim string") ]
+  }
+
+  # Mac-specific compiler flags setup.
+  # ----------------------------------
+  if (is_mac || is_ios) {
+    # These flags are shared between the C compiler and linker.
+    common_mac_flags = []
+
+    # CPU architecture.
+    if (current_cpu == "x64") {
+      common_mac_flags += [
+        "-arch",
+        "x86_64",
+      ]
+    } else if (current_cpu == "x86") {
+      common_mac_flags += [
+        "-arch",
+        "i386",
+      ]
+    } else if (current_cpu == "arm") {
+      common_mac_flags += [
+        "-arch",
+        "armv7",
+      ]
+    } else if (current_cpu == "arm64") {
+      common_mac_flags += [
+        "-arch",
+        "arm64",
+      ]
+    }
+
+    cflags += common_mac_flags
+
+    # Without this, the constructors and destructors of a C++ object inside
+    # an Objective C struct won't be called, which is very bad.
+    cflags_objcc += [ "-fobjc-call-cxx-cdtors" ]
+
+    cflags_c += [ "-std=c99" ]
+
+    ldflags += common_mac_flags
+  } else if (is_posix) {
+    # CPU architecture. We may or may not be doing a cross compile now, so for
+    # simplicity we always explicitly set the architecture.
+    if (current_cpu == "x64") {
+      cflags += [
+        "-m64",
+        "-march=x86-64",
+      ]
+      ldflags += [ "-m64" ]
+    } else if (current_cpu == "x86") {
+      cflags += [ "-m32" ]
+      ldflags += [ "-m32" ]
+      if (is_clang) {
+        cflags += [
+          # Otherwise building libyuv gives clang's register allocator issues;
+          # see llvm.org/PR15798 / crbug.com/233709
+          "-momit-leaf-frame-pointer",
+
+          # Align the stack on 16-byte boundaries, http://crbug.com/418554.
+          "-mstack-alignment=16",
+          "-mstackrealign",
+        ]
+      }
+    } else if (current_cpu == "arm") {
+      cflags += [
+        "-march=$arm_arch",
+        "-mfloat-abi=$arm_float_abi",
+      ]
+      if (arm_tune != "") {
+        cflags += [ "-mtune=$arm_tune" ]
+      }
+      if (arm_use_thumb) {
+        cflags += [ "-mthumb" ]
+        if (is_android && !is_clang) {  # Clang doesn't support this option.
+          cflags += [ "-mthumb-interwork" ]
+        }
+      }
+      if (!is_clang) {
+        # Clang doesn't support these flags.
+        cflags += [
+          # The tree-sra optimization (scalar replacement for
+          # aggregates enabling subsequent optimizations) leads to
+          # invalid code generation when using the Android NDK's
+          # compiler (r5-r7). This can be verified using
+          # webkit_unit_tests' WTF.Checked_int8_t test.
+          "-fno-tree-sra",
+
+          # The following option is disabled to improve binary
+          # size and performance in gcc 4.9.
+          "-fno-caller-saves",
+        ]
+      }
+    } else if (current_cpu == "mipsel") {
+      if (mips_arch_variant == "r6") {
+        cflags += [
+          "-mips32r6",
+          "-Wa,-mips32r6",
+        ]
+        if (is_android) {
+          ldflags += [
+            "-mips32r6",
+            "-Wl,-melf32ltsmip",
+          ]
+        }
+      } else if (mips_arch_variant == "r2") {
+        cflags += [
+          "-mips32r2",
+          "-Wa,-mips32r2",
+        ]
+        if (mips_float_abi == "hard" && mips_fpu_mode != "") {
+          cflags += [ "-m$mips_fpu_mode" ]
+        }
+      } else if (mips_arch_variant == "r1") {
+        cflags += [
+          "-mips32",
+          "-Wa,-mips32",
+        ]
+      }
+
+      if (mips_dsp_rev == 1) {
+        cflags += [ "-mdsp" ]
+      } else if (mips_dsp_rev == 2) {
+        cflags += [ "-mdspr2" ]
+      }
+
+      cflags += [ "-m${mips_float_abi}-float" ]
+    } else if (current_cpu == "mips64el") {
+      if (mips_arch_variant == "r6") {
+        cflags += [
+          "-mips64r6",
+          "-Wa,-mips64r6",
+        ]
+        ldflags += [ "-mips64r6" ]
+      } else if (mips_arch_variant == "r2") {
+        cflags += [
+          "-mips64r2",
+          "-Wa,-mips64r2",
+        ]
+        ldflags += [ "-mips64r2" ]
+      }
+    }
+
+    defines += [ "_FILE_OFFSET_BITS=64" ]
+
+    if (!is_android) {
+      defines += [
+        "_LARGEFILE_SOURCE",
+        "_LARGEFILE64_SOURCE",
+      ]
+    }
+
+    # Omit unwind support in official builds to save space. We can use breakpad
+    # for these builds.
+    if (is_chrome_branded && is_official_build) {
+      cflags += [
+        "-fno-unwind-tables",
+        "-fno-asynchronous-unwind-tables",
+      ]
+      defines += [ "NO_UNWIND_TABLES" ]
+    } else {
+      cflags += [ "-funwind-tables" ]
+    }
+  }
+
+  if (enable_profiling && !is_debug) {
+    # The GYP build spams this define into every compilation unit, as we do
+    # here, but it only appears to be used in base and a couple other places.
+    # TODO(abarth): Should we move this define closer to where it's used?
+    defines += [ "ENABLE_PROFILING" ]
+
+    cflags += [
+      "-fno-omit-frame-pointer",
+      "-g",
+    ]
+
+    if (enable_full_stack_frames_for_profiling) {
+      cflags += [
+        "-fno-inline",
+        "-fno-optimize-sibling-calls",
+      ]
+    }
+  }
+
+  # Linux/Android common flags setup.
+  # ---------------------------------
+  if (is_linux || is_android) {
+    cflags += [
+      "-fPIC",
+      "-pipe",  # Use pipes for communicating between sub-processes. Faster.
+    ]
+
+    ldflags += [
+      "-fPIC",
+      "-Wl,-z,noexecstack",
+      "-Wl,-z,now",
+      "-Wl,-z,relro",
+    ]
+    if (!using_sanitizer) {
+      ldflags += [ "-Wl,-z,defs" ]
+    }
+  }
+
+  # Linux-specific compiler flags setup.
+  # ------------------------------------
+  if (is_linux) {
+    cflags += [ "-pthread" ]
+    ldflags += [ "-pthread" ]
+  }
+  if (use_gold) {
+    gold_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+                            root_build_dir)
+    ldflags += [
+      "-B$gold_path",
+
+      # Newer gccs and clangs support -fuse-ld, use the flag to force gold
+      # selection.
+      # gcc -- http://gcc.gnu.org/onlinedocs/gcc-4.8.0/gcc/Optimize-Options.html
+      "-fuse-ld=gold",
+
+      # Experimentation found that using four linking threads
+      # saved ~20% of link time.
+      # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
+      # Only apply this to the target linker, since the host
+      # linker might not be gold, but isn't used much anyway.
+      # TODO(raymes): Disable threading because gold is frequently
+      # crashing on the bots: crbug.com/161942.
+      #"-Wl,--threads",
+      #"-Wl,--thread-count=4",
+    ]
+
+    if (!is_asan && !is_msan && !is_lsan && !is_tsan) {
+      # TODO(brettw) common.gypi has this only for target toolset.
+      ldflags += [ "-Wl,--icf=all" ]
+    }
+
+    # TODO(thestig): Make this flag work with GN.
+    #if (!is_official_build && !is_chromeos && !(is_asan || is_lsan || is_tsan || is_msan)) {
+    #  ldflags += [
+    #    "-Wl,--detect-odr-violations",
+    #  ]
+    #}
+  }
+
+  if (linux_use_bundled_binutils) {
+    binutils_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+                                root_build_dir)
+    cflags += [ "-B$binutils_path" ]
+  }
+
+  # Clang-specific compiler flags setup.
+  # ------------------------------------
+  if (is_clang) {
+    cflags += [ "-fcolor-diagnostics" ]
+  }
+
+  # C++11 compiler flags setup.
+  # ---------------------------
+  if (is_linux || is_android || is_nacl) {
+    # gnu++11 instead of c++11 is needed because some code uses typeof() (a
+    # GNU extension).
+    # TODO(thakis): Eventually switch this to c++11 instead,
+    # http://crbug.com/427584
+    cflags_cc += [ "-std=gnu++11" ]
+  } else if (!is_win) {
+    cc_std = [ "-std=c++11" ]
+    cflags_cc += cc_std
+    cflags_objcc += cc_std
+  }
+
+  # Android-specific flags setup.
+  # -----------------------------
+  if (is_android) {
+    cflags += [
+      "-ffunction-sections",
+      "-funwind-tables",
+      "-fno-short-enums",
+    ]
+    if (!is_clang) {
+      # Clang doesn't support these flags.
+      cflags += [ "-finline-limit=64" ]
+    }
+    if (is_asan) {
+      # Android build relies on -Wl,--gc-sections removing unreachable code.
+      # ASan instrumentation for globals inhibits this and results in a library
+      # with unresolvable relocations.
+      # TODO(eugenis): find a way to reenable this.
+      cflags += [ "-mllvm -asan-globals=0" ]
+    }
+
+    defines += [ "ANDROID" ]
+
+    # The NDK has these things, but doesn't define the constants
+    # to say that it does. Define them here instead.
+    defines += [ "HAVE_SYS_UIO_H" ]
+
+    # Use gold for Android for most CPU architectures.
+    if (current_cpu == "x86" || current_cpu == "x64" || current_cpu == "arm") {
+      ldflags += [ "-fuse-ld=gold" ]
+      if (is_clang) {
+        # Let clang find the ld.gold in the NDK.
+        ldflags += [ "--gcc-toolchain=" +
+                     rebase_path(android_toolchain_root, root_build_dir) ]
+      }
+    }
+
+    ldflags += [
+      "-Wl,--no-undefined",
+
+      # Don't allow visible symbols from libgcc or libc++ to be
+      # re-exported.
+      "-Wl,--exclude-libs=libgcc.a",
+      "-Wl,--exclude-libs=libc++_static.a",
+
+      # Don't allow visible symbols from libraries that contain
+      # assembly code with symbols that aren't hidden properly.
+      # http://crbug.com/448386
+      "-Wl,--exclude-libs=libvpx_assembly_arm.a",
+    ]
+    if (current_cpu == "arm") {
+      ldflags += [
+        # Enable identical code folding to reduce size.
+        "-Wl,--icf=safe",
+      ]
+    }
+
+    if (is_clang) {
+      if (current_cpu == "arm") {
+        cflags += [ "--target=arm-linux-androideabi" ]
+        ldflags += [ "--target=arm-linux-androideabi" ]
+      } else if (current_cpu == "x86") {
+        cflags += [ "--target=x86-linux-androideabi" ]
+        ldflags += [ "--target=x86-linux-androideabi" ]
+      }
+    }
+  }
+}
+
+config("compiler_arm_fpu") {
+  if (current_cpu == "arm" && !is_ios) {
+    cflags = [ "-mfpu=$arm_fpu" ]
+  }
+}
+
+# runtime_library -------------------------------------------------------------
+#
+# Sets the runtime library and associated options.
+#
+# How do you determine what should go in here vs. "compiler" above? Consider if
+# a target might choose to use a different runtime library (ignore for a moment
+# if this is possible or reasonable on your system). If such a target would want
+# to change or remove your option, put it in the runtime_library config. If a
+# target wants the option regardless, put it in the compiler config.
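+#
+# For illustration only: a target wanting a different runtime would typically
+# do something like (hypothetical labels):
+#
+#   configs -= [ "//build/config/compiler:runtime_library" ]
+#   configs += [ "//my/custom:runtime_library" ]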
+
+config("runtime_library") {
+  cflags = []
+  defines = []
+  ldflags = []
+  lib_dirs = []
+  libs = []
+
+  if (is_component_build) {
+    # Component mode: dynamic CRT.
+    defines += [ "COMPONENT_BUILD" ]
+    if (is_win) {
+      # Since the library is shared, it requires exceptions or will give errors
+      # about things not matching, so keep exceptions on.
+      if (is_debug) {
+        cflags += [ "/MDd" ]
+      } else {
+        cflags += [ "/MD" ]
+      }
+    }
+  } else {
+    # Static CRT.
+    if (is_win) {
+      if (is_debug) {
+        cflags += [ "/MTd" ]
+      } else {
+        cflags += [ "/MT" ]
+      }
+    }
+  }
+
+  if (is_win) {
+    defines += [
+      "__STD_C",
+      "_CRT_RAND_S",
+      "_CRT_SECURE_NO_DEPRECATE",
+      "_HAS_EXCEPTIONS=0",
+      "_SCL_SECURE_NO_DEPRECATE",
+    ]
+  }
+
+  # Android standard library setup.
+  if (is_android) {
+    if (is_clang) {
+      # Work around incompatibilities between bionic and clang headers.
+      defines += [
+        "__compiler_offsetof=__builtin_offsetof",
+        "nan=__builtin_nan",
+      ]
+    }
+
+    defines += [ "__GNU_SOURCE=1" ]  # Necessary for clone().
+
+    # TODO(jdduke) Re-enable on mips after resolving linking
+    # issues with libc++ (crbug.com/456380).
+    if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+      ldflags += [ "-Wl,--warn-shared-textrel" ]
+    }
+    ldflags += [ "-nostdlib" ]
+
+    # NOTE: The libc++ header include paths below are specified in cflags
+    # rather than include_dirs because they need to come after include_dirs.
+    # Think of them like system headers, but don't use '-isystem' because the
+    # arm-linux-androideabi-4.4.3 toolchain (circa Gingerbread) will exhibit
+    # strange errors. The include ordering here is important; change with
+    # caution.
+    cflags += [
+      "-isystem" +
+          rebase_path("$android_libcpp_root/libcxx/include", root_build_dir),
+      "-isystem" + rebase_path(
+              "$android_ndk_root/sources/cxx-stl/llvm-libc++abi/libcxxabi/include",
+              root_build_dir),
+      "-isystem" +
+          rebase_path("$android_ndk_root/sources/android/support/include",
+                      root_build_dir),
+    ]
+
+    lib_dirs += [ "$android_libcpp_root/libs/$android_app_abi" ]
+    libs += [ "$android_libcpp_library" ]
+
+    if (current_cpu == "mipsel") {
+      libs += [
+        # ld linker is used for mips Android, and ld does not accept library
+        # absolute path prefixed by "-l"; Since libgcc does not exist in mips
+        # sysroot the proper library will be linked.
+        # TODO(gordanac): Remove once gold linker is used for mips Android.
+        "gcc",
+      ]
+    } else {
+      libs += [
+        # Manually link the libgcc.a that the cross compiler uses. This is
+        # absolute because the linker will look inside the sysroot if it's not.
+        rebase_path(android_libgcc_file),
+      ]
+    }
+
+    libs += [
+      "c",
+      "dl",
+      "m",
+    ]
+
+    # Clang with libc++ does not require an explicit atomic library reference.
+    if (!is_clang) {
+      libs += [ "atomic" ]
+    }
+  }
+}
+
+# default_warning_flags collects all warning flags that are used by default.
+# This is in a variable instead of a config so that it can be used in
+# both chromium_code and no_chromium_code.  This way these flags are guaranteed
+# to appear on the compile command line after -Wall.
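+# (See the "chromium_code" and "no_chromium_code" configs below, which append
+# these variables to their cflags.)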
+
+default_warning_flags = []
+default_warning_flags_cc = []
+if (is_win) {
+  if (!is_clang || current_cpu != "x86") {
+    default_warning_flags += [ "/WX" ]  # Treat warnings as errors.
+  }
+
+  default_warning_flags += [
+    # Warnings permanently disabled:
+
+    # TODO(GYP) The GYP build doesn't have this globally enabled but disabled
+    # for a bunch of individual targets. Re-enable this globally when those
+    # targets are fixed.
+    "/wd4018",  # Comparing signed and unsigned values.
+
+    # C4127: conditional expression is constant
+    # This warning can in theory catch dead code and other problems, but
+    # triggers in far too many desirable cases where the conditional
+    # expression is either set by macros or corresponds to some legitimate
+    # compile-time constant expression (due to constant template args,
+    # conditionals comparing the sizes of different types, etc.).  Some of
+    # these can be worked around, but it's not worth it.
+    "/wd4127",
+
+    # C4251: 'identifier' : class 'type' needs to have dll-interface to be
+    #        used by clients of class 'type2'
+    # This is necessary for the shared library build.
+    "/wd4251",
+
+    # C4351: new behavior: elements of array 'array' will be default
+    #        initialized
+    # This is a silly "warning" that basically just alerts you that the
+    # compiler is going to actually follow the language spec like it's
+    # supposed to, instead of not following it like old buggy versions did.
+    # There's absolutely no reason to turn this on.
+    "/wd4351",
+
+    # C4355: 'this': used in base member initializer list
+    # It's commonly useful to pass |this| to objects in a class' initializer
+    # list.  While this warning can catch real bugs, most of the time the
+    # constructors in question don't attempt to call methods on the passed-in
+    # pointer (until later), and annotating every legit usage of this is
+    # simply more hassle than the warning is worth.
+    "/wd4355",
+
+    # C4503: 'identifier': decorated name length exceeded, name was
+    #        truncated
+    # This only means that some long error messages might have truncated
+    # identifiers in the presence of lots of templates.  It has no effect on
+    # program correctness and there's no real reason to waste time trying to
+    # prevent it.
+    "/wd4503",
+
+    # Warning C4589 says: "Constructor of abstract class ignores
+    # initializer for virtual base class." Disable this warning because it
+    # is flaky in VS 2015 RTM. It triggers on compiler generated
+    # copy-constructors in some cases.
+    "/wd4589",
+
+    # C4611: interaction between 'function' and C++ object destruction is
+    #        non-portable
+    # This warning is unavoidable when using e.g. setjmp/longjmp.  MSDN
+    # suggests using exceptions instead of setjmp/longjmp for C++, but
+    # Chromium code compiles without exception support.  We therefore have to
+    # use setjmp/longjmp for e.g. JPEG decode error handling, which means we
+    # have to turn off this warning (and be careful about how object
+    # destruction happens in such cases).
+    "/wd4611",
+
+    # Warnings to evaluate and possibly fix/reenable later:
+
+    "/wd4100",  # Unreferenced formal function parameter.
+    "/wd4121",  # Alignment of a member was sensitive to packing.
+    "/wd4244",  # Conversion: possible loss of data.
+    "/wd4481",  # Nonstandard extension: override specifier.
+    "/wd4505",  # Unreferenced local function has been removed.
+    "/wd4510",  # Default constructor could not be generated.
+    "/wd4512",  # Assignment operator could not be generated.
+    "/wd4610",  # Class can never be instantiated, constructor required.
+    "/wd4996",  # Deprecated function warning.
+  ]
+
+  # VS xtree header file needs to be patched or 4702 (unreachable code
+  # warning) is reported if _HAS_EXCEPTIONS=0. Disable the warning if xtree is
+  # not patched.
+  if (!msvs_xtree_patched &&
+      exec_script("../../win_is_xtree_patched.py", [], "value") == 0) {
+    default_warning_flags += [ "/wd4702" ]  # Unreachable code.
+  }
+
+  # Building with Clang on Windows is a work in progress and very
+  # experimental. See crbug.com/82385.
+  # Keep this in sync with the similar block in build/common.gypi
+  if (is_clang) {
+    default_warning_flags += [
+      # TODO(hans): Make this list shorter eventually, http://crbug.com/504657
+      "-Qunused-arguments",  # http://crbug.com/504658
+      "-Wno-microsoft",  # http://crbug.com/505296
+      "-Wno-switch",  # http://crbug.com/505308
+      "-Wno-unknown-pragmas",  # http://crbug.com/505314
+      "-Wno-unused-function",  # http://crbug.com/505316
+      "-Wno-unused-value",  # http://crbug.com/505318
+      "-Wno-unused-local-typedef",  # http://crbug.com/411648
+    ]
+  }
+} else {
+  # Common GCC warning setup.
+  default_warning_flags += [
+    # Enables.
+    "-Wendif-labels",  # Weird old-style text after an #endif.
+    "-Werror",  # Warnings as errors.
+
+    # Disables.
+    "-Wno-missing-field-initializers",  # "struct foo f = {0};"
+    "-Wno-unused-parameter",  # Unused function parameters.
+  ]
+
+  if (is_mac) {
+    # TODO(abarth): Re-enable once https://github.com/domokit/mojo/issues/728
+    #               is fixed.
+    # default_warning_flags += [ "-Wnewline-eof" ]
+    if (!is_nacl) {
+      # When compiling Objective-C, warns if a method is used whose
+      # availability is newer than the deployment target. This is not
+      # required when compiling Chrome for iOS.
+      default_warning_flags += [ "-Wpartial-availability" ]
+    }
+  }
+
+  if (gcc_version >= 48) {
+    default_warning_flags_cc += [
+      # See comment for -Wno-c++11-narrowing.
+      "-Wno-narrowing",
+    ]
+  }
+
+  # Suppress warnings about ABI changes on ARM (Clang doesn't give this
+  # warning).
+  if (current_cpu == "arm" && !is_clang) {
+    default_warning_flags += [ "-Wno-psabi" ]
+  }
+
+  if (is_android) {
+    # Disable any additional warnings enabled by the Android build system but
+    # which chromium does not build cleanly with (when treating warnings as
+    # errors).
+    default_warning_flags += [
+      "-Wno-extra",
+      "-Wno-ignored-qualifiers",
+      "-Wno-type-limits",
+    ]
+    default_warning_flags_cc += [
+      # Disabling c++0x-compat should be handled in WebKit, but
+      # this currently doesn't work because gcc_version is not set
+      # correctly when building with the Android build system.
+      # TODO(torne): Fix this in WebKit.
+      "-Wno-error=c++0x-compat",
+
+      # Other things unrelated to -Wextra:
+      "-Wno-non-virtual-dtor",
+      "-Wno-sign-promo",
+    ]
+  }
+
+  if (gcc_version >= 48) {
+    # Don't warn about the "typedef 'foo' locally defined but not used"
+    # for gcc 4.8.
+    # TODO: remove this flag once all builds work. See crbug.com/227506
+    default_warning_flags += [ "-Wno-unused-local-typedefs" ]
+  }
+}
+if (is_clang) {
+  default_warning_flags += [
+    # This warns on using ints as initializers for floats in
+    # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
+    # which happens in several places in chrome code. Not sure if
+    # this is worth fixing.
+    "-Wno-c++11-narrowing",
+
+    # Don't die on dtoa code that uses a char as an array index.
+    # This is required solely for base/third_party/dmg_fp/dtoa.cc.
+    # TODO(brettw) move this to that project then!
+    "-Wno-char-subscripts",
+
+    # Warns on switches on enums that cover all enum values but
+    # also contain a default: branch. Chrome is full of that.
+    "-Wno-covered-switch-default",
+
+    # Clang considers the `register` keyword as deprecated, but e.g.
+    # code generated by flex (used in angle) contains that keyword.
+    # http://crbug.com/255186
+    "-Wno-deprecated-register",
+  ]
+
+  # NaCl's Clang compiler and Chrome's hermetic Clang compiler will almost
+  # always have different versions. Certain flags may not be recognized by
+  # one version or the other.
+  if (!is_nacl) {
+    # Flags NaCl does not recognize.
+    default_warning_flags += [
+      # TODO(hans): Get this cleaned up, http://crbug.com/428099
+      "-Wno-inconsistent-missing-override",
+
+      # TODO(thakis): Enable this, crbug.com/507717
+      "-Wno-shift-negative-value",
+    ]
+  }
+}
+
+# chromium_code ---------------------------------------------------------------
+#
+# Toggles between higher and lower warnings for code that is (or isn't)
+# part of Chromium.
+
+config("chromium_code") {
+  if (is_win) {
+    cflags = [ "/W4" ]  # Warning level 4.
+  } else {
+    cflags = [
+      "-Wall",
+      "-Wextra",
+    ]
+
+    # In Chromium code, we define __STDC_foo_MACROS in order to get the
+    # C99 macros on Mac and Linux.
+    defines = [
+      "__STDC_CONSTANT_MACROS",
+      "__STDC_FORMAT_MACROS",
+    ]
+
+    if (!using_sanitizer && (!is_linux || !is_clang || is_official_build)) {
+      # _FORTIFY_SOURCE isn't really supported by Clang now, see
+      # http://llvm.org/bugs/show_bug.cgi?id=16821.
+      # It seems to work fine with Ubuntu 12 headers though, so use it in
+      # official builds.
+      #
+      # Non-chromium code is not guaranteed to compile cleanly with
+      # _FORTIFY_SOURCE. Also, a fortified build may fail when optimizations
+      # are disabled, so only do that for Release builds.
+      defines += [ "_FORTIFY_SOURCE=2" ]
+    }
+  }
+  cflags += default_warning_flags
+  cflags_cc = default_warning_flags_cc
+}
+config("no_chromium_code") {
+  cflags = []
+  cflags_cc = []
+  defines = []
+
+  if (is_win) {
+    cflags += [
+      "/W3",  # Warning level 3.
+      "/wd4800",  # Disable warning when forcing value to bool.
+      "/wd4267",  # TODO(jschuh): size_t to int.
+      "/wd4996",  # Deprecated function warning.
+    ]
+    defines += [
+      "_CRT_NONSTDC_NO_WARNINGS",
+      "_CRT_NONSTDC_NO_DEPRECATE",
+    ]
+  }
+
+  if (is_linux) {
+    # Don't warn about ignoring the return value from e.g. close(). This is
+    # off by default in some gccs but on by default in others. BSD systems do
+    # not support this option, since they typically ship gcc 4.2.1, which
+    # predates this flag.
+    cflags += [ "-Wno-unused-result" ]
+  }
+
+  if (is_clang) {
+    cflags += [
+      # TODO(mgiuca): Move this suppression into individual third-party
+      # libraries as required. http://crbug.com/505301.
+      "-Wno-overloaded-virtual",
+
+      # Lots of third-party libraries have unused variables. Instead of
+      # suppressing them individually, we just blanket suppress them here.
+      "-Wno-unused-variable",
+    ]
+  }
+
+  if (is_linux || is_android) {
+    cflags += [
+      # Don't warn about printf format problems. This is off by default in gcc
+      # but on in Ubuntu's gcc(!).
+      "-Wno-format",
+    ]
+    cflags_cc += [
+      # Don't warn about hash_map in third-party code.
+      "-Wno-deprecated",
+    ]
+  }
+  cflags += default_warning_flags
+  cflags_cc += default_warning_flags_cc
+}
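+
+# Third-party targets typically swap from the default config to the more
+# permissive one; a minimal sketch of what such a BUILD.gn file would contain
+# (the labels assume this file is //build/config/compiler/BUILD.gn):
+#
+#   configs -= [ "//build/config/compiler:chromium_code" ]
+#   configs += [ "//build/config/compiler:no_chromium_code" ]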
+
+# rtti ------------------------------------------------------------------------
+#
+# Allows turning Run-Time Type Identification on or off.
+
+config("rtti") {
+  if (is_win) {
+    cflags_cc = [ "/GR" ]
+  }
+}
+config("no_rtti") {
+  if (is_win) {
+    cflags_cc = [ "/GR-" ]
+  } else {
+    rtti_flags = [ "-fno-rtti" ]
+    cflags_cc = rtti_flags
+    cflags_objcc = rtti_flags
+  }
+}
+
+# Warnings ---------------------------------------------------------------------
+
+# This will generate warnings when using Clang if code generates exit-time
+# destructors, which slow down program shutdown.
+# TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+config("wexit_time_destructors") {
+  # TODO: Enable on Windows too, http://crbug.com/404525
+  if (is_clang && !is_win) {
+    cflags = [ "-Wexit-time-destructors" ]
+  }
+}
+
+# On Windows, when compiling for x64, VC will issue a warning when converting
+# size_t to int because it will truncate the value. Our code should not have
+# these warnings and one should use a static_cast or a checked_cast for the
+# conversion depending on the case. However, a lot of code still needs to be
+# fixed. Apply this config to such targets to disable the warning.
+#
+# Note that this can be applied regardless of platform and architecture to
+# clean up the call sites. This will only apply the flag when necessary.
+#
+# TODO(jschuh): crbug.com/167187 fix this and delete this config.
+config("no_size_t_to_int_warning") {
+  if (is_win && current_cpu == "x64") {
+    cflags = [ "/wd4267" ]
+  }
+}
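+
+# For example, a target that still produces these truncation warnings could
+# add this config in its BUILD.gn (a sketch; the target name is hypothetical
+# and the label assumes this file is //build/config/compiler/BUILD.gn):
+#
+#   source_set("legacy_code") {
+#     configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
+#   }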
+
+# Optimization -----------------------------------------------------------------
+#
+# Note that BUILDCONFIG.gn sets up a variable "default_optimization_config"
+# which it will assign to the config it implicitly applies to every target. If
+# you want to override the optimization level for your target, remove this
+# config (which will expand differently for debug or release builds), and then
+# add back the one you want to override it with:
+#
+#   configs -= default_optimization_config
+#   configs += [ "//build/config/compiler/optimize_max" ]
+
+# Shared settings for both "optimize" and "optimize_max" configs.
+# IMPORTANT: On Windows "/O1" and "/O2" must go before the common flags.
+if (is_win) {
+  common_optimize_on_cflags = [
+    "/Ob2",  # Both explicit and auto inlining.
+    "/Oy-",  # Disable omitting frame pointers, must be after /O2.
+  ]
+  if (!is_asan) {
+    common_optimize_on_cflags += [
+      # Put data in separate COMDATs. This allows the linker
+      # to put bit-identical constants at the same address even if
+      # they're unrelated constants, which saves binary size.
+      # This optimization can't be used when ASan is enabled because
+      # it is not compatible with the ASan ODR checker.
+      "/Gw",
+    ]
+  }
+  common_optimize_on_ldflags = [ "/OPT:REF" ]
+} else {
+  common_optimize_on_cflags = [
+    # Don't emit the GCC version ident directives, they just end up in the
+    # .comment section taking up binary size.
+    "-fno-ident",
+
+    # Put data and code in their own sections, so that unused symbols
+    # can be removed at link time with --gc-sections.
+    "-fdata-sections",
+    "-ffunction-sections",
+  ]
+  common_optimize_on_ldflags = []
+
+  if (is_android) {
+    if (!using_sanitizer) {
+      common_optimize_on_cflags += [ "-fomit-frame-pointer" ]
+    }
+
+    # TODO(jdduke) Re-enable on mips after resolving linking
+    # issues with libc++ (crbug.com/456380).
+    if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+      common_optimize_on_ldflags += [
+        # Warn in case of text relocations.
+        "-Wl,--warn-shared-textrel",
+      ]
+    }
+  }
+
+  if (is_mac || is_ios) {
+    if (symbol_level == 2) {
+      # Mac dead code stripping requires symbols.
+      common_optimize_on_ldflags += [ "-Wl,-dead_strip" ]
+    }
+  } else {
+    # Non-Mac Posix linker flags.
+    common_optimize_on_ldflags += [
+      # Specifically tell the linker to perform optimizations.
+      # See http://lwn.net/Articles/192624/ .
+      "-Wl,-O1",
+      "-Wl,--gc-sections",
+    ]
+
+    if (!using_sanitizer) {
+      # Functions interposed by the sanitizers can make ld think
+      # that some libraries aren't needed when they actually are,
+      # http://crbug.com/234010. As a workaround, --as-needed is disabled
+      # when building with sanitizers.
+      common_optimize_on_ldflags += [ "-Wl,--as-needed" ]
+    }
+  }
+}
+
+# Default "optimization on" config. On Windows, this favors size over speed.
+config("optimize") {
+  if (is_win) {
+    # Favor size over speed, /O1 must be before the common flags. The GYP
+    # build also specifies /Os and /GF but these are implied by /O1.
+    cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
+  } else if (is_android || is_ios) {
+    cflags = [ "-Os" ] + common_optimize_on_cflags  # Favor size over speed.
+  } else {
+    cflags = [ "-O2" ] + common_optimize_on_cflags
+  }
+  ldflags = common_optimize_on_ldflags
+}
+
+# Turn off optimizations.
+config("no_optimize") {
+  if (is_win) {
+    cflags = [
+      "/Od",  # Disable optimization.
+      "/Ob0",  # Disable all inlining (on by default).
+      "/RTC1",  # Runtime checks for stack frame and uninitialized variables.
+    ]
+  } else if (is_android && !android_full_debug) {
+    # On Android, even when optimization is disabled, we still apply a few
+    # optimizations that barely affect debugging in order to keep the binary
+    # size down.
+    cflags = [
+      "-Os",
+      "-fdata-sections",
+      "-ffunction-sections",
+    ]
+    if (!using_sanitizer) {
+      cflags += [ "-fomit-frame-pointer" ]
+    }
+    ldflags = common_optimize_on_ldflags
+  } else {
+    cflags = [ "-O0" ]
+  }
+}
+
+# Turns up the optimization level. On Windows, this implies whole program
+# optimization and link-time code generation which is very expensive and should
+# be used sparingly.
+config("optimize_max") {
+  ldflags = common_optimize_on_ldflags
+  if (is_win) {
+    # Favor speed over size, /O2 must be before the common flags. The GYP
+    # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
+    cflags = [ "/O2" ] + common_optimize_on_cflags
+    if (is_official_build) {
+      # TODO(GYP): TODO(dpranke): Should these only be on in an official
+      # build, or on all the time? For now we'll require official build so
+      # that the compile is clean.
+      cflags += [
+        "/GL",  # Whole program optimization.
+
+        # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+        # Probably anything that this would catch that wouldn't be caught in a
+        # normal build isn't going to actually be a bug, so the incremental
+        # value of C4702 for PGO builds is likely very small.
+        "/wd4702",
+      ]
+      ldflags += [ "/LTCG" ]
+    }
+  } else {
+    cflags = [ "-O2" ] + common_optimize_on_cflags
+  }
+}
+
+# Symbols ----------------------------------------------------------------------
+
+config("symbols") {
+  if (is_win) {
+    import("//build/toolchain/goma.gni")
+    if (use_goma) {
+      cflags = [ "/Z7" ]  # No PDB file
+    } else {
+      cflags = [ "/Zi" ]  # Produce PDB file, no edit and continue.
+    }
+    ldflags = [ "/DEBUG" ]
+  } else {
+    cflags = [ "-g2" ]
+    if (use_debug_fission) {
+      cflags += [ "-gsplit-dwarf" ]
+    }
+  }
+}
+
+config("minimal_symbols") {
+  if (is_win) {
+    # Linker symbols for backtraces only.
+    ldflags = [ "/DEBUG" ]
+  } else {
+    cflags = [ "-g1" ]
+    if (use_debug_fission) {
+      cflags += [ "-gsplit-dwarf" ]
+    }
+  }
+}
+
+config("no_symbols") {
+  if (!is_win) {
+    cflags = [ "-g0" ]
+  }
+}
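+
+# As with the optimization configs above, a target can swap its symbol config
+# (a sketch; the name of the default config variable from BUILDCONFIG.gn is
+# assumed here):
+#
+#   configs -= default_symbols_config
+#   configs += [ "//build/config/compiler:minimal_symbols" ]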
diff --git a/build/config/crypto.gni b/build/config/crypto.gni
new file mode 100644
index 0000000..2cd72d3
--- /dev/null
+++ b/build/config/crypto.gni
@@ -0,0 +1,29 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file declares build flags for the SSL library configuration.
+#
+# TODO(brettw) this should probably be moved to src/crypto or somewhere, and
+# the global build dependency on it should be removed.
+#
+# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't
+# required. See the declare_args block of BUILDCONFIG.gn for advice on how
+# to set up feature flags.
+
+declare_args() {
+  # Use OpenSSL instead of NSS. This is used for all platforms but iOS. (See
+  # http://crbug.com/338886).
+  use_openssl = !is_ios
+}
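+
+# Like any declare_args() value, this can be overridden on the gn command
+# line, e.g. (a sketch):
+#
+#   gn gen out/Release --args="use_openssl=false"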
+
+# True when we're using OpenSSL for representing certificates. When targeting
+# Android, the platform certificate library is used for certificate
+# verification. On other targets, this flag also enables OpenSSL for certificate
+# verification, but this configuration is unsupported.
+use_openssl_certs = is_android
+
+# True if NSS is used for certificate verification. Note that this is
+# independent from use_openssl. It is possible to use OpenSSL for the crypto
+# library, but NSS for the platform certificate library.
+use_nss_certs = false
diff --git a/build/config/features.gni b/build/config/features.gni
new file mode 100644
index 0000000..93b19dd
--- /dev/null
+++ b/build/config/features.gni
@@ -0,0 +1,204 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains Chrome-feature-related build flags (see ui.gni for
+# UI-related ones). These should theoretically be moved to the build files of
+# the features themselves.
+#
+# However, today we have many "bad" dependencies on some of these flags from,
+# e.g. base, so they need to be global to match the GYP configuration. Also,
+# anything that needs a grit define must be in either this file or ui.gni.
+#
+# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't
+# required. See the declare_args block of BUILDCONFIG.gn for advice on how
+# to set up feature flags.
+
+import("//build/config/chrome_build.gni")
+if (is_android) {
+  import("//build/config/android/config.gni")
+}
+
+declare_args() {
+  # Multicast DNS.
+  enable_mdns = is_win || is_linux
+
+  enable_plugins = !is_android && !is_ios
+
+  # Enables Native Client support.
+  # TODO(GYP): Get NaCl linking on other platforms.
+  # Also, see if we can always get rid of enable_nacl_untrusted and
+  # enable_pnacl and always build them if enable_nacl is true.
+  # The "is_nacl" part of the condition is needed to ensure that
+  # the untrusted code is built properly; arguably it should be
+  # guarded by "is_nacl" directly rather than enable_nacl_untrusted, but
+  # this will go away when Mac and Win are working and we can just use
+  # the commented out logic.
+  # Eventually we want this to be:
+  #   enable_nacl = !is_ios && !is_android
+  enable_nacl = (is_linux && !is_chromeos && current_cpu == "x64") || is_nacl
+  enable_nacl_untrusted = enable_nacl
+  enable_pnacl = enable_nacl_untrusted
+
+  # If debug_devtools is set to true, JavaScript files for DevTools are stored
+  # as is and loaded from disk. Otherwise, a concatenated file is stored in
+  # resources.pak. It is still possible to load JS files from disk by passing
+  # --debug-devtools cmdline switch.
+  debug_devtools = false
+
+  # Enables WebRTC.
+  # TODO(GYP) make mac and android work.
+  enable_webrtc = !is_ios && !is_mac && !is_android
+
+  # Enables the Media Router.
+  enable_media_router = !is_ios && !is_android
+
+  # Enables proprietary codecs and demuxers; e.g. H264, MOV, AAC, and MP3.
+  # Android OS includes support for proprietary codecs regardless of building
+  # Chromium or Google Chrome. We also ship Google Chrome and Chromecast with
+  # proprietary codecs.
+  # TODO(GYP) The GYP build has || chromecast==1 for this:
+  proprietary_codecs = is_android || is_chrome_branded
+
+  enable_configuration_policy = true
+
+  # Enables support for background apps.
+  enable_background = !is_ios && !is_android
+
+  enable_captive_portal_detection = !is_android && !is_ios
+
+  # Enables use of the session service, which is enabled by default.
+  # Android stores them separately on the Java side.
+  enable_session_service = !is_android && !is_ios
+
+  enable_plugin_installation = is_win || is_mac
+
+  enable_app_list = !is_ios && !is_android
+
+  enable_supervised_users = !is_ios
+
+  enable_autofill_dialog = !is_ios
+
+  enable_google_now = !is_ios && !is_android
+
+  enable_one_click_signin = is_win || is_mac || (is_linux && !is_chromeos)
+
+  enable_remoting = !is_ios && !is_android
+
+  # Enable hole punching for the protected video.
+  enable_video_hole = is_android
+
+  # Enables browser side Content Decryption Modules. Required for embedders
+  # (e.g. Android and ChromeCast) that use a browser side CDM.
+  enable_browser_cdms = is_android
+
+  # The safe_browsing_mode variable controls the build-time configuration of
+  # the safe browsing feature. Safe browsing can be compiled at 4 different
+  # levels: 0 disables it, 1 enables it fully, 2 enables only the UI and
+  # reporting features for use with Data Saver on Mobile, and 3 enables
+  # extended mobile protection via an external API. When 3 is fully deployed,
+  # it will replace 2.
+  if (is_android) {
+    safe_browsing_mode = 2
+  } else if (is_ios) {
+    safe_browsing_mode = 0
+  } else {
+    safe_browsing_mode = 1
+  }
+}
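+
+# Build files consume these flags by importing this file and branching on the
+# values, e.g. (a sketch; the dependency label is hypothetical):
+#
+#   import("//build/config/features.gni")
+#   if (enable_webrtc) {
+#     deps += [ "//third_party/webrtc" ]
+#   }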
+
+# Additional dependent variables -----------------------------------------------
+
+# Set the version of CLD.
+#   0: Don't specify the version. This option is for Finch testing.
+#   1: Use only CLD1.
+#   2: Use only CLD2.
+if (is_android) {
+  cld_version = 1
+} else {
+  cld_version = 2
+}
+
+# libudev usage. This currently only affects the content layer.
+use_udev = is_linux
+
+# Enable the spell checker.
+enable_spellcheck = !is_android
+
+# Use the operating system's spellchecker rather than hunspell.
+use_platform_spellchecker = is_android || is_mac
+
+enable_pepper_cdms = enable_plugins && (is_linux || is_mac || is_win)
+
+# Enable basic printing support and UI.
+enable_basic_printing = !is_chromeos
+
+# Enable printing with print preview. It does not imply
+# enable_basic_printing. It's possible to build Chrome with preview only.
+enable_print_preview = !is_android
+
+# The seccomp-bpf sandbox is currently only supported on four architectures.
+# Do not disable seccomp_bpf anywhere without talking to
+# security@chromium.org!
+use_seccomp_bpf = (is_linux || is_android) &&
+                  (current_cpu == "x86" || current_cpu == "x64" ||
+                   current_cpu == "arm" || current_cpu == "mipsel")
+
+# Enable notifications everywhere except iOS.
+enable_notifications = !is_ios
+
+# TODO(brettw) this should be moved to net and only dependents get this define.
+disable_ftp_support = is_ios
+
+enable_web_speech = !is_android && !is_ios
+
+use_dbus = is_linux
+
+enable_extensions = !is_android && !is_ios
+
+enable_task_manager = !is_ios && !is_android
+
+use_cups = is_desktop_linux || is_mac
+
+enable_themes = !is_android && !is_ios
+
+# TODO(scottmg) remove this when we've fixed printing.
+win_pdf_metafile_for_printing = true
+
+# Whether we are using the rlz library or not.  Platforms like Android send
+# rlz codes for searches but do not use the library.
+enable_rlz_support = is_win || is_mac || is_ios || is_chromeos
+enable_rlz = is_chrome_branded && enable_rlz_support
+
+enable_settings_app = enable_app_list && !is_chromeos
+
+enable_service_discovery = enable_mdns || is_mac
+
+enable_wifi_bootstrapping = is_win || is_mac
+
+# Image loader extension is enabled on ChromeOS only.
+enable_image_loader_extension = is_chromeos
+
+# Chrome OS: whether to also build the upcoming version of
+# ChromeVox, which can then be enabled via a command-line switch.
+enable_chromevox_next = false
+
+# Use brlapi from brltty for braille display support.
+use_brlapi = is_chromeos
+
+# Option controlling the use of GConf (the classic GNOME configuration
+# system).
+# TODO(GYP) also require !embedded to enable.
+use_gconf = is_linux && !is_chromeos
+
+# Hangout services is an extension that adds extra features to Hangouts.
+# For official GYP builds, this flag is set; it will likely need to be
+# parameterized similarly here in the future.
+enable_hangout_services_extension = false
+
+# Whether to back up data before sync.
+enable_pre_sync_backup = is_win || is_mac || (is_linux && !is_chromeos)
+
+# WebVR support is disabled until platform implementations have been added.
+enable_webvr = false
diff --git a/build/config/fnl/BUILD.gn b/build/config/fnl/BUILD.gn
new file mode 100644
index 0000000..0eb35e3
--- /dev/null
+++ b/build/config/fnl/BUILD.gn
@@ -0,0 +1,48 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/features.gni")
+import("//build/config/sysroot.gni")
+import("//build/config/ui.gni")
+
+config("sdk") {
+  if (sysroot != "") {
+    cflags = [ "--sysroot=" + sysroot ]
+    ldflags = [ "--sysroot=" + sysroot ]
+
+    # Need to get some linker flags out of the sysroot.
+    ldflags += [ exec_script("../linux/sysroot_ld_path.py",
+                             [
+                               rebase_path("//build/linux/sysroot_ld_path.sh",
+                                           root_build_dir),
+                               sysroot,
+                             ],
+                             "value") ]
+  }
+
+  defines = [ "FNL_MUSL" ]
+
+  if (use_ozone) {
+    defines += [ "MESA_EGL_NO_X11_HEADERS" ]
+  }
+}
+
+config("fontconfig") {
+  libs = [ "fontconfig" ]
+}
+
+pkg_config("freetype2") {
+  packages = [ "freetype2" ]
+}
+
+if (use_evdev_gestures) {
+  pkg_config("libevdev-cros") {
+    packages = [ "libevdev-cros" ]
+  }
+
+  pkg_config("libgestures") {
+    packages = [ "libgestures" ]
+  }
+}
diff --git a/build/config/gcc/BUILD.gn b/build/config/gcc/BUILD.gn
new file mode 100644
index 0000000..47bcc0b
--- /dev/null
+++ b/build/config/gcc/BUILD.gn
@@ -0,0 +1,46 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This config causes functions not to be automatically exported from shared
+# libraries. By default, all symbols are exported but this means there are
+# lots of exports that slow everything down. In general we explicitly mark
+# which functions we want to export from components.
+#
+# Some third_party code assumes all functions are exported so this is separated
+# into its own config so such libraries can remove this config to make symbols
+# public again.
+#
+# See http://gcc.gnu.org/wiki/Visibility
+config("symbol_visibility_hidden") {
+  # Note that -fvisibility-inlines-hidden is set globally in the compiler
+  # config since that can almost always be applied.
+  cflags = [ "-fvisibility=hidden" ]
+}
+
+# Settings for executables and shared libraries.
+config("executable_ldconfig") {
+  if (is_android) {
+    ldflags = [
+      "-Bdynamic",
+      "-Wl,-z,nocopyreloc",
+    ]
+  } else {
+    # Android doesn't support rpath, so these flags are used on all other
+    # platforms.
+    ldflags = [
+      # Want to pass "\$". GN will re-escape as required for ninja.
+      "-Wl,-rpath=\$ORIGIN/",
+      "-Wl,-rpath-link=",
+
+      # Newer binutils don't set DT_RPATH unless you disable "new" dtags
+      # and the new DT_RUNPATH doesn't work without --no-as-needed flag.
+      "-Wl,--disable-new-dtags",
+    ]
+  }
+}
+
+config("no_exceptions") {
+  no_exceptions_flags = [ "-fno-exceptions" ]
+  cflags_cc = no_exceptions_flags
+  cflags_objcc = no_exceptions_flags
+}
diff --git a/build/config/gcc/gcc_version.gni b/build/config/gcc/gcc_version.gni
new file mode 100644
index 0000000..6741e45
--- /dev/null
+++ b/build/config/gcc/gcc_version.gni
@@ -0,0 +1,26 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_android) {
+  gcc_version = 49
+} else if (current_toolchain == "//build/toolchain/cros:target" ||
+           current_toolchain == "//build/toolchain/linux:mipsel") {
+  gcc_version = exec_script("../../compiler_version.py",
+                            [
+                              "target",
+                              "compiler",
+                            ],
+                            "value")
+} else if (current_toolchain == "//build/toolchain/linux:x64" ||
+           current_toolchain == "//build/toolchain/linux:x86") {
+  # These are both the same and just use the default gcc on the system.
+  gcc_version = exec_script("../../compiler_version.py",
+                            [
+                              "host",
+                              "compiler",
+                            ],
+                            "value")
+} else {
+  gcc_version = 0
+}
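+
+# Consumers import this file and branch on the computed version, e.g.
+# (a sketch, mirroring usage in the compiler configs):
+#
+#   import("//build/config/gcc/gcc_version.gni")
+#   if (gcc_version >= 48) {
+#     cflags += [ "-Wno-unused-local-typedefs" ]
+#   }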
diff --git a/build/config/ios/BUILD.gn b/build/config/ios/BUILD.gn
new file mode 100644
index 0000000..0292315
--- /dev/null
+++ b/build/config/ios/BUILD.gn
@@ -0,0 +1,15 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/config/ios/ios_sdk.gni")
+
+config("sdk") {
+  common_flags = [
+    "-stdlib=libc++",
+  ]
+
+  cflags = common_flags
+  ldflags = common_flags
+}
diff --git a/build/config/ios/XcodeHarness/.gitignore b/build/config/ios/XcodeHarness/.gitignore
new file mode 100644
index 0000000..89c499e
--- /dev/null
+++ b/build/config/ios/XcodeHarness/.gitignore
@@ -0,0 +1,18 @@
+# Xcode
+.DS_Store
+build/
+*.pbxuser
+!default.pbxuser
+*.mode1v3
+!default.mode1v3
+*.mode2v3
+!default.mode2v3
+*.perspectivev3
+!default.perspectivev3
+*.xcworkspace
+!default.xcworkspace
+xcuserdata
+profile
+*.moved-aside
+DerivedData
+.idea/
diff --git a/build/config/ios/XcodeHarness/FakeMain.m b/build/config/ios/XcodeHarness/FakeMain.m
new file mode 100644
index 0000000..5ddbd34
--- /dev/null
+++ b/build/config/ios/XcodeHarness/FakeMain.m
@@ -0,0 +1,10 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Foundation/Foundation.h>
+
+int main(int argc, char* argv[]) {
+  NSCAssert(NO, @"Placeholder for Xcode. Should never be run");
+  return EXIT_FAILURE;
+}
diff --git a/build/config/ios/find_signing_identity.py b/build/config/ios/find_signing_identity.py
new file mode 100644
index 0000000..2fe67f9
--- /dev/null
+++ b/build/config/ios/find_signing_identity.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import subprocess
+import sys
+import re
+
+def ListIdentities():
+  return subprocess.check_output([
+    '/usr/bin/env',
+    'xcrun',
+    'security',
+    'find-identity',
+    '-v',
+    '-p',
+    'codesigning',
+  ]).strip()
+
+
+def FindValidIdentity():
+  lines = ListIdentities().splitlines()
+  # Look for something like "2) XYZ "iPhone Developer: Name (ABC)"".
+  # Group 1 captures the hex identity hash, group 2 the description. Note
+  # the character class is [A-F0-9]; a '|' inside it would match literally.
+  exp = re.compile(r'.*\) ([A-F0-9]*)(.*)')
+  for line in lines:
+    res = exp.match(line)
+    if res is None:
+      continue
+    if "iPhone Developer: Google Development" in res.group(2):
+      return res.group(1)
+  return ""
+
+
+if __name__ == '__main__':
+  print FindValidIdentity()
diff --git a/build/config/ios/ios_app.py b/build/config/ios/ios_app.py
new file mode 100644
index 0000000..b8bf544
--- /dev/null
+++ b/build/config/ios/ios_app.py
@@ -0,0 +1,137 @@
+#!/usr/bin/python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import errno
+import subprocess
+import sys
+
+PLUTIL = [
+  '/usr/bin/env',
+  'xcrun',
+  'plutil'
+]
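+
+# Example invocations of the subcommands defined below (paths and names
+# hypothetical):
+#   ios_app.py structure -d out/Debug -n MyApp
+#   ios_app.py plist -i Info.plist -o out/Debug/plist/MyApp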
+
+def MakeDirectories(path):
+  try:
+    os.makedirs(path)
+  except OSError as exc:
+    if exc.errno == errno.EEXIST and os.path.isdir(path):
+      return 0
+    else:
+      return -1
+
+  return 0
+
+
+def ProcessInfoPlist(args):
+  output_plist_file = os.path.abspath(os.path.join(args.output, 'Info.plist'))
+
+  if MakeDirectories(os.path.dirname(output_plist_file)) == -1:
+    return -1
+
+  return subprocess.check_call(PLUTIL + [
+    '-convert',
+    'binary1',
+    '-o',
+    output_plist_file,
+    '--',
+    args.input,
+  ])
+
+
+def PerformCodeSigning(args):
+  return subprocess.check_call([
+    '/usr/bin/env',
+    'xcrun',
+    'codesign',
+    '--entitlements',
+    args.entitlements_path,
+    '--sign',
+    args.identity,
+    '-f',
+    args.application_path,
+  ])
+
+def GenerateDSYM(args):
+  return subprocess.check_call([
+    '/usr/bin/env',
+    'xcrun',
+    'dsymutil',
+    '-o',
+    args.output,
+    args.executable_path
+  ])
+
+
+def GenerateProjectStructure(args):
+  application_path = os.path.join(args.dir, args.name + ".app")
+  return MakeDirectories(application_path)
+
+
+def Main():
+  parser = argparse.ArgumentParser(description='A script that aids in '
+                                   'the creation of an iOS application')
+
+  subparsers = parser.add_subparsers()
+
+  # Plist Parser
+
+  plist_parser = subparsers.add_parser('plist',
+                                       help='Process the Info.plist')
+  plist_parser.set_defaults(func=ProcessInfoPlist)
+
+  plist_parser.add_argument('-i', dest='input', help='The input plist path')
+  plist_parser.add_argument('-o', dest='output', help='The output plist dir')
+
+  # Directory Structure Parser
+
+  dir_struct_parser = subparsers.add_parser('structure',
+                      help='Creates the directory structure of an iOS app')
+
+  dir_struct_parser.set_defaults(func=GenerateProjectStructure)
+
+  dir_struct_parser.add_argument('-d', dest='dir', help='Out directory')
+  dir_struct_parser.add_argument('-n', dest='name', help='App name')
+
+  # Code Signing
+
+  code_signing_parser = subparsers.add_parser('codesign',
+                        help='Code sign the specified application')
+
+  code_signing_parser.set_defaults(func=PerformCodeSigning)
+
+  code_signing_parser.add_argument('-p', dest='application_path', required=True,
+                                   help='The application path')
+  code_signing_parser.add_argument('-i', dest='identity', required=True,
+                                   help='The code signing identity to use')
+  code_signing_parser.add_argument('-e', dest='entitlements_path',
+                                   required=True,
+                                   help='The path to the entitlements .xcent')
+
+  # dSYM Generation
+
+  dsym_generation_parser = subparsers.add_parser('dsym',
+                        help='Generate a .dSYM file for an executable')
+
+  dsym_generation_parser.set_defaults(func=GenerateDSYM)
+
+  dsym_generation_parser.add_argument('-e', dest='executable_path',
+                                      required=True,
+                                      help='The executable path')
+  dsym_generation_parser.add_argument('-o', dest='output',
+                                      required=True,
+                                      help='The output file name')
+
+  # Engage!
+
+  args = parser.parse_args()
+
+  return args.func(args)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/build/config/ios/ios_sdk.gni b/build/config/ios/ios_sdk.gni
new file mode 100644
index 0000000..65654a5
--- /dev/null
+++ b/build/config/ios/ios_sdk.gni
@@ -0,0 +1,58 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # SDK path to use. When empty this will use the default SDK based on the
+  # value of use_ios_simulator.
+  ios_sdk_path = ""
+
+  # Set to true when targeting a simulator build on iOS. False means that the
+  # target is for running on the device. The default value is to use the
+  # Simulator except when targeting GYP's Xcode builds (for compat with the
+  # existing GYP build).
+  use_ios_simulator = true
+
+  # Version of iOS that we're targeting.
+  ios_deployment_target = "6.0"
+
+  # The iOS Code signing identity to use
+  ios_code_signing_identity = ""
+
+  # The path to the iOS device SDK.
+  ios_device_sdk_path = ""
+
+  # The path to the iOS simulator SDK.
+  ios_simulator_sdk_path = ""
+}
+
+if (ios_device_sdk_path == "") {
+  _ios_device_sdk_result =
+      exec_script("ios_sdk.py", [ "iphoneos" ], "list lines")
+  ios_device_sdk_path = _ios_device_sdk_result[0]
+}
+
+if (ios_simulator_sdk_path == "") {
+  _ios_sim_sdk_result =
+      exec_script("ios_sdk.py", [ "iphonesimulator" ], "list lines")
+  ios_simulator_sdk_path = _ios_sim_sdk_result[0]
+}
+
+# Compute default target.
+if (use_ios_simulator) {
+  ios_sdk_path = ios_simulator_sdk_path
+} else {
+  ios_sdk_path = ios_device_sdk_path
+}
+
+if (use_ios_simulator) {
+  # Always disable code signing on the simulator
+  ios_code_signing_identity = ""
+} else {
+  # If an identity is not provided, look for one on the host
+  if (ios_code_signing_identity == "") {
+    _ios_identities = exec_script("find_signing_identity.py", 
+                                  [], "list lines")
+    ios_code_signing_identity = _ios_identities[0]
+  }
+}
diff --git a/build/config/ios/ios_sdk.py b/build/config/ios/ios_sdk.py
new file mode 100644
index 0000000..dfec4db
--- /dev/null
+++ b/build/config/ios/ios_sdk.py
@@ -0,0 +1,19 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import subprocess
+import sys
+
+# This script returns the path to the SDK of the given type. Pass the type of
+# SDK you want, which is typically "iphoneos" or "iphonesimulator".
+#
+# In the GYP build, this is done inside GYP itself based on the SDKROOT
+# variable.
+
+if len(sys.argv) != 2:
+  print "Takes one arg (SDK to find)"
+  sys.exit(1)
+
+print subprocess.check_output(['xcodebuild', '-version', '-sdk',
+                               sys.argv[1], 'Path']).strip()
diff --git a/build/config/ios/lldb_start_commands.txt b/build/config/ios/lldb_start_commands.txt
new file mode 100644
index 0000000..42e0b14
--- /dev/null
+++ b/build/config/ios/lldb_start_commands.txt
@@ -0,0 +1,4 @@
+breakpoint set --name UIApplicationMain
+breakpoint set --name objc_exception_throw
+continue
+script print "........ Debugger break on main() ........"
diff --git a/build/config/ios/rules.gni b/build/config/ios/rules.gni
new file mode 100644
index 0000000..429c153
--- /dev/null
+++ b/build/config/ios/rules.gni
@@ -0,0 +1,234 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+ios_app_script = "//build/config/ios/ios_app.py"
+
+template("code_sign_ios") {
+  assert(defined(invoker.entitlements_path),
+         "The path to the entitlements .xcent file")
+  assert(defined(invoker.identity),
+         "The code signing identity")
+  assert(defined(invoker.application_path),
+         "The application to code sign")
+  assert(defined(invoker.deps))
+
+  action(target_name) {
+    sources = [
+      invoker.entitlements_path,
+    ]
+
+    _application_path = invoker.application_path
+
+    script = ios_app_script
+
+    outputs = [
+      "$_application_path/_CodeSignature/CodeResources"
+    ]
+
+    args = [
+      "codesign",
+      "-p",
+      rebase_path(invoker.application_path, root_build_dir),
+      "-i",
+      invoker.identity,
+      "-e",
+      rebase_path(invoker.entitlements_path, root_build_dir),
+    ]
+
+    deps = invoker.deps
+  }
+}
+
+template("xcode_harness_ios") {
+  assert(defined(invoker.deps),
+         "The dependencies must be specified")
+  assert(defined(invoker.app_bundle),
+         "The app bundle must be defined")
+  assert(defined(invoker.app_name),
+         "The application name must be defined")
+  app_name = invoker.app_name
+
+  xcode_project_gen_target_name = app_name + "_xcode"
+  copy(xcode_project_gen_target_name) {
+    sources = [
+      "//build/config/ios/XcodeHarness/FakeMain.m",
+      "//build/config/ios/XcodeHarness/Harness.xcodeproj",
+    ]
+    outputs = [
+      "$root_build_dir/$xcode_project_gen_target_name/{{source_file_part}}",
+    ]
+  }
+
+  bundle_copy_gen_target_name = app_name + "_bundle_copy"
+  copy(bundle_copy_gen_target_name) {
+    sources = [
+      invoker.app_bundle
+    ]
+    outputs = [
+      "$root_build_dir/$xcode_project_gen_target_name/Application",
+    ]
+
+    deps = invoker.deps
+  }
+
+  group(target_name) {
+    deps = [
+      ":$xcode_project_gen_target_name",
+      ":$bundle_copy_gen_target_name",
+    ]
+  }
+}
+
+template("resource_copy_ios") {
+  assert(defined(invoker.resources),
+         "The source list of resources to copy over")
+  assert(defined(invoker.bundle_directory),
+         "The directory within the bundle to place the sources in")
+  assert(defined(invoker.app_name),
+         "The name of the application")
+
+  _bundle_directory = invoker.bundle_directory
+  _app_name = invoker.app_name
+  _resources = invoker.resources
+
+  copy(target_name) {
+    set_sources_assignment_filter([])
+    sources = _resources
+    outputs = [ "$root_build_dir/$_app_name.app/$_bundle_directory/{{source_file_part}}" ]
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
+
+template("ios_app") {
+
+  assert(defined(invoker.deps),
+         "Dependencies must be specified for $target_name")
+  assert(defined(invoker.info_plist),
+         "The application plist file must be specified for $target_name")
+  assert(defined(invoker.app_name),
+         "The name of iOS application for $target_name")
+  assert(defined(invoker.entitlements_path),
+         "The entitlements path must be specified for $target_name")
+  assert(defined(invoker.code_signing_identity),
+         "The code signing identity must be specified for $target_name")
+
+  # We just create a variable so we can use it in string interpolation.
+  app_name = invoker.app_name
+
+  # Generate the project structure
+
+  struct_gen_target_name = target_name + "_struct"
+
+  action(struct_gen_target_name) {
+
+    script = ios_app_script
+
+    sources = []
+    outputs = [ "$root_build_dir/$app_name.app" ]
+
+    args = [
+      "structure",
+      "-d",
+      rebase_path(root_build_dir),
+      "-n",
+      app_name
+    ]
+
+  }
+
+  # Generate the executable
+
+  bin_gen_target_name = target_name + "_bin"
+
+  executable(bin_gen_target_name) {
+    libs = [
+      "AudioToolbox.framework",
+      "AVFoundation.framework",
+      "OpenGLES.framework",
+      "QuartzCore.framework",
+      "UIKit.framework",
+    ]
+    deps = invoker.deps
+    output_name = app_name
+  }
+
+  # Process the Info.plist
+
+  plist_gen_target_name = target_name + "_plist"
+
+  action(plist_gen_target_name) {
+
+    script = ios_app_script
+
+    sources = [ invoker.info_plist ]
+    outputs = [ "$root_build_dir/plist/$app_name/Info.plist" ]
+
+    args = [
+      "plist",
+      "-i",
+      rebase_path(invoker.info_plist, root_build_dir),
+      "-o",
+      rebase_path("$root_build_dir/plist/$app_name"),
+    ]
+  }
+
+  # Copy the generated binaries and assets to their appropriate locations
+
+  copy_gen_target_name = target_name + "_copy"
+  copy(copy_gen_target_name) {
+    sources = [
+      "$root_build_dir/plist/$app_name/Info.plist",
+      "$root_build_dir/$app_name",
+    ]
+
+    outputs = [
+      "$root_build_dir/$app_name.app/{{source_file_part}}"
+    ]
+
+    deps = [
+      ":$struct_gen_target_name",
+      ":$bin_gen_target_name",
+      ":$plist_gen_target_name",
+    ]
+  }
+
+  # Generate the Xcode Harness for Profiling
+
+  xcode_harness_gen_target_name = app_name + "_harness"
+  xcode_harness_ios(xcode_harness_gen_target_name) {
+    app_bundle = "$root_build_dir/$app_name.app"
+    deps = [
+      ":$bin_gen_target_name",
+      ":$struct_gen_target_name",
+      ":$copy_gen_target_name",
+    ]
+  }
+
+  # Perform Code Signing
+
+  code_sign_gen_target_name = target_name + "_codesign"
+  code_sign_ios(code_sign_gen_target_name) {
+    entitlements_path = invoker.entitlements_path
+    identity = invoker.code_signing_identity
+    application_path = "$root_build_dir/$app_name.app"
+    deps = [ ":$copy_gen_target_name" ]
+  }
+
+  # Top level group
+
+  group(target_name) {
+    # Skip code signing if no identity is provided. This is useful for
+    # simulator builds.
+    deps = [ ":$xcode_harness_gen_target_name" ]
+    if (invoker.code_signing_identity == "") {
+      deps += [ ":$copy_gen_target_name" ]
+    } else {
+      deps += [ ":$code_sign_gen_target_name" ]
+    }
+  }
+
+}
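+
+# A minimal invocation sketch (all names and paths hypothetical;
+# ios_code_signing_identity comes from ios_sdk.gni):
+#
+#   import("//build/config/ios/rules.gni")
+#
+#   ios_app("hello") {
+#     app_name = "Hello"
+#     info_plist = "Info.plist"
+#     entitlements_path = "Entitlements.xcent"
+#     code_signing_identity = ios_code_signing_identity
+#     deps = [ ":hello_lib" ]
+#   }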
diff --git a/build/config/linux/BUILD.gn b/build/config/linux/BUILD.gn
new file mode 100644
index 0000000..71a73b2
--- /dev/null
+++ b/build/config/linux/BUILD.gn
@@ -0,0 +1,62 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/features.gni")
+import("//build/config/sysroot.gni")
+import("//build/config/ui.gni")
+
+config("sdk") {
+  if (sysroot != "") {
+    cflags = [ "--sysroot=" + sysroot ]
+    ldflags = [ "--sysroot=" + sysroot ]
+
+    # Need to get some linker flags out of the sysroot.
+    ldflags += [ exec_script("sysroot_ld_path.py",
+                             [
+                               rebase_path("//build/linux/sysroot_ld_path.sh",
+                                           root_build_dir),
+                               sysroot,
+                             ],
+                             "value") ]
+  }
+}
+
+config("fontconfig") {
+  libs = [ "fontconfig" ]
+}
+
+pkg_config("freetype2") {
+  packages = [ "freetype2" ]
+}
+
+config("x11") {
+  libs = [
+    "X11",
+    "Xcomposite",
+    "Xcursor",
+    "Xdamage",
+    "Xext",
+    "Xfixes",
+    "Xi",
+    "Xrender",
+    "Xtst",
+  ]
+}
+
+config("xrandr") {
+  libs = [ "Xrandr" ]
+}
+
+config("xinerama") {
+  libs = [ "Xinerama" ]
+}
+
+config("xcomposite") {
+  libs = [ "Xcomposite" ]
+}
+
+config("xext") {
+  libs = [ "Xext" ]
+}
diff --git a/build/config/linux/gtk/BUILD.gn b/build/config/linux/gtk/BUILD.gn
new file mode 100644
index 0000000..9c9c696
--- /dev/null
+++ b/build/config/linux/gtk/BUILD.gn
@@ -0,0 +1,45 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux, "This file should only be referenced on Linux")
+
+# Depend on //build/config/linux/gtk to use GTK.
+#
+# GN doesn't check visibility for configs so we give this an obviously internal
+# name to discourage random targets from accidentally depending on this and
+# bypassing the GTK target's visibility.
+pkg_config("gtk_internal_config") {
+  # Gtk requires gmodule, but it does not list it as a dependency in some
+  # misconfigured systems.
+  packages = [
+    "gmodule-2.0",
+    "gtk+-2.0",
+    "gthread-2.0",
+  ]
+}
+
+# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
+# parts that explicitly need GTK are whitelisted on this target.
+group("gtk") {
+  visibility = [
+    "//chrome/browser/ui/libgtk2ui",
+    "//gpu/gles2_conform_support:gles2_conform_test_windowless",
+    "//remoting/host",
+    "//remoting/host/it2me:remote_assistance_host",
+    "//remoting/host:remoting_me2me_host_static",
+  ]
+  public_configs = [ ":gtk_internal_config" ]
+}
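+
+# A whitelisted target then uses GTK as an ordinary dependency (a sketch):
+#
+#   deps = [ "//build/config/linux/gtk" ]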
+
+# Depend on "gtkprint" to get this.
+pkg_config("gtkprint_internal_config") {
+  packages = [ "gtk+-unix-print-2.0" ]
+}
+
+group("gtkprint") {
+  visibility = [ "//chrome/browser/ui/libgtk2ui" ]
+  public_configs = [ ":gtkprint_internal_config" ]
+}
diff --git a/build/config/linux/pkg-config.py b/build/config/linux/pkg-config.py
new file mode 100644
index 0000000..fadcc0b
--- /dev/null
+++ b/build/config/linux/pkg-config.py
@@ -0,0 +1,200 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import subprocess
+import sys
+import re
+from optparse import OptionParser
+
+# This script runs pkg-config, optionally filtering out some results, and
+# returns the result.
+#
+# The result will be [ <includes>, <cflags>, <libs>, <lib_dirs>, <ldflags> ]
+# where each member is itself a list of strings.
+#
+# You can filter out matches using "-v <regexp>" where all results from
+# pkgconfig matching the given regular expression will be ignored. You can
+# specify more than one regular expression by specifying "-v" more than once.
+#
+# You can specify a sysroot using "-s <sysroot>" where sysroot is the absolute
+# system path to the sysroot used for compiling. This script will attempt to
+# generate correct paths for the sysroot.
+#
+# When using a sysroot, you must also specify the architecture via
+# "-a <arch>" where arch is either "x86" or "x64".
+#
+# Additionally, you can specify the option --atleast-version. This will skip
+# the normal outputting of a dictionary and instead print true or false,
+# depending on the return value of pkg-config for the given package.
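+#
+# Example output (hypothetical values) for a query like "glib-2.0":
+#   [["/usr/include/glib-2.0", "/usr/lib/glib-2.0/include"], [], ["glib-2.0"],
+#    [], []]
+# GN's "value" output conversion parses this directly into a nested list.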
+
+# If this is run on non-Linux platforms, just return nothing and indicate
+# success. This allows us to "kind of emulate" a Linux build from other
+# platforms.
+if sys.platform.find("linux") == -1:
+  print "[[],[],[],[],[]]"
+  sys.exit(0)
+
+
+def SetConfigPath(options):
+  """Set the PKG_CONFIG_PATH environment variable.
+  This takes into account any sysroot and architecture specification from the
+  options on the given command line."""
+
+  sysroot = options.sysroot
+  if not sysroot:
+    sysroot = ""
+
+  # Compute the library path name based on the architecture.
+  arch = options.arch
+  if sysroot and not arch:
+    print "You must specify an architecture via -a if using a sysroot."
+    sys.exit(1)
+  if arch == 'x64':
+    libpath = 'lib64'
+  else:
+    libpath = 'lib'
+
+  # Add the sysroot path to the environment's PKG_CONFIG_PATH
+  config_path = sysroot + '/usr/' + libpath + '/pkgconfig'
+  config_path += ':' + sysroot + '/usr/share/pkgconfig'
+  if 'PKG_CONFIG_PATH' in os.environ:
+    os.environ['PKG_CONFIG_PATH'] += ':' + config_path
+  else:
+    os.environ['PKG_CONFIG_PATH'] = config_path
+
+
+def GetPkgConfigPrefixToStrip(args):
+  """Returns the prefix from pkg-config where packages are installed.
+  This returned prefix is the one that should be stripped from the beginning of
+  directory names to take into account sysroots."""
+  # Some sysroots, like the Chromium OS ones, may generate paths that are not
+  # relative to the sysroot. For example,
+  # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all
+  # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr)
+  # instead of relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
+  # To support this correctly, it's necessary to extract the prefix to strip
+  # from pkg-config's |prefix| variable.
+  prefix = subprocess.check_output(["pkg-config", "--variable=prefix"] + args,
+      env=os.environ)
+  # When the prefix ends in '/usr', everything before it is the portion to
+  # strip from the beginning of directory names.
+  if prefix[-4:] == '/usr':
+    return prefix[:-4]
+  return prefix
+
+
+def MatchesAnyRegexp(flag, list_of_regexps):
+  """Returns true if the first argument matches any regular expression in the
+  given list."""
+  for regexp in list_of_regexps:
+    if regexp.search(flag) != None:
+      return True
+  return False
+
+
+def RewritePath(path, strip_prefix, sysroot):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(strip_prefix):
+      path = path[len(strip_prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+parser = OptionParser()
+parser.add_option('-p', action='store', dest='pkg_config', type='string',
+                  default='pkg-config')
+parser.add_option('-v', action='append', dest='strip_out', type='string')
+parser.add_option('-s', action='store', dest='sysroot', type='string')
+parser.add_option('-a', action='store', dest='arch', type='string')
+parser.add_option('--atleast-version', action='store',
+                  dest='atleast_version', type='string')
+parser.add_option('--libdir', action='store_true', dest='libdir')
+(options, args) = parser.parse_args()
+
+# Make a list of regular expressions to strip out.
+strip_out = []
+if options.strip_out != None:
+  for regexp in options.strip_out:
+    strip_out.append(re.compile(regexp))
+
+SetConfigPath(options)
+if options.sysroot:
+  prefix = GetPkgConfigPrefixToStrip(args)
+else:
+  prefix = ''
+
+if options.atleast_version:
+  # When asking for the return value, just run pkg-config and print the return
+  # value, no need to do other work.
+  if not subprocess.call([options.pkg_config,
+                          "--atleast-version=" + options.atleast_version] +
+                          args,
+                         env=os.environ):
+    print "true"
+  else:
+    print "false"
+  sys.exit(0)
+
+if options.libdir:
+  try:
+    libdir = subprocess.check_output([options.pkg_config,
+                                      "--variable=libdir"] +
+                                     args,
+                                     env=os.environ)
+  except (OSError, subprocess.CalledProcessError):
+    print "Error from pkg-config."
+    sys.exit(1)
+  sys.stdout.write(libdir.strip())
+  sys.exit(0)
+
+try:
+  flag_string = subprocess.check_output(
+      [ options.pkg_config, "--cflags", "--libs-only-l", "--libs-only-L" ] +
+      args, env=os.environ)
+  # For now just split on spaces to get the args out. This will break if
+  # pkgconfig returns quoted things with spaces in them, but that doesn't seem
+  # to happen in practice.
+  all_flags = flag_string.strip().split(' ')
+except (OSError, subprocess.CalledProcessError):
+  print "Could not run pkg-config."
+  sys.exit(1)
+
+
+sysroot = options.sysroot
+if not sysroot:
+  sysroot = ''
+
+includes = []
+cflags = []
+libs = []
+lib_dirs = []
+ldflags = []
+
+for flag in all_flags[:]:
+  if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out):
+    continue
+
+  if flag[:2] == '-l':
+    libs.append(RewritePath(flag[2:], prefix, sysroot))
+  elif flag[:2] == '-L':
+    lib_dirs.append(RewritePath(flag[2:], prefix, sysroot))
+  elif flag[:2] == '-I':
+    includes.append(RewritePath(flag[2:], prefix, sysroot))
+  elif flag[:3] == '-Wl':
+    ldflags.append(flag)
+  elif flag == '-pthread':
+    # Many libs specify "-pthread" which we don't need since we always include
+    # this anyway. Removing it here prevents a bunch of duplicate inclusions on
+    # the command line.
+    pass
+  else:
+    cflags.append(flag)
+
+# Output a GN array, the first one is the cflags, the second are the libs. The
+# JSON formatter prints GN compatible lists when everything is a list of
+# strings.
+print json.dumps([includes, cflags, libs, lib_dirs, ldflags])
diff --git a/build/config/linux/pkg_config.gni b/build/config/linux/pkg_config.gni
new file mode 100644
index 0000000..34ed1af
--- /dev/null
+++ b/build/config/linux/pkg_config.gni
@@ -0,0 +1,83 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# Defines a config specifying the result of running pkg-config for the given
+# packages. Put the package names you want to query in the "packages" variable
+# inside the template invocation.
+#
+# You can also add defines via the "defines" variable. This can be useful for
+# passing along defines that the library expects users of its headers to set.
+#
+# Example:
+#   pkg_config("mything") {
+#     packages = [ "mything1", "mything2" ]
+#     defines = [ "ENABLE_AWESOME" ]
+#   }
+#
+# You can also use "extra args" to filter out results (see pkg-config.py):
+#   extra_args = [ "-v, "foo" ]
+# To ignore libs and ldflags (only cflags/defines will be set, which is useful
+# when doing manual dynamic linking), set:
+#   ignore_libs = true
+
+declare_args() {
+  # A pkg-config wrapper to call instead of trying to find and call the right
+  # pkg-config directly. Wrappers like this are common in cross-compilation
+  # environments.
+  # Leaving it blank defaults to searching PATH for 'pkg-config' and relying on
+  # the sysroot mechanism to find the right .pc files.
+  pkg_config = ""
+}
+
+pkg_config_script = "//build/config/linux/pkg-config.py"
+
+# Define the args we pass to the pkg-config script for other build files that
+# need to invoke it manually.
+if (sysroot != "") {
+  # Pass the sysroot if we're using one (it requires the CPU arch also).
+  pkg_config_args = [
+    "-s",
+    sysroot,
+    "-a",
+    current_cpu,
+  ]
+} else if (pkg_config != "") {
+  pkg_config_args = [
+    "-p",
+    pkg_config,
+  ]
+} else {
+  pkg_config_args = []
+}
+
+template("pkg_config") {
+  assert(defined(invoker.packages),
+         "Variable |packages| must be defined to be a list in pkg_config.")
+  config(target_name) {
+    args = pkg_config_args + invoker.packages
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+
+    pkgresult = exec_script(pkg_config_script, args, "value")
+    include_dirs = pkgresult[0]
+    cflags = pkgresult[1]
+
+    if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
+      libs = pkgresult[2]
+      lib_dirs = pkgresult[3]
+      ldflags = pkgresult[4]
+    }
+
+    if (defined(invoker.defines)) {
+      defines = invoker.defines
+    }
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+  }
+}
diff --git a/build/config/linux/sysroot_ld_path.py b/build/config/linux/sysroot_ld_path.py
new file mode 100644
index 0000000..4bce7ee
--- /dev/null
+++ b/build/config/linux/sysroot_ld_path.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file takes two arguments, the relative location of the shell script that
+# does the checking, and the name of the sysroot.
+
+# TODO(brettw) the build/linux/sysroot_ld_path.sh script should be rewritten in
+# Python in this file.
+
+import subprocess
+import sys
+
+if len(sys.argv) != 3:
+  print "Need two arguments"
+  sys.exit(1)
+
+result = subprocess.check_output([sys.argv[1], sys.argv[2]]).strip()
+
+print '"' + result + '"'
diff --git a/build/config/locales.gni b/build/config/locales.gni
new file mode 100644
index 0000000..a628007
--- /dev/null
+++ b/build/config/locales.gni
@@ -0,0 +1,118 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Note: keep in sync with below.
+locales = [
+  "am",
+  "ar",
+  "bg",
+  "bn",
+  "ca",
+  "cs",
+  "da",
+  "de",
+  "el",
+  "en-GB",
+  "en-US",
+  "es-419",
+  "es",
+  "et",
+  "fa",
+  "fi",
+  "fil",
+  "fr",
+  "gu",
+  "he",
+  "hi",
+  "hr",
+  "hu",
+  "id",
+  "it",
+  "ja",
+  "kn",
+  "ko",
+  "lt",
+  "lv",
+  "ml",
+  "mr",
+  "ms",
+  "nb",
+  "nl",
+  "pl",
+  "pt-BR",
+  "pt-PT",
+  "ro",
+  "ru",
+  "sk",
+  "sl",
+  "sr",
+  "sv",
+  "sw",
+  "ta",
+  "te",
+  "th",
+  "tr",
+  "uk",
+  "vi",
+  "zh-CN",
+  "zh-TW",
+]
+
+# Same as the locales list but in the format Mac expects for output files:
+# it uses underscores instead of hyphens, and "en" instead of "en-US".
+locales_as_mac_outputs = [
+  "am",
+  "ar",
+  "bg",
+  "bn",
+  "ca",
+  "cs",
+  "da",
+  "de",
+  "el",
+  "en_GB",
+  "en",
+  "es_419",
+  "es",
+  "et",
+  "fa",
+  "fi",
+  "fil",
+  "fr",
+  "gu",
+  "he",
+  "hi",
+  "hr",
+  "hu",
+  "id",
+  "it",
+  "ja",
+  "kn",
+  "ko",
+  "lt",
+  "lv",
+  "ml",
+  "mr",
+  "ms",
+  "nb",
+  "nl",
+  "pl",
+  "pt_BR",
+  "pt_PT",
+  "ro",
+  "ru",
+  "sk",
+  "sl",
+  "sr",
+  "sv",
+  "sw",
+  "ta",
+  "te",
+  "th",
+  "tr",
+  "uk",
+  "vi",
+  "zh_CN",
+  "zh_TW",
+]
diff --git a/build/config/mac/BUILD.gn b/build/config/mac/BUILD.gn
new file mode 100644
index 0000000..132e12c
--- /dev/null
+++ b/build/config/mac/BUILD.gn
@@ -0,0 +1,34 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+config("sdk") {
+  common_flags = [
+    "-stdlib=libc++",
+  ]
+
+  cflags = common_flags
+  ldflags = common_flags
+}
+
+# On Mac, this is used for everything except static libraries.
+config("mac_dynamic_flags") {
+  ldflags = [
+    "-Wl,-search_paths_first",
+    "-L.",
+
+    # Path for loading shared libraries for unbundled binaries.
+    "-Wl,-rpath,@loader_path/.",
+
+    # Path for loading shared libraries for bundled binaries. Get back from
+    # Binary.app/Contents/MacOS.
+    "-Wl,-rpath,@loader_path/../../..",
+  ]
+}
+
+# On Mac, this is used only for executables.
+config("mac_executable_flags") {
+  ldflags = [ "-Wl,-pie" ]  # Position independent.
+}
diff --git a/build/config/mac/mac_app.py b/build/config/mac/mac_app.py
new file mode 100644
index 0000000..909fa58
--- /dev/null
+++ b/build/config/mac/mac_app.py
@@ -0,0 +1,113 @@
+#!/usr/bin/python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import errno
+import subprocess
+import sys
+
+PLUTIL = [
+  '/usr/bin/env',
+  'xcrun',
+  'plutil'
+]
+
+IBTOOL = [
+  '/usr/bin/env',
+  'xcrun',
+  'ibtool',
+]
+
+
+def MakeDirectories(path):
+  try:
+    os.makedirs(path)
+  except OSError as exc:
+    if exc.errno == errno.EEXIST and os.path.isdir(path):
+      return 0
+    else:
+      return -1
+
+  return 0
+
+
+def ProcessInfoPlist(args):
+  output_plist_file = os.path.abspath(os.path.join(args.output, 'Info.plist'))
+  return subprocess.check_call(PLUTIL + [
+    '-convert',
+    'binary1',
+    '-o',
+    output_plist_file,
+    '--',
+    args.input,
+  ])
+
+
+def ProcessNIB(args):
+  output_nib_file = os.path.join(os.path.abspath(args.output),
+      "%s.nib" % os.path.splitext(os.path.basename(args.input))[0])
+
+  return subprocess.check_call(IBTOOL + [
+    '--module',
+    args.module,
+    '--auto-activate-custom-fonts',
+    '--target-device',
+    'mac',
+    '--compile',
+    output_nib_file,
+    os.path.abspath(args.input),
+  ])
+
+
+def GenerateProjectStructure(args):
+  application_path = os.path.join(args.dir, args.name + ".app", "Contents")
+  return MakeDirectories(application_path)
+
+
+def Main():
+  parser = argparse.ArgumentParser(description='A script that aids in '
+                                   'the creation of a Mac application')
+
+  subparsers = parser.add_subparsers()
+
+  # Plist Parser
+
+  plist_parser = subparsers.add_parser('plist',
+                                       help='Process the Info.plist')
+  plist_parser.set_defaults(func=ProcessInfoPlist)
+
+  plist_parser.add_argument('-i', dest='input', help='The input plist path')
+  plist_parser.add_argument('-o', dest='output', help='The output plist dir')
+
+  # NIB Parser
+
+  plist_parser = subparsers.add_parser('nib',
+                                       help='Process a NIB file')
+  plist_parser.set_defaults(func=ProcessNIB)
+
+  plist_parser.add_argument('-i', dest='input', help='The input nib path')
+  plist_parser.add_argument('-o', dest='output', help='The output nib dir')
+  plist_parser.add_argument('-m', dest='module', help='The module name')
+
+  # Directory Structure Parser
+
+  dir_struct_parser = subparsers.add_parser('structure',
+                      help='Creates the directory structure of a Mac application')
+
+  dir_struct_parser.set_defaults(func=GenerateProjectStructure)
+
+  dir_struct_parser.add_argument('-d', dest='dir', help='Out directory')
+  dir_struct_parser.add_argument('-n', dest='name', help='App name')
+
+  # Engage!
+
+  args = parser.parse_args()
+
+  return args.func(args)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
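+
+# Example invocations (all paths and names are illustrative):
+#   mac_app.py plist -i Info.plist -o out/plist/MyApp
+#   mac_app.py nib -i MainMenu.xib -o MyApp.app/Contents/Resources -m MyApp
+#   mac_app.py structure -d out -n MyApp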
diff --git a/build/config/mac/mac_sdk.gni b/build/config/mac/mac_sdk.gni
new file mode 100644
index 0000000..54b68c4
--- /dev/null
+++ b/build/config/mac/mac_sdk.gni
@@ -0,0 +1,41 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+
+declare_args() {
+  # Minimum supported version of the Mac SDK.
+  mac_sdk_min = "10.8"
+
+  # Path to a specific version of the Mac SDK, not including a slash at
+  # the end. If empty, the path to the lowest version greater than or equal to
+  # mac_sdk_min is used.
+  mac_sdk_path = ""
+}
+
+find_sdk_args = [ "--print_sdk_path" ]
+if (is_chrome_branded && is_official_build) {
+  find_sdk_args += [
+    "--verify",
+    mac_sdk_min,
+    "--sdk_path=" + mac_sdk_path,
+  ]
+} else {
+  find_sdk_args += [ mac_sdk_min ]
+}
+
+# The tool will print the SDK path on the first line, and the version on the
+# second line.
+find_sdk_lines =
+    exec_script("//build/mac/find_sdk.py", find_sdk_args, "list lines")
+mac_sdk_version = find_sdk_lines[1]
+if (mac_sdk_path == "") {
+  # TODO(brettw) http://crbug.com/335325 when everybody moves to Xcode 5 we
+  # can remove the --print_sdk_path argument to find_sdk and instead just use
+  # the following two lines to get the path. Although it looks longer here, it
+  # saves forking a process in find_sdk.py so will be faster.
+  #mac_sdk_root = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX"
+  #mac_sdk_path = mac_sdk_root + mac_sdk_version + ".sdk"
+  mac_sdk_path = find_sdk_lines[0]
+}
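+
+# For illustration, with mac_sdk_min = "10.8" the find_sdk.py output is
+# assumed to look like:
+#   /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.8.sdk
+#   10.8
+# so mac_sdk_path picks up the first line and mac_sdk_version the second.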
diff --git a/build/config/mac/rules.gni b/build/config/mac/rules.gni
new file mode 100644
index 0000000..66f0de4
--- /dev/null
+++ b/build/config/mac/rules.gni
@@ -0,0 +1,220 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+mac_app_script = "//build/config/mac/mac_app.py"
+
+template("code_sign_mac") {
+  assert(defined(invoker.entitlements_path),
+         "The path to the entitlements .xcent file")
+  assert(defined(invoker.identity),
+         "The code signing identity")
+  assert(defined(invoker.application_path),
+         "The application to code sign")
+  assert(defined(invoker.deps))
+
+  action(target_name) {
+    sources = [
+      invoker.entitlements_path,
+    ]
+
+    _application_path = invoker.application_path
+
+    script = mac_app_script
+
+    outputs = [
+      "$_application_path/_CodeSignature/CodeResources"
+    ]
+
+    args = [
+      "codesign",
+      "-p",
+      rebase_path(invoker.application_path, root_build_dir),
+      "-i",
+      invoker.identity,
+      "-e",
+      rebase_path(invoker.entitlements_path, root_build_dir),
+    ]
+
+    deps = invoker.deps
+  }
+}
+
+template("process_nibs_mac") {
+  assert(defined(invoker.sources),
+         "The nib sources must be specified")
+  assert(defined(invoker.module),
+         "The nib module must be specified")
+  assert(defined(invoker.output_dir),
+         "The output directory must be specified")
+
+  action_foreach(target_name) {
+    sources = invoker.sources
+
+    script = mac_app_script
+
+    invoker_out_dir = invoker.output_dir
+
+    outputs = [
+      "$root_build_dir/$invoker_out_dir/{{source_name_part}}.nib"
+    ]
+
+    args = [
+      "nib",
+      "-i",
+      "{{source}}",
+      "-o",
+      invoker_out_dir,
+      "-m",
+      invoker.module,
+    ]
+  }
+}
+
+template("resource_copy_mac") {
+  assert(defined(invoker.resources),
+         "The source list of resources to copy over")
+  assert(defined(invoker.bundle_directory),
+         "The directory within the bundle to place the sources in")
+  assert(defined(invoker.app_name),
+         "The name of the application")
+
+  _bundle_directory = invoker.bundle_directory
+  _app_name = invoker.app_name
+  _resources = invoker.resources
+
+  copy(target_name) {
+    set_sources_assignment_filter([])
+    sources = _resources
+    outputs = [ "$root_build_dir/$_app_name.app/$_bundle_directory/Contents/Resources/{{source_file_part}}" ]
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
+
+template("mac_app") {
+
+  assert(defined(invoker.deps),
+         "Dependencies must be specified for $target_name")
+  assert(defined(invoker.info_plist),
+         "The application plist file must be specified for $target_name")
+  assert(defined(invoker.app_name),
+         "The name of Mac application for $target_name")
+  assert(defined(invoker.xibs),
+         "The list of XIB files must be specified for $target_name")
+  # assert(defined(invoker.entitlements_path),
+  #        "The entitlements path must be specified for $target_name")
+  # assert(defined(invoker.code_signing_identity),
+  #        "The entitlements path must be specified for $target_name")
+
+  # Assign to a local so the same value can be used in string interpolation.
+  app_name = invoker.app_name
+
+  # Generate the project structure
+
+  struct_gen_target_name = target_name + "_struct"
+
+  action(struct_gen_target_name) {
+
+    script = mac_app_script
+
+    sources = []
+    outputs = [ "$root_build_dir/$app_name.app" ]
+
+    args = [
+      "structure",
+      "-d",
+      rebase_path(root_build_dir),
+      "-n",
+      app_name
+    ]
+
+  }
+
+  # Generate the executable
+
+  bin_gen_target_name = target_name + "_bin"
+
+  executable(bin_gen_target_name) {
+    deps = invoker.deps
+    output_name = app_name
+  }
+
+  # Process the Info.plist
+
+  plist_gen_target_name = target_name + "_plist"
+
+  action(plist_gen_target_name) {
+
+    script = mac_app_script
+
+    sources = [ invoker.info_plist ]
+    outputs = [ "$root_build_dir/plist/$app_name/Info.plist" ]
+
+    args = [
+      "plist",
+      "-i",
+      rebase_path(invoker.info_plist, root_build_dir),
+      "-o",
+      rebase_path("$root_build_dir/plist/$app_name"),
+    ]
+  }
+
+  # Copy the generated binaries and assets to their appropriate locations
+
+  copy_plist_gen_target_name = target_name + "_plist_copy"
+  copy(copy_plist_gen_target_name) {
+    sources = [
+      "$root_build_dir/plist/$app_name/Info.plist",
+    ]
+
+    outputs = [
+      "$root_build_dir/$app_name.app/Contents/{{source_file_part}}"
+    ]
+
+    deps = [
+      ":$plist_gen_target_name",
+    ]
+  }
+
+  copy_bin_target_name = target_name + "_bin_copy"
+  copy(copy_bin_target_name) {
+    sources = [
+      "$root_build_dir/$app_name",
+    ]
+
+    outputs = [
+      "$root_build_dir/$app_name.app/Contents/MacOS/{{source_file_part}}"
+    ]
+
+    deps = [
+      ":$bin_gen_target_name",
+    ]
+  }
+
+  copy_xib_target_name = target_name + "_xib_copy"
+  process_nibs_mac(copy_xib_target_name) {
+    sources = invoker.xibs
+    module = app_name
+    output_dir = "$app_name.app/Contents/Resources"
+  }
+
+  copy_all_target_name = target_name + "_all_copy"
+  group(copy_all_target_name) {
+    deps = [
+      ":$struct_gen_target_name",
+      ":$copy_plist_gen_target_name",
+      ":$copy_bin_target_name",
+      ":$copy_xib_target_name",
+    ]
+  }
+
+  # Top level group
+
+  group(target_name) {
+    deps = [ ":$copy_all_target_name" ]
+  }
+
+}
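+
+# For illustration, a hypothetical application target could use the template
+# like this (all names are examples only):
+#
+#   mac_app("hello") {
+#     app_name = "Hello"
+#     info_plist = "Info.plist"
+#     xibs = [ "MainMenu.xib" ]
+#     deps = [ ":hello_lib" ]
+#   }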
diff --git a/build/config/mips.gni b/build/config/mips.gni
new file mode 100644
index 0000000..1b40657
--- /dev/null
+++ b/build/config/mips.gni
@@ -0,0 +1,43 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (current_cpu == "mipsel") {
+  declare_args() {
+    # MIPS arch variant. Possible values are:
+    #   "r1"
+    #   "r2"
+    #   "r6"
+    mips_arch_variant = "r1"
+
+    # MIPS DSP ASE revision. Possible values are:
+    #   0: unavailable
+    #   1: revision 1
+    #   2: revision 2
+    mips_dsp_rev = 0
+
+    # MIPS floating-point ABI. Possible values are:
+    #   "hard": sets the GCC -mhard-float option.
+    #   "soft": sets the GCC -msoft-float option.
+    mips_float_abi = "hard"
+
+    # MIPS32 floating-point register width. Possible values are:
+    #   "fp32": sets the GCC -mfp32 option.
+    #   "fp64": sets the GCC -mfp64 option.
+    #   "fpxx": sets the GCC -mfpxx option.
+    mips_fpu_mode = "fp32"
+  }
+} else if (current_cpu == "mips64el") {
+  # MIPS arch variant. Possible values are:
+  #   "r2"
+  #   "r6"
+  if (is_android) {
+    declare_args() {
+      mips_arch_variant = "r6"
+    }
+  } else {
+    declare_args() {
+      mips_arch_variant = "r2"
+    }
+  }
+}
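+
+# For illustration, an args.gn for a MIPS32r2 hard-float build might contain:
+#   mips_arch_variant = "r2"
+#   mips_float_abi = "hard"
+#   mips_fpu_mode = "fp64"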
diff --git a/build/config/sanitizers/BUILD.gn b/build/config/sanitizers/BUILD.gn
new file mode 100644
index 0000000..8996951
--- /dev/null
+++ b/build/config/sanitizers/BUILD.gn
@@ -0,0 +1,59 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+# Contains the dependencies needed for sanitizers to link into executables and
+# shared_libraries. Unconditionally depend upon this target as it is empty if
+# |is_asan|, |is_lsan|, |is_tsan|, |is_msan| and |use_custom_libcxx| are false.
+group("deps") {
+  deps = [
+    "//third_party/instrumented_libraries:deps",
+  ]
+  if (is_asan || is_lsan || is_tsan || is_msan) {
+    public_configs = [ ":sanitizer_options_link_helper" ]
+    deps += [ ":options_sources" ]
+  }
+  if (use_custom_libcxx) {
+    deps += [ "//buildtools/third_party/libc++:libcxx_proxy" ]
+  }
+}
+
+config("sanitizer_options_link_helper") {
+  ldflags = [ "-Wl,-u_sanitizer_options_link_helper" ]
+  if (is_asan) {
+    ldflags += [ "-fsanitize=address" ]
+  }
+  if (is_lsan) {
+    ldflags += [ "-fsanitize=leak" ]
+  }
+  if (is_tsan) {
+    ldflags += [ "-fsanitize=thread" ]
+  }
+  if (is_msan) {
+    ldflags += [ "-fsanitize=memory" ]
+  }
+}
+
+source_set("options_sources") {
+  visibility = [
+    ":deps",
+    "//:gn_visibility",
+  ]
+  sources = [
+    "//build/sanitizers/sanitizer_options.cc",
+  ]
+
+  if (is_asan) {
+    sources += [ "//build/sanitizers/asan_suppressions.cc" ]
+  }
+
+  if (is_lsan) {
+    sources += [ "//build/sanitizers/lsan_suppressions.cc" ]
+  }
+
+  if (is_tsan) {
+    sources += [ "//build/sanitizers/tsan_suppressions.cc" ]
+  }
+}
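+
+# For illustration, an executable can depend on the group unconditionally
+# (hypothetical target name):
+#   executable("my_tool") {
+#     deps = [ "//build/config/sanitizers:deps" ]
+#   }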
diff --git a/build/config/sanitizers/sanitizers.gni b/build/config/sanitizers/sanitizers.gni
new file mode 100644
index 0000000..a7b9658
--- /dev/null
+++ b/build/config/sanitizers/sanitizers.gni
@@ -0,0 +1,20 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Use libc++ (buildtools/third_party/libc++ and
+  # buildtools/third_party/libc++abi) instead of stdlibc++ as standard library.
+  # This is intended to be used for instrumented builds.
+  use_custom_libcxx = (is_asan && is_linux) || is_tsan || is_msan
+
+  # Track where uninitialized memory originates from. From fastest to slowest:
+  # 0 - no tracking, 1 - track only the initial allocation site, 2 - track the
+  # chain of stores leading from allocation site to use site.
+  msan_track_origins = 2
+
+  # Use dynamic libraries instrumented by one of the sanitizers instead of the
+  # standard system libraries. Set this flag to download prebuilt binaries from
+  # GCS.
+  use_prebuilt_instrumented_libraries = false
+}
diff --git a/build/config/sysroot.gni b/build/config/sysroot.gni
new file mode 100644
index 0000000..5bce02e
--- /dev/null
+++ b/build/config/sysroot.gni
@@ -0,0 +1,60 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This header file defines the "sysroot" variable which is the absolute path
+# of the sysroot. If no sysroot applies, the variable will be an empty string.
+
+import("//build/config/chrome_build.gni")
+
+declare_args() {
+  # The absolute path of the sysroot that is applied when compiling using
+  # the target toolchain.
+  target_sysroot = ""
+}
+
+if (current_toolchain == default_toolchain && target_sysroot != "") {
+  sysroot = target_sysroot
+} else if (is_android) {
+  import("//build/config/android/config.gni")
+  if (current_cpu == "x86") {
+    sysroot = rebase_path("$android_ndk_root/$x86_android_sysroot_subdir")
+  } else if (current_cpu == "arm") {
+    sysroot = rebase_path("$android_ndk_root/$arm_android_sysroot_subdir")
+  } else if (current_cpu == "mipsel") {
+    sysroot = rebase_path("$android_ndk_root/$mips_android_sysroot_subdir")
+  } else if (current_cpu == "x64") {
+    sysroot = rebase_path("$android_ndk_root/$x86_64_android_sysroot_subdir")
+  } else if (current_cpu == "arm64") {
+    sysroot = rebase_path("$android_ndk_root/$arm64_android_sysroot_subdir")
+  } else if (current_cpu == "mips64") {
+    sysroot = rebase_path("$android_ndk_root/$mips64_android_sysroot_subdir")
+  } else {
+    sysroot = ""
+  }
+} else if (is_linux && is_chrome_branded && is_official_build && !is_chromeos) {
+  # For official builds, use the sysroot checked into the internal source repo
+  # so that the builds work on older versions of Linux.
+  if (current_cpu == "x64") {
+    sysroot = rebase_path("//build/linux/debian_wheezy_amd64-sysroot")
+  } else if (current_cpu == "x86") {
+    sysroot = rebase_path("//build/linux/debian_wheezy_i386-sysroot")
+  } else {
+    # Any other builds don't use a sysroot.
+    sysroot = ""
+  }
+} else if (is_linux && !is_chromeos) {
+  if (current_cpu == "mipsel") {
+    sysroot = rebase_path("//mipsel-sysroot/sysroot")
+  } else {
+    sysroot = ""
+  }
+} else if (is_mac) {
+  import("//build/config/mac/mac_sdk.gni")
+  sysroot = mac_sdk_path
+} else if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+  sysroot = ios_sdk_path
+} else {
+  sysroot = ""
+}
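+
+# For illustration, a toolchain config would typically consume this variable
+# along these lines (sketch only):
+#   if (sysroot != "") {
+#     cflags = [ "--sysroot=" + sysroot ]
+#     ldflags = [ "--sysroot=" + sysroot ]
+#   }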
diff --git a/build/config/templates/templates.gni b/build/config/templates/templates.gni
new file mode 100644
index 0000000..ae00fca
--- /dev/null
+++ b/build/config/templates/templates.gni
@@ -0,0 +1,56 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Declare a target for processing a template.
+#
+# Variables
+#   input: The template file to be processed.
+#   output: Where to save the result.
+#   variables: A space-separated string of key=value variables to make
+#     available to the template processing environment, e.g.
+#     "name=foo color=red".
+#
+# Example
+#   file_template("chrome_shell_manifest") {
+#     input = "shell/java/AndroidManifest.xml"
+#     output = "$target_gen_dir/AndroidManifest.xml"
+#     variables = "app_name=chrome_shell app_version=1"
+#   }
+template("file_template") {
+  set_sources_assignment_filter([])
+
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.input),
+      "The input file must be specified")
+  assert(defined(invoker.output),
+      "The output file must be specified")
+  assert(defined(invoker.variables),
+      "The variable used for substitution in templates must be specified")
+
+  variables = invoker.variables
+
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/android/gyp/jinja_template.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    sources = [ invoker.input ]
+    outputs = [ invoker.output, depfile ]
+
+    args = [
+      "--inputs",
+      rebase_path(invoker.input, root_build_dir),
+      "--output",
+      rebase_path(invoker.output, root_build_dir),
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--variables=${variables}"
+    ]
+  }
+}
diff --git a/build/config/ui.gni b/build/config/ui.gni
new file mode 100644
index 0000000..c2dff4a
--- /dev/null
+++ b/build/config/ui.gni
@@ -0,0 +1,68 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains UI-related build flags. It should theoretically be in the
+# src/ui directory and only things that depend on the ui module should get the
+# definitions.
+#
+# However, today we have many "bad" dependencies on some of these flags from,
+# e.g. base, so they need to be global.
+#
+# See also build/config/features.gni
+
+declare_args() {
+  # Indicates if Ash is enabled. Ash is the Aura Shell which provides a
+  # desktop-like environment for Aura. Requires use_aura = true.
+  use_ash = is_win || is_linux
+
+  # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux
+  # that does not require X11.
+  use_ozone = false
+
+  # Indicates if GLFW is enabled. GLFW is an abstraction layer for the
+  # windowing system and OpenGL rendering, providing cross-platform support
+  # for creating windows and OpenGL surfaces and contexts, and handling
+  # window system events and input.
+  use_glfw = false
+
+  # Support ChromeOS touchpad gestures with ozone.
+  use_evdev_gestures = false
+
+  # Indicates if Aura is enabled. Aura is a low-level windowing library, sort
+  # of a replacement for GDI or GTK.
+  use_aura = is_win || is_linux
+
+  # True means the UI is built using the "views" framework.
+  toolkit_views = is_mac || is_win || is_chromeos || use_aura
+
+  # Whether the entire browser uses toolkit-views on Mac instead of Cocoa.
+  mac_views_browser = false
+
+  # Whether we should use glib, a low level C utility library.
+  use_glib = is_linux && !use_ozone
+}
+
+# Additional dependent variables -----------------------------------------------
+#
+# These variables depend on other variables and can't be set externally.
+
+use_cairo = false
+use_pango = false
+
+# Use GPU accelerated cross process image transport by default on linux builds
+# with the Aura window manager.
+ui_compositor_image_transport = use_aura && is_linux
+
+use_default_render_theme = use_aura || is_linux
+
+# Indicates if the UI toolkit depends on X11.
+use_x11 = is_linux && !use_ozone && !use_glfw
+
+use_ozone_evdev = use_ozone
+
+use_clipboard_aurax11 = is_linux && use_aura && use_x11
+
+enable_hidpi = is_mac || is_chromeos || is_win || is_linux
+
+enable_topchrome_md = false
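+
+# For illustration, overriding use_ozone = true in args.gn on a Linux build
+# makes the derived values above come out as use_x11 = false and
+# use_ozone_evdev = true.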
diff --git a/build/config/win/BUILD.gn b/build/config/win/BUILD.gn
new file mode 100644
index 0000000..261a1ac
--- /dev/null
+++ b/build/config/win/BUILD.gn
@@ -0,0 +1,181 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/win/visual_studio_version.gni")
+
+# Compiler setup for the Windows SDK. Applied to all targets.
+config("sdk") {
+  # The include path is the stuff returned by the script.
+  #include_dirs = msvc_config[0]  TODO(brettw) make this work.
+
+  defines = [
+    "_ATL_NO_OPENGL",
+    "_WINDOWS",
+    "CERT_CHAIN_PARA_HAS_EXTRA_FIELDS",
+    "NTDDI_VERSION=0x06030000",
+    "PSAPI_VERSION=1",
+    "WIN32",
+    "_SECURE_ATL",
+
+    # This is required for ATL to use XP-safe versions of its functions.
+    "_USING_V110_SDK71_",
+  ]
+}
+
+# Sets the default Windows build version. This is separated because some
+# targets need to manually override it for their compiles.
+config("winver") {
+  defines = [
+    "_WIN32_WINNT=0x0603",
+    "WINVER=0x0603",
+  ]
+}
+
+# Linker flags for Windows SDK setup, this is applied only to EXEs and DLLs.
+config("sdk_link") {
+  if (current_cpu == "x64") {
+    ldflags = [ "/MACHINE:X64" ]
+    lib_dirs = [
+      "$windows_sdk_path\Lib\winv6.3\um\x64",
+      "$visual_studio_path\VC\lib\amd64",
+      "$visual_studio_path\VC\atlmfc\lib\amd64",
+    ]
+  } else {
+    ldflags = [
+      "/MACHINE:X86",
+      "/SAFESEH",  # Not compatible with x64 so use only for x86.
+    ]
+    lib_dirs = [
+      "$windows_sdk_path\Lib\winv6.3\um\x86",
+      "$visual_studio_path\VC\lib",
+      "$visual_studio_path\VC\atlmfc\lib",
+    ]
+    if (!is_asan) {
+      ldflags += [ "/largeaddressaware" ]
+    }
+  }
+}
+
+# This default linker setup is provided separately from the SDK setup so
+# targets who want different library configurations can remove this and specify
+# their own.
+config("common_linker_setup") {
+  ldflags = [
+    "/FIXED:NO",
+    "/ignore:4199",
+    "/ignore:4221",
+    "/NXCOMPAT",
+
+    # Suggested by Microsoft Devrel to avoid
+    #   LINK : fatal error LNK1248: image size (80000000)
+    #   exceeds maximum allowable size (80000000)
+    # which started happening more regularly after VS2013 Update 4.
+    "/maxilksize:2147483647",
+  ]
+
+  # ASLR makes debugging with windbg difficult because Chrome.exe and
+  # Chrome.dll share the same base name. As result, windbg will name the
+  # Chrome.dll module like chrome_<base address>, where <base address>
+  # typically changes with each launch. This in turn means that breakpoints in
+  # Chrome.dll don't stick from one launch to the next. For this reason, we
+  # turn ASLR off in debug builds.
+  if (is_debug) {
+    ldflags += [ "/DYNAMICBASE:NO" ]
+  } else {
+    ldflags += [ "/DYNAMICBASE" ]
+  }
+
+  # Delay loaded DLLs.
+  ldflags += [
+    "/DELAYLOAD:dbghelp.dll",
+    "/DELAYLOAD:dwmapi.dll",
+    "/DELAYLOAD:shell32.dll",
+    "/DELAYLOAD:uxtheme.dll",
+  ]
+}
+
+# Subsystem --------------------------------------------------------------------
+
+# This is appended to the subsystem to specify a minimum version.
+if (current_cpu == "x64") {
+  # The number after the comma is the minimum required OS version.
+  # 5.02 = Windows Server 2003.
+  subsystem_version_suffix = ",5.02"
+} else {
+  # 5.01 = Windows XP.
+  subsystem_version_suffix = ",5.01"
+}
+
+config("console") {
+  ldflags = [ "/SUBSYSTEM:CONSOLE$subsystem_version_suffix" ]
+}
+config("windowed") {
+  ldflags = [ "/SUBSYSTEM:WINDOWS$subsystem_version_suffix" ]
+}
+
+# Incremental linking ----------------------------------------------------------
+
+incremental_linking_on_switch = [ "/INCREMENTAL" ]
+incremental_linking_off_switch = [ "/INCREMENTAL:NO" ]
+if (is_debug) {
+  default_incremental_linking_switch = incremental_linking_on_switch
+} else {
+  default_incremental_linking_switch = incremental_linking_off_switch
+}
+
+# Applies incremental linking or not depending on the current configuration.
+config("default_incremental_linking") {
+  ldflags = default_incremental_linking_switch
+}
+
+# Explicitly on or off incremental linking
+config("incremental_linking") {
+  ldflags = incremental_linking_on_switch
+}
+config("no_incremental_linking") {
+  ldflags = incremental_linking_off_switch
+}
+
+# Some large modules can't handle incremental linking in some situations. This
+# config should be applied to large modules to turn off incremental linking
+# when it won't work.
+config("default_large_module_incremental_linking") {
+  if (symbol_level > 0 && (current_cpu == "x86" || !is_component_build)) {
+    # When symbols are on, things get so large that the tools fail due to the
+    # size of the .ilk files.
+    ldflags = incremental_linking_off_switch
+  } else {
+    # Otherwise just do the default incremental linking for this build type.
+    ldflags = default_incremental_linking_switch
+  }
+}
+
+# Character set ----------------------------------------------------------------
+
+# Not including this config means "ansi" (8-bit system codepage).
+config("unicode") {
+  defines = [
+    "_UNICODE",
+    "UNICODE",
+  ]
+}
+
+# Lean and mean ----------------------------------------------------------------
+
+# Some third party code might not compile with WIN32_LEAN_AND_MEAN so we have
+# to have a separate config for it. Remove this config from your target to
+# get the "bloaty and accomodating" version of windows.h.
+config("lean_and_mean") {
+  defines = [ "WIN32_LEAN_AND_MEAN" ]
+}
+
+# Nominmax --------------------------------------------------------------------
+
+# Some third party code defines NOMINMAX before including windows.h, which
+# then causes warnings when it's been previously defined on the command line.
+# For such targets, this config can be removed.
+
+config("nominmax") {
+  defines = [ "NOMINMAX" ]
+}
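+
+# For illustration, a large target could swap the default incremental linking
+# config for the explicit off switch (sketch only):
+#   configs -= [ "//build/config/win:default_incremental_linking" ]
+#   configs += [ "//build/config/win:no_incremental_linking" ]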
diff --git a/build/config/win/visual_studio_version.gni b/build/config/win/visual_studio_version.gni
new file mode 100644
index 0000000..6a2828c
--- /dev/null
+++ b/build/config/win/visual_studio_version.gni
@@ -0,0 +1,39 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Path to Visual Studio. If empty, the default is used which is to use the
+  # automatic toolchain in depot_tools. If set, you must also set the
+  # visual_studio_version and wdk_path.
+  visual_studio_path = ""
+
+  # Version of Visual Studio pointed to by the visual_studio_path.
+  # Use "2013" for Visual Studio 2013, or "2013e" for the Express version.
+  visual_studio_version = ""
+
+  # Directory of the Windows driver kit. If visual_studio_path is empty, this
+  # will be auto-filled.
+  wdk_path = ""
+
+  # Full path to the Windows SDK, not including a backslash at the end.
+  # This value is the default location, override if you have a different
+  # installation location.
+  windows_sdk_path = "C:\Program Files (x86)\Windows Kits\8.1"
+}
+
+if (visual_studio_path == "") {
+  toolchain_data =
+      exec_script("../../vs_toolchain.py", [ "get_toolchain_dir" ], "scope")
+  visual_studio_path = toolchain_data.vs_path
+  windows_sdk_path = toolchain_data.sdk_path
+  visual_studio_version = toolchain_data.vs_version
+  wdk_path = toolchain_data.wdk_dir
+  visual_studio_runtime_dirs = toolchain_data.runtime_dirs
+} else {
+  assert(visual_studio_version != "",
+         "You must set the visual_studio_version if you set the path")
+  assert(wdk_path != "",
+         "You must set the wdk_path if you set the visual studio path")
+  visual_studio_runtime_dirs = []
+}
diff --git a/build/copy_test_data_ios.gypi b/build/copy_test_data_ios.gypi
new file mode 100644
index 0000000..576a0f2
--- /dev/null
+++ b/build/copy_test_data_ios.gypi
@@ -0,0 +1,53 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to copy test data files into
+# an iOS app bundle. To use this the following variables need to be defined:
+#   test_data_files: list: paths to test data files or directories
+#   test_data_prefix: string: a directory prefix that will be prepended to each
+#                             output path.  Generally, this should be the base
+#                             directory of the gypi file containing the unittest
+#                             target (e.g. "base" or "chrome").
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_unittests',
+#   'conditions': [
+#     ['OS == "ios"', {
+#       'actions': [
+#         {
+#           'action_name': 'copy_test_data',
+#           'variables': {
+#             'test_data_files': [
+#               'path/to/datafile.txt',
+#               'path/to/data/directory/',
+#             ],
+#             'test_data_prefix' : 'prefix',
+#           },
+#           'includes': ['path/to/this/gypi/file'],
+#         },
+#       ],
+#     }],
+#   ],
+# }
+#
+
+{
+  'inputs': [
+    # The |-o <(test_data_prefix)| is ignored; it is there to work around a
+    # caching bug in gyp (https://code.google.com/p/gyp/issues/detail?id=112).
+    # It caches command output when the string is the same, so if two copy
+    # steps have the same relative paths, there can be bogus cache hits that
+    # cause compile failures unless something varies.
+    '<!@pymod_do_main(copy_test_data_ios -o <(test_data_prefix) --inputs <(test_data_files))',
+  ],
+  'outputs': [
+    '<!@pymod_do_main(copy_test_data_ios -o <(PRODUCT_DIR)/<(_target_name).app/<(test_data_prefix) --outputs <(test_data_files))',
+  ],
+  'action': [
+    'python',
+    '<(DEPTH)/build/copy_test_data_ios.py',
+    '-o', '<(PRODUCT_DIR)/<(_target_name).app/<(test_data_prefix)',
+    '<@(_inputs)',
+  ],
+}
diff --git a/build/copy_test_data_ios.py b/build/copy_test_data_ios.py
new file mode 100755
index 0000000..6f0302f
--- /dev/null
+++ b/build/copy_test_data_ios.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies test data files or directories into a given output directory."""
+
+import optparse
+import os
+import shutil
+import sys
+
+class WrongNumberOfArgumentsException(Exception):
+  pass
+
+def EscapePath(path):
+  """Returns a path with spaces escaped."""
+  return path.replace(" ", "\\ ")
+
+def ListFilesForPath(path):
+  """Returns a list of all the files under a given path."""
+  output = []
+  # Ignore revision control metadata directories.
+  if (os.path.basename(path).startswith('.git') or
+      os.path.basename(path).startswith('.svn')):
+    return output
+
+  # Files get returned without modification.
+  if not os.path.isdir(path):
+    output.append(path)
+    return output
+
+  # Directories get recursively expanded.
+  contents = os.listdir(path)
+  for item in contents:
+    full_path = os.path.join(path, item)
+    output.extend(ListFilesForPath(full_path))
+  return output
+
+def CalcInputs(inputs):
+  """Computes the full list of input files for a set of command-line arguments.
+  """
+  # |inputs| is a list of paths, which may be directories.
+  output = []
+  for input in inputs:
+    output.extend(ListFilesForPath(input))
+  return output
+
+def CopyFiles(relative_filenames, output_basedir):
+  """Copies files to the given output directory."""
+  for file in relative_filenames:
+    relative_dirname = os.path.dirname(file)
+    output_dir = os.path.join(output_basedir, relative_dirname)
+    output_filename = os.path.join(output_basedir, file)
+
+    # In cases where a directory has turned into a file or vice versa, delete it
+    # before copying it below.
+    if os.path.exists(output_dir) and not os.path.isdir(output_dir):
+      os.remove(output_dir)
+    if os.path.exists(output_filename) and os.path.isdir(output_filename):
+      shutil.rmtree(output_filename)
+
+    if not os.path.exists(output_dir):
+      os.makedirs(output_dir)
+    shutil.copy(file, output_filename)
+
+def DoMain(argv):
+  parser = optparse.OptionParser()
+  usage = 'Usage: %prog -o <output_dir> [--inputs] [--outputs] <input_files>'
+  parser.set_usage(usage)
+  parser.add_option('-o', dest='output_dir')
+  parser.add_option('--inputs', action='store_true', dest='list_inputs')
+  parser.add_option('--outputs', action='store_true', dest='list_outputs')
+  options, arglist = parser.parse_args(argv)
+
+  if len(arglist) == 0:
+    raise WrongNumberOfArgumentsException('<input_files> required.')
+
+  files_to_copy = CalcInputs(arglist)
+  escaped_files = [EscapePath(x) for x in CalcInputs(arglist)]
+  if options.list_inputs:
+    return '\n'.join(escaped_files)
+
+  if not options.output_dir:
+    raise WrongNumberOfArgumentsException('-o required.')
+
+  if options.list_outputs:
+    outputs = [os.path.join(options.output_dir, x) for x in escaped_files]
+    return '\n'.join(outputs)
+
+  CopyFiles(files_to_copy, options.output_dir)
+  return
+
+def main(argv):
+  try:
+    result = DoMain(argv[1:])
+  except WrongNumberOfArgumentsException, e:
+    print >>sys.stderr, e
+    return 1
+  if result:
+    print result
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
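+
+# Example invocations (paths are illustrative):
+#   copy_test_data_ios.py -o out/app/base --inputs base/test/data
+#   copy_test_data_ios.py -o out/app/base --outputs base/test/data
+#   copy_test_data_ios.py -o out/app/base base/test/data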
diff --git a/build/cp.py b/build/cp.py
new file mode 100755
index 0000000..0f32536
--- /dev/null
+++ b/build/cp.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copy a file.
+
+This module works much like the cp posix command - it takes 2 arguments:
+(src, dst) and copies the file with path |src| to |dst|.
+"""
+
+import os
+import shutil
+import sys
+
+
+def Main(src, dst):
+  # Use copy instead of copyfile to ensure the executable bit is copied.
+  return shutil.copy(src, os.path.normpath(dst))
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1], sys.argv[2]))
diff --git a/build/detect_host_arch.py b/build/detect_host_arch.py
new file mode 100755
index 0000000..19579eb
--- /dev/null
+++ b/build/detect_host_arch.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Outputs host CPU architecture in format recognized by gyp."""
+
+import platform
+import re
+import sys
+
+
+def HostArch():
+  """Returns the host architecture with a predictable string."""
+  host_arch = platform.machine()
+
+  # Convert machine type to format recognized by gyp.
+  if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
+    host_arch = 'ia32'
+  elif host_arch in ['x86_64', 'amd64']:
+    host_arch = 'x64'
+  elif host_arch.startswith('arm'):
+    host_arch = 'arm'
+
+  # platform.machine is based on running kernel. It's possible to use 64-bit
+  # kernel with 32-bit userland, e.g. to give linker slightly more memory.
+  # Distinguish between different userland bitness by querying
+  # the python binary.
+  if host_arch == 'x64' and platform.architecture()[0] == '32bit':
+    host_arch = 'ia32'
+
+  return host_arch
+
+def DoMain(_):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return HostArch()
+
+if __name__ == '__main__':
+  print DoMain([])
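+
+# For illustration, running this on a 64-bit x86 Linux host prints "x64";
+# the same host running a 32-bit Python prints "ia32".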
diff --git a/build/dir_exists.py b/build/dir_exists.py
new file mode 100755
index 0000000..70d367e
--- /dev/null
+++ b/build/dir_exists.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes True if the argument is a directory."""
+
+import os.path
+import sys
+
+def main():
+  sys.stdout.write(_is_dir(sys.argv[1]))
+  return 0
+
+def _is_dir(dir_name):
+  return str(os.path.isdir(dir_name))
+
+def DoMain(args):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return _is_dir(args[0])
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/download_gold_plugin.py b/build/download_gold_plugin.py
new file mode 100755
index 0000000..cd7ca41
--- /dev/null
+++ b/build/download_gold_plugin.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to download LLVM gold plugin from google storage."""
+
+import json
+import os
+import shutil
+import subprocess
+import sys
+import zipfile
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+sys.path.insert(0, os.path.join(CHROME_SRC, 'tools'))
+
+import find_depot_tools
+
+DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
+GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
+
+LLVM_BUILD_PATH = os.path.join(CHROME_SRC, 'third_party', 'llvm-build',
+                               'Release+Asserts')
+CLANG_UPDATE_PY = os.path.join(CHROME_SRC, 'tools', 'clang', 'scripts',
+                               'update.py')
+CLANG_REVISION = os.popen(CLANG_UPDATE_PY + ' --print-revision').read().rstrip()
+
+CLANG_BUCKET = 'gs://chromium-browser-clang/Linux_x64'
+
+def main():
+  targz_name = 'llvmgold-%s.tgz' % CLANG_REVISION
+  remote_path = '%s/%s' % (CLANG_BUCKET, targz_name)
+
+  os.chdir(LLVM_BUILD_PATH)
+
+  subprocess.check_call(['python', GSUTIL_PATH,
+                         'cp', remote_path, targz_name])
+  subprocess.check_call(['tar', 'xzf', targz_name])
+  os.remove(targz_name)
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/download_nacl_toolchains.py b/build/download_nacl_toolchains.py
new file mode 100755
index 0000000..b99b940
--- /dev/null
+++ b/build/download_nacl_toolchains.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shim to run nacl toolchain download script only if there is a nacl dir."""
+
+import os
+import shutil
+import sys
+
+
+def Main(args):
+  # Exit early if disable_nacl=1.
+  if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''):
+    return 0
+  script_dir = os.path.dirname(os.path.abspath(__file__))
+  src_dir = os.path.dirname(script_dir)
+  nacl_dir = os.path.join(src_dir, 'native_client')
+  nacl_build_dir = os.path.join(nacl_dir, 'build')
+  package_version_dir = os.path.join(nacl_build_dir, 'package_version')
+  package_version = os.path.join(package_version_dir, 'package_version.py')
+  if not os.path.exists(package_version):
+    print "Can't find '%s'" % package_version
+    print 'Presumably you are intentionally building without NativeClient.'
+    print 'Skipping NativeClient toolchain download.'
+    sys.exit(0)
+  sys.path.insert(0, package_version_dir)
+  import package_version
+
+  # We remove the --optional-pnacl argument, and instead exclude the
+  # pnacl_newlib package for most cases. However, if the bot name is an SDK
+  # bot then we go ahead and download it. This prevents increasing the
+  # gclient sync time for developers, or standard Chrome bots.
+  if '--optional-pnacl' in args:
+    args.remove('--optional-pnacl')
+    use_pnacl = False
+    buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '')
+    if 'pnacl' in buildbot_name and 'sdk' in buildbot_name:
+      use_pnacl = True
+    if use_pnacl:
+      print '\n*** DOWNLOADING PNACL TOOLCHAIN ***\n'
+    else:
+      args = ['--exclude', 'pnacl_newlib'] + args
+
+  # Only download the ARM gcc toolchain if we are building for ARM
+  # TODO(olonho): we need to invent more reliable way to get build
+  # configuration info, to know if we're building for ARM.
+  if 'target_arch=arm' not in os.environ.get('GYP_DEFINES', ''):
+    args = ['--exclude', 'nacl_arm_newlib'] + args
+
+  package_version.main(args)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/build/download_sdk_extras.py b/build/download_sdk_extras.py
new file mode 100755
index 0000000..d7c5d6c
--- /dev/null
+++ b/build/download_sdk_extras.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to download sdk/extras packages on the bots from google storage.
+
+The script expects arguments that specify zips file in the google storage
+bucket named: <dir in SDK extras>_<package name>_<version>.zip. The file will
+be extracted in the android_tools/sdk/extras directory on the test bots. This
+script will not do anything for developers.
+
+TODO(navabi): Move this script (crbug.com/459819).
+"""
+
+import json
+import os
+import shutil
+import subprocess
+import sys
+import zipfile
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+sys.path.insert(0, os.path.join(SCRIPT_DIR, 'android'))
+sys.path.insert(1, os.path.join(CHROME_SRC, 'tools'))
+
+from pylib import constants
+import find_depot_tools
+
+DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
+GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
+SDK_EXTRAS_BUCKET = 'gs://chrome-sdk-extras'
+SDK_EXTRAS_PATH = os.path.join(constants.ANDROID_SDK_ROOT, 'extras')
+SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
+                                    'android_sdk_extras.json')
+
+
+def clean_and_extract(dir_name, package_name, zip_file):
+  local_dir = '%s/%s/%s' % (SDK_EXTRAS_PATH, dir_name, package_name)
+  if os.path.exists(local_dir):
+    shutil.rmtree(local_dir)
+  local_zip = '%s/%s' % (SDK_EXTRAS_PATH, zip_file)
+  with zipfile.ZipFile(local_zip) as z:
+    z.extractall(path=SDK_EXTRAS_PATH)
+
+
+def main():
+  if not os.environ.get('CHROME_HEADLESS'):
+    # This is not a buildbot checkout.
+    return 0
+  # Read android_sdk_extras.json to determine which packages to download.
+  with open(SDK_EXTRAS_JSON_FILE) as json_file:
+    packages = json.load(json_file)
+  for package in packages:
+    local_zip = '%s/%s' % (SDK_EXTRAS_PATH, package['zip'])
+    if not os.path.exists(local_zip):
+      package_zip = '%s/%s' % (SDK_EXTRAS_BUCKET, package['zip'])
+      try:
+        subprocess.check_call(['python', GSUTIL_PATH, '--force-version', '4.7',
+                               'cp', package_zip, local_zip])
+      except subprocess.CalledProcessError:
+        print ('WARNING: Failed to download SDK packages. If this bot compiles '
+               'for Android, it may have errors.')
+        return 0
+    # Always clean dir and extract zip to ensure correct contents.
+    clean_and_extract(package['dir_name'], package['package'], package['zip'])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
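+
+# For illustration, an android_sdk_extras.json entry is assumed to look like:
+#   [
+#     {
+#       "dir_name": "google",
+#       "package": "google_play_services",
+#       "zip": "google_google_play_services_1234.zip"
+#     }
+#   ]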
diff --git a/build/env_dump.py b/build/env_dump.py
new file mode 100755
index 0000000..21edfe6
--- /dev/null
+++ b/build/env_dump.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script can either source a file and dump the environment changes done by
+# it, or just simply dump the current environment as JSON into a file.
+
+import json
+import optparse
+import os
+import pipes
+import subprocess
+import sys
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-f', '--output-json',
+                    help='File to dump the environment as JSON into.')
+  parser.add_option(
+      '-d', '--dump-mode', action='store_true',
+      help='Dump the environment to sys.stdout and exit immediately.')
+
+  parser.disable_interspersed_args()
+  options, args = parser.parse_args()
+  if options.dump_mode:
+    if args or options.output_json:
+      parser.error('Cannot specify args or --output-json with --dump-mode.')
+    json.dump(dict(os.environ), sys.stdout)
+  else:
+    if not options.output_json:
+      parser.error('Requires --output-json option.')
+
+    envsetup_cmd = ' '.join(map(pipes.quote, args))
+    full_cmd = [
+        'bash', '-c',
+        '. %s > /dev/null; %s -d' % (envsetup_cmd, os.path.abspath(__file__))
+    ]
+    try:
+      output = subprocess.check_output(full_cmd)
+    except Exception as e:
+      sys.exit('Error running %s and dumping environment.' % envsetup_cmd)
+
+    env_diff = {}
+    new_env = json.loads(output)
+    for k, val in new_env.items():
+      if k == '_' or (k in os.environ and os.environ[k] == val):
+        continue
+      env_diff[k] = val
+    with open(options.output_json, 'w') as f:
+      json.dump(env_diff, f)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
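+
+# Example invocations (paths are illustrative):
+#   env_dump.py --dump-mode                # dump os.environ as JSON to stdout
+#   env_dump.py -f diff.json path/to/envsetup.sh   # dump changes from sourcing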
diff --git a/build/extract_from_cab.py b/build/extract_from_cab.py
new file mode 100755
index 0000000..080370c
--- /dev/null
+++ b/build/extract_from_cab.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts a single file from a CAB archive."""
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+def run_quiet(*args):
+  """Run 'expand' suppressing noisy output. Returns returncode from process."""
+  popen = subprocess.Popen(args, stdout=subprocess.PIPE)
+  out, _ = popen.communicate()
+  if popen.returncode:
+    # expand emits errors to stdout, so if we fail, then print that out.
+    print out
+  return popen.returncode
+
+def main():
+  if len(sys.argv) != 4:
+    print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
+    return 1
+
+  [cab_path, archived_file, output_dir] = sys.argv[1:]
+
+  # Expand.exe does its work in a fixed-named temporary directory created within
+  # the given output directory. This is a problem for concurrent extractions, so
+  # create a unique temp dir within the desired output directory to work around
+  # this limitation.
+  temp_dir = tempfile.mkdtemp(dir=output_dir)
+
+  try:
+    # Invoke the Windows expand utility to extract the file.
+    level = run_quiet('expand', cab_path, '-F:' + archived_file, temp_dir)
+    if level == 0:
+      # Move the output file into place, preserving expand.exe's behavior of
+      # paving over any preexisting file.
+      output_file = os.path.join(output_dir, archived_file)
+      try:
+        os.remove(output_file)
+      except OSError:
+        pass
+      os.rename(os.path.join(temp_dir, archived_file), output_file)
+  finally:
+    shutil.rmtree(temp_dir, True)
+
+  if level != 0:
+    return level
+
+  # The expand utility preserves the modification date and time of the archived
+  # file. Touch the extracted file. This helps build systems that compare the
+  # modification times of input and output files to determine whether to do an
+  # action.
+  os.utime(os.path.join(output_dir, archived_file), None)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
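+
+# Example invocation (paths are illustrative):
+#   extract_from_cab.py patch.cab dbghelp.dll out/Release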
diff --git a/build/filename_rules.gypi b/build/filename_rules.gypi
new file mode 100644
index 0000000..f67287f
--- /dev/null
+++ b/build/filename_rules.gypi
@@ -0,0 +1,106 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This gypi file defines the patterns used for determining whether a
+# file is excluded from the build on a given platform.  It is
+# included by common.gypi for chromium_code.
+
+{
+  'target_conditions': [
+    ['OS!="win" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_win(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)win/'],
+                    ['exclude', '(^|/)win_[^/]*\\.(h|cc)$'] ],
+    }],
+    ['OS!="mac" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_(cocoa|mac)(_unittest)?\\.(h|cc|mm?)$'],
+                    ['exclude', '(^|/)(cocoa|mac)/'] ],
+    }],
+    ['OS!="ios" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ios(_unittest)?\\.(h|cc|mm?)$'],
+                    ['exclude', '(^|/)ios/'] ],
+    }],
+    ['(OS!="mac" and OS!="ios") or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '\\.mm?$' ] ],
+    }],
+    # Do not exclude the linux files on *BSD since most of them can be
+    # shared at this point.
+    # In case a file is not needed, it is going to be excluded later on.
+    # TODO(evan): the above is not correct; we shouldn't build _linux
+    # files on non-linux.
+    ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_linux(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)linux/'],
+      ],
+    }],
+    ['OS!="android" or _toolset=="host" or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_android(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)android/'],
+      ],
+    }],
+    ['OS=="win" and >(nacl_untrusted_build)==0', {
+      'sources/': [
+        ['exclude', '_posix(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)posix/'],
+      ],
+    }],
+    ['<(chromeos)!=1 or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_chromeos(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)chromeos/'],
+      ],
+    }],
+    ['>(nacl_untrusted_build)==0', {
+      'sources/': [
+        ['exclude', '_nacl(_unittest)?\\.(h|cc)$'],
+      ],
+    }],
+    ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_xdg(_unittest)?\\.(h|cc)$'],
+      ],
+    }],
+    ['<(use_x11)!=1 or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_(x|x11)(_interactive_uitest|_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)x11_[^/]*\\.(h|cc)$'],
+        ['exclude', '(^|/)x11/'],
+        ['exclude', '(^|/)x/'],
+      ],
+    }],
+    ['<(toolkit_views)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_views(_browsertest|_unittest)?\\.(h|cc)$'] ]
+    }],
+    ['<(use_aura)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aura(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)aura/'],
+      ]
+    }],
+    ['<(use_aura)==0 or <(use_x11)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aurax11(_browsertest|_unittest)?\\.(h|cc)$'] ]
+    }],
+    ['<(use_aura)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aurawin\\.(h|cc)$'] ]
+    }],
+    ['<(use_aura)==0 or OS!="linux" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_auralinux\\.(h|cc)$'] ]
+    }],
+    ['<(use_ash)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ash(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)ash/'],
+      ]
+    }],
+    ['<(use_ash)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ashwin\\.(h|cc)$'] ]
+    }],
+    ['<(use_ozone)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ozone(_browsertest|_unittest)?\\.(h|cc)$'] ]
+    }],
+    ['<(use_pango)==0', {
+      'sources/': [ ['exclude', '(^|_)pango(_util|_browsertest|_unittest)?\\.(h|cc)$'], ],
+    }],
+  ]
+}
diff --git a/build/find_isolated_tests.py b/build/find_isolated_tests.py
new file mode 100755
index 0000000..c5b3ab7
--- /dev/null
+++ b/build/find_isolated_tests.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Scans build output directory for .isolated files, calculates their SHA1
+hashes, stores final list in JSON document and then removes *.isolated files
+found (to ensure no stale *.isolated stay around on the next build).
+
+Used to figure out what tests were built in isolated mode to trigger these
+tests to run on swarming.
+
+For more info see:
+https://sites.google.com/a/chromium.org/dev/developers/testing/isolated-testing
+"""
+
+import glob
+import hashlib
+import json
+import optparse
+import os
+import re
+import sys
+
+
+def hash_file(filepath):
+  """Calculates the hash of a file without reading it all in memory at once."""
+  digest = hashlib.sha1()
+  with open(filepath, 'rb') as f:
+    while True:
+      chunk = f.read(1024*1024)
+      if not chunk:
+        break
+      digest.update(chunk)
+  return digest.hexdigest()
+
+
+def main():
+  parser = optparse.OptionParser(
+      usage='%prog --build-dir <path> --output-json <path>',
+      description=sys.modules[__name__].__doc__)
+  parser.add_option(
+      '--build-dir',
+      help='Path to a directory to search for *.isolated files.')
+  parser.add_option(
+      '--output-json',
+      help='File to dump JSON results into.')
+
+  options, _ = parser.parse_args()
+  if not options.build_dir:
+    parser.error('--build-dir option is required')
+  if not options.output_json:
+    parser.error('--output-json option is required')
+
+  result = {}
+
+  # Get the file hash values and output the pair.
+  pattern = os.path.join(options.build_dir, '*.isolated')
+  for filepath in sorted(glob.glob(pattern)):
+    test_name = os.path.splitext(os.path.basename(filepath))[0]
+    if re.match(r'^.+?\.\d+$', test_name):
+      # It's a split .isolated file, e.g. foo.0.isolated. Ignore these.
+      continue
+
+    # TODO(csharp): Remove deletion once the isolate tracked dependencies are
+    # inputs for the isolated files.
+    sha1_hash = hash_file(filepath)
+    os.remove(filepath)
+    result[test_name] = sha1_hash
+
+  with open(options.output_json, 'wb') as f:
+    json.dump(result, f)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/gdb-add-index b/build/gdb-add-index
new file mode 100755
index 0000000..992ac16
--- /dev/null
+++ b/build/gdb-add-index
@@ -0,0 +1,162 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Saves the gdb index for a given binary and its shared library dependencies.
+#
+# This will run gdb index in parallel on a number of binaries using SIGUSR1
+# as the communication mechanism to simulate a semaphore. Because of the
+# nature of this technique, using "set -e" is very difficult. The SIGUSR1
+# terminates a "wait" with an error which we need to interpret.
+#
+# When modifying this code, most of the real logic is in the index_one_file
+# function. The rest is cleanup + semaphore plumbing.
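+#
+# Example invocation (binary path is illustrative):
+#   build/gdb-add-index out/Debug/chrome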
+
+# Cleanup temp directory and ensure all child jobs are dead-dead.
+function on_exit {
+  trap "" EXIT USR1  # Avoid reentrancy.
+
+  local jobs=$(jobs -p)
+  if [ -n "$jobs" ]; then
+    echo -n "Killing outstanding index jobs..."
+    kill -KILL $(jobs -p)
+    wait
+    echo "done"
+  fi
+
+  if [ -f "$DIRECTORY" ]; then
+    echo -n "Removing temp directory $DIRECTORY..."
+    rm -rf $DIRECTORY
+    echo done
+  fi
+}
+
+# Add index to one binary.
+function index_one_file {
+  local file=$1
+  local basename=$(basename "$file")
+  local should_index="${SHOULD_INDEX}"
+
+  local readelf_out=$(${TOOLCHAIN_PREFIX}readelf -S "$file")
+  if [[ $readelf_out =~ "gdb_index" ]]; then
+    if [ "${REMOVE_INDEX}" = 1 ]; then
+      ${TOOLCHAIN_PREFIX}objcopy --remove-section .gdb_index "$file"
+      echo "Removed index from $basename."
+    else
+      echo "Skipped $basename -- already contains index."
+      should_index=0
+    fi
+  fi
+
+  if [ "${should_index}" = 1 ]; then
+    local start=$(date +"%s%N")
+    echo "Adding index to $basename..."
+
+    ${TOOLCHAIN_PREFIX}gdb -batch "$file" -ex "save gdb-index $DIRECTORY" \
+      -ex "quit"
+    local index_file="$DIRECTORY/$basename.gdb-index"
+    if [ -f "$index_file" ]; then
+      ${TOOLCHAIN_PREFIX}objcopy --add-section .gdb_index="$index_file" \
+        --set-section-flags .gdb_index=readonly "$file" "$file"
+      local finish=$(date +"%s%N")
+      local elapsed=$(((finish - start)/1000000))
+      echo "   ...$basename indexed. [${elapsed}ms]"
+    else
+      echo "   ...$basename unindexable."
+    fi
+  fi
+}
+
+# Functions that when combined, concurrently index all files in FILES_TO_INDEX
+# array. The global FILES_TO_INDEX is declared in the main body of the script.
+function async_index {
+  # Start a background subshell to run the index command.
+  {
+    index_one_file $1
+    kill -SIGUSR1 $$  # $$ resolves to the parent script.
+    exit 129  # See comment above wait loop at bottom.
+  } &
+}
+
+CUR_FILE_NUM=0
+function index_next {
+  if (( CUR_FILE_NUM >= ${#FILES_TO_INDEX[@]} )); then
+    return
+  fi
+
+  async_index "${FILES_TO_INDEX[CUR_FILE_NUM]}"
+  ((CUR_FILE_NUM += 1)) || true
+}
+
+
+########
+### Main body of the script.
+
+REMOVE_INDEX=0
+SHOULD_INDEX=1
+while getopts ":f:r" opt; do
+  case $opt in
+    f)
+      REMOVE_INDEX=1
+      shift
+      ;;
+    r)
+      REMOVE_INDEX=1
+      SHOULD_INDEX=0
+      shift
+      ;;
+    *)
+      echo "Invalid option: -$OPTARG" >&2
+      ;;
+  esac
+done
+
+if [[ ! $# == 1 ]]; then
+  echo "Usage: $0 [-f] [-r] path-to-binary"
+  echo "  -f forces replacement of an existing index."
+  echo "  -r removes the index section."
+  exit 1
+fi
+
+FILENAME="$1"
+if [[ ! -f "$FILENAME" ]]; then
+  echo "Path $FILENAME does not exist."
+  exit 1
+fi
+
+# Ensure we clean up on exit.
+trap on_exit EXIT
+
+# We're good to go! Create temp directory for index files.
+DIRECTORY=$(mktemp -d)
+echo "Made temp directory $DIRECTORY."
+
+# Create array with the filename and all shared libraries that
+# have the same dirname. The dirname is a signal that these
+# shared libraries were part of the same build as the binary.
+declare -a FILES_TO_INDEX=($FILENAME
+ $(ldd "$FILENAME" 2>/dev/null \
+  | grep $(dirname "$FILENAME") \
+  | sed "s/.*[ \t]\(.*\) (.*/\1/")
+)
+
+# Start concurrent indexing.
+trap index_next USR1
+
+# 4 is an arbitrary default. When changing, remember we are likely IO bound
+# so basing this off the number of cores is not sensible.
+INDEX_TASKS=${INDEX_TASKS:-4}
+for ((i=0;i<${INDEX_TASKS};i++)); do
+  index_next
+done
+
+# Do a wait loop. Bash waits that terminate due to a trap have an exit
+# code > 128. We also ensure that our subshell's "normal" exit occurs with
+# an exit code > 128. This allows us to consider a > 128 exit code as
+# an indication that the loop should continue. Unfortunately, it also means
+# we cannot use set -e since technically the "wait" is failing.
+wait
+while (( $? > 128 )); do
+  wait
+done
diff --git a/build/get_landmines.py b/build/get_landmines.py
new file mode 100755
index 0000000..92f81fe
--- /dev/null
+++ b/build/get_landmines.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This file emits the list of reasons why a particular build needs to be clobbered
+(or a list of 'landmines').
+"""
+
+import sys
+
+import landmine_utils
+
+
+builder = landmine_utils.builder
+distributor = landmine_utils.distributor
+gyp_defines = landmine_utils.gyp_defines
+gyp_msvs_version = landmine_utils.gyp_msvs_version
+platform = landmine_utils.platform
+
+
+def print_landmines():
+  """
+  ALL LANDMINES ARE EMITTED FROM HERE.
+  """
+  # DO NOT add landmines as part of a regular CL. Landmines are a last-resort
+  # band-aid fix when a landed CL has a build dependency bug and all bots
+  # need to be cleaned up. If you're writing a new CL that causes build
+  # dependency problems, fix the dependency problems instead of adding a
+  # landmine.
+
+  print "Let's start a new landmines file."
+
+
+def main():
+  print_landmines()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/get_sdk_extras_packages.py b/build/get_sdk_extras_packages.py
new file mode 100755
index 0000000..a90b8a8
--- /dev/null
+++ b/build/get_sdk_extras_packages.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import sys
+
+SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
+                                    'android_sdk_extras.json')
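+
+# android_sdk_extras.json is expected to hold a list of package entries, each
+# with at least a 'package_id' key, e.g. (illustrative):
+#   [{"package_id": "extra-google-google_play_services"}]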
+
+def main():
+  with open(SDK_EXTRAS_JSON_FILE) as json_file:
+    packages = json.load(json_file)
+
+  out = []
+  for package in packages:
+    out.append(package['package_id'])
+
+  print ','.join(out)
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/get_syzygy_binaries.py b/build/get_syzygy_binaries.py
new file mode 100755
index 0000000..1cab3fc
--- /dev/null
+++ b/build/get_syzygy_binaries.py
@@ -0,0 +1,487 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A utility script for downloading versioned Syzygy binaries."""
+
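+# Example invocation (revision hash is illustrative):
+#   python build/get_syzygy_binaries.py \
+#       --output-dir=third_party/syzygy/binaries \
+#       --revision=0123456789abcdef0123456789abcdef01234567
+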
+import errno
+import hashlib
+import json
+import logging
+import optparse
+import os
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+import time
+import zipfile
+
+
+_LOGGER = logging.getLogger(os.path.basename(__file__))
+
+# The relative path where official builds are archived in their GS bucket.
+_SYZYGY_ARCHIVE_PATH = ('/builds/official/%(revision)s')
+
+# A JSON file containing the state of the download directory. If this file and
+# directory state do not agree, then the binaries will be downloaded and
+# installed again.
+_STATE = '.state'
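+# A valid state file looks like the following (paths and digests are
+# illustrative):
+#   {"revision": "<hash>", "contents": {"exe/instrument.exe": "<md5>"}}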
+
+# This matches an integer (an SVN revision number) or a SHA1 value (a GIT hash).
+# The archive exclusively uses lowercase GIT hashes.
+_REVISION_RE = re.compile(r'^(?:\d+|[a-f0-9]{40})$')
+
+# This matches an MD5 hash.
+_MD5_RE = re.compile('^[a-f0-9]{32}$')
+
+# List of resources to be downloaded and installed. These are tuples with the
+# following format:
+# (basename, logging name, relative installation path, extraction filter)
+_RESOURCES = [
+  ('benchmark.zip', 'benchmark', '', None),
+  ('binaries.zip', 'binaries', 'exe', None),
+  ('symbols.zip', 'symbols', 'exe',
+      lambda x: x.filename.endswith('.dll.pdb'))]
+
+
+def _LoadState(output_dir):
+  """Loads the contents of the state file for a given |output_dir|, returning
+  None if it doesn't exist.
+  """
+  path = os.path.join(output_dir, _STATE)
+  if not os.path.exists(path):
+    _LOGGER.debug('No state file found.')
+    return None
+  with open(path, 'rb') as f:
+    _LOGGER.debug('Reading state file: %s', path)
+    try:
+      return json.load(f)
+    except ValueError:
+      _LOGGER.debug('Invalid state file.')
+      return None
+
+
+def _SaveState(output_dir, state, dry_run=False):
+  """Saves the |state| dictionary to the given |output_dir| as a JSON file."""
+  path = os.path.join(output_dir, _STATE)
+  _LOGGER.debug('Writing state file: %s', path)
+  if dry_run:
+    return
+  with open(path, 'wb') as f:
+    f.write(json.dumps(state, sort_keys=True, indent=2))
+
+
+def _Md5(path):
+  """Returns the MD5 hash of the file at |path|, which must exist."""
+  return hashlib.md5(open(path, 'rb').read()).hexdigest()
+
+
+def _StateIsValid(state):
+  """Returns true if the given state structure is valid."""
+  if not isinstance(state, dict):
+    _LOGGER.debug('State must be a dict.')
+    return False
+  r = state.get('revision', None)
+  if not isinstance(r, basestring) or not _REVISION_RE.match(r):
+    _LOGGER.debug('State contains an invalid revision.')
+    return False
+  c = state.get('contents', None)
+  if not isinstance(c, dict):
+    _LOGGER.debug('State must contain a contents dict.')
+    return False
+  for (relpath, md5) in c.iteritems():
+    if not isinstance(relpath, basestring) or len(relpath) == 0:
+      _LOGGER.debug('State contents dict contains an invalid path.')
+      return False
+    if not isinstance(md5, basestring) or not _MD5_RE.match(md5):
+      _LOGGER.debug('State contents dict contains an invalid MD5 digest.')
+      return False
+  return True
+
+
+def _BuildActualState(stored, revision, output_dir):
+  """Builds the actual state using the provided |stored| state as a template.
+  Only examines files listed in the stored state, causing the script to ignore
+  files that have been added to the directories locally. |stored| must be a
+  valid state dictionary.
+  """
+  contents = {}
+  state = { 'revision': revision, 'contents': contents }
+  for relpath, md5 in stored['contents'].iteritems():
+    abspath = os.path.abspath(os.path.join(output_dir, relpath))
+    if os.path.isfile(abspath):
+      m = _Md5(abspath)
+      contents[relpath] = m
+
+  return state
+
+
+def _StatesAreConsistent(stored, actual):
+  """Validates whether two state dictionaries are consistent. Both must be valid
+  state dictionaries. Additional entries in |actual| are ignored.
+  """
+  if stored['revision'] != actual['revision']:
+    _LOGGER.debug('Mismatched revision number.')
+    return False
+  cont_stored = stored['contents']
+  cont_actual = actual['contents']
+  for relpath, md5 in cont_stored.iteritems():
+    if relpath not in cont_actual:
+      _LOGGER.debug('Missing content: %s', relpath)
+      return False
+    if md5 != cont_actual[relpath]:
+      _LOGGER.debug('Modified content: %s', relpath)
+      return False
+  return True
+
+
+def _GetCurrentState(revision, output_dir):
+  """Loads the current state and checks to see if it is consistent. Returns
+  a tuple (state, bool). The returned state will always be valid, even if an
+  invalid state is present on disk.
+  """
+  stored = _LoadState(output_dir)
+  if not _StateIsValid(stored):
+    _LOGGER.debug('State is invalid.')
+    # Return a valid but empty state.
+    return ({'revision': '0', 'contents': {}}, False)
+  actual = _BuildActualState(stored, revision, output_dir)
+  # If the script has been modified, consider the state invalid.
+  path = os.path.join(output_dir, _STATE)
+  if os.path.getmtime(__file__) > os.path.getmtime(path):
+    return (stored, False)
+  # Otherwise, explicitly validate the state.
+  if not _StatesAreConsistent(stored, actual):
+    return (stored, False)
+  return (stored, True)
+
+
+def _DirIsEmpty(path):
+  """Returns true if the given directory is empty, false otherwise."""
+  for root, dirs, files in os.walk(path):
+    return not dirs and not files
+
+
+def _RmTreeHandleReadOnly(func, path, exc):
+  """An error handling function for use with shutil.rmtree. This will
+  detect failures to remove read-only files, and will change their properties
+  prior to removing them. This is necessary on Windows as os.remove will return
+  an access error for read-only files, and git repos contain read-only
+  pack/index files.
+  """
+  excvalue = exc[1]
+  if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES:
+    _LOGGER.debug('Removing read-only path: %s', path)
+    os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
+    func(path)
+  else:
+    raise
+
+
+def _RmTree(path):
+  """A wrapper of shutil.rmtree that handles read-only files."""
+  shutil.rmtree(path, ignore_errors=False, onerror=_RmTreeHandleReadOnly)
+
+
+def _CleanState(output_dir, state, dry_run=False):
+  """Cleans up files/directories in |output_dir| that are referenced by
+  the given |state|. Raises an error if there are local changes. Returns a
+  dictionary of files that were deleted.
+  """
+  _LOGGER.debug('Deleting files from previous installation.')
+  deleted = {}
+
+  # Generate a list of files to delete, relative to |output_dir|.
+  contents = state['contents']
+  files = sorted(contents.keys())
+
+  # Try to delete the files. Keep track of directories to delete as well.
+  dirs = {}
+  for relpath in files:
+    fullpath = os.path.join(output_dir, relpath)
+    fulldir = os.path.dirname(fullpath)
+    dirs[fulldir] = True
+    if os.path.exists(fullpath):
+      # If somehow the file has become a directory, complain about it.
+      if os.path.isdir(fullpath):
+        raise Exception('Directory exists where file expected: %s' % fullpath)
+
+      # Double check that the file doesn't have local changes. If it does
+      # then refuse to delete it.
+      if relpath in contents:
+        stored_md5 = contents[relpath]
+        actual_md5 = _Md5(fullpath)
+        if actual_md5 != stored_md5:
+          raise Exception('File has local changes: %s' % fullpath)
+
+      # The file is unchanged so it can safely be deleted.
+      _LOGGER.debug('Deleting file "%s".', fullpath)
+      deleted[relpath] = True
+      if not dry_run:
+        os.unlink(fullpath)
+
+  # Sort directories from longest name to shortest. This lets us remove empty
+  # directories from the most nested paths first.
+  dirs = sorted(dirs.keys(), key=lambda x: len(x), reverse=True)
+  for p in dirs:
+    if os.path.exists(p) and _DirIsEmpty(p):
+      _LOGGER.debug('Deleting empty directory "%s".', p)
+      if not dry_run:
+        _RmTree(p)
+
+  return deleted
+
+
+def _FindGsUtil():
+  """Looks for depot_tools and returns the absolute path to gsutil.py."""
+  for path in os.environ['PATH'].split(os.pathsep):
+    path = os.path.abspath(path)
+    git_cl = os.path.join(path, 'git_cl.py')
+    gs_util = os.path.join(path, 'gsutil.py')
+    if os.path.exists(git_cl) and os.path.exists(gs_util):
+      return gs_util
+  return None
+
+
+def _GsUtil(*cmd):
+  """Runs the given command in gsutil with exponential backoff and retries."""
+  gs_util = _FindGsUtil()
+  cmd = [sys.executable, gs_util] + list(cmd)
+
+  retries = 3
+  timeout = 4  # Seconds.
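+  # With three retries and a doubling timeout, failed runs sleep 4s, 8s and
+  # 16s before the fourth and final attempt raises.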
+  while True:
+    _LOGGER.debug('Running %s', cmd)
+    prog = subprocess.Popen(cmd, shell=False)
+    prog.communicate()
+
+    # Stop retrying on success.
+    if prog.returncode == 0:
+      return
+
+    # Raise a permanent failure if retries have been exhausted.
+    if retries == 0:
+      raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode))
+
+    _LOGGER.debug('Sleeping %d seconds and trying again.', timeout)
+    time.sleep(timeout)
+    retries -= 1
+    timeout *= 2
+
+
+def _Download(resource):
+  """Downloads the given GS resource to a temporary file, returning its path."""
+  tmp = tempfile.mkstemp(suffix='syzygy_archive')
+  os.close(tmp[0])
+  url = 'gs://syzygy-archive' + resource
+  _GsUtil('cp', url, tmp[1])
+  return tmp[1]
+
+
+def _InstallBinaries(options, deleted={}):
+  """Installs Syzygy binaries. This assumes that the output directory has
+  already been cleaned, as it will refuse to overwrite existing files."""
+  contents = {}
+  state = { 'revision': options.revision, 'contents': contents }
+  archive_path = _SYZYGY_ARCHIVE_PATH % { 'revision': options.revision }
+  if options.resources:
+    resources = [(resource, resource, '', None)
+                 for resource in options.resources]
+  else:
+    resources = _RESOURCES
+  for (base, name, subdir, filt) in resources:
+    # Create the output directory if it doesn't exist.
+    fulldir = os.path.join(options.output_dir, subdir)
+    if os.path.isfile(fulldir):
+      raise Exception('File exists where a directory needs to be created: %s' %
+                      fulldir)
+    if not os.path.exists(fulldir):
+      _LOGGER.debug('Creating directory: %s', fulldir)
+      if not options.dry_run:
+        os.makedirs(fulldir)
+
+    # Download and read the archive.
+    resource = archive_path + '/' + base
+    _LOGGER.debug('Retrieving %s archive at "%s".', name, resource)
+    path = _Download(resource)
+
+    _LOGGER.debug('Unzipping %s archive.', name)
+    with open(path, 'rb') as data:
+      archive = zipfile.ZipFile(data)
+      for entry in archive.infolist():
+        if not filt or filt(entry):
+          fullpath = os.path.normpath(os.path.join(fulldir, entry.filename))
+          relpath = os.path.relpath(fullpath, options.output_dir)
+          if os.path.exists(fullpath):
+            # If in a dry-run take into account the fact that the file *would*
+            # have been deleted.
+            if options.dry_run and relpath in deleted:
+              pass
+            else:
+              raise Exception('Path already exists: %s' % fullpath)
+
+          # Extract the file and update the state dictionary.
+          _LOGGER.debug('Extracting "%s".', fullpath)
+          if not options.dry_run:
+            archive.extract(entry.filename, fulldir)
+            md5 = _Md5(fullpath)
+            contents[relpath] = md5
+            if sys.platform == 'cygwin':
+              os.chmod(fullpath, os.stat(fullpath).st_mode | stat.S_IXUSR)
+
+    _LOGGER.debug('Removing temporary file "%s".', path)
+    os.remove(path)
+
+  return state
+
+
+def _ParseCommandLine():
+  """Parses the command-line and returns an options structure."""
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--dry-run', action='store_true', default=False,
+      help='If true then will simply list actions that would be performed.')
+  option_parser.add_option('--force', action='store_true', default=False,
+      help='Force an installation even if the binaries are up to date.')
+  option_parser.add_option('--no-cleanup', action='store_true', default=False,
+      help='Allow installation on non-Windows platforms, and skip the forced '
+           'cleanup step.')
+  option_parser.add_option('--output-dir', type='string',
+      help='The path where the binaries will be replaced. Existing binaries '
+           'will only be overwritten if not up to date.')
+  option_parser.add_option('--overwrite', action='store_true', default=False,
+      help='If specified then the installation will happily delete and rewrite '
+           'the entire output directory, blasting any local changes.')
+  option_parser.add_option('--revision', type='string',
+      help='The SVN revision or GIT hash associated with the required version.')
+  option_parser.add_option('--revision-file', type='string',
+      help='A text file containing an SVN revision or GIT hash.')
+  option_parser.add_option('--resource', type='string', action='append',
+      dest='resources', help='A resource to be downloaded.')
+  option_parser.add_option('--verbose', dest='log_level', action='store_const',
+      default=logging.INFO, const=logging.DEBUG,
+      help='Enables verbose logging.')
+  option_parser.add_option('--quiet', dest='log_level', action='store_const',
+      default=logging.INFO, const=logging.ERROR,
+      help='Disables all output except for errors.')
+  options, args = option_parser.parse_args()
+  if args:
+    option_parser.error('Unexpected arguments: %s' % args)
+  if not options.output_dir:
+    option_parser.error('Must specify --output-dir.')
+  if not options.revision and not options.revision_file:
+    option_parser.error('Must specify one of --revision or --revision-file.')
+  if options.revision and options.revision_file:
+    option_parser.error('Must not specify both --revision and --revision-file.')
+
+  # Configure logging.
+  logging.basicConfig(level=options.log_level)
+
+  # If a revision file has been specified then read it.
+  if options.revision_file:
+    options.revision = open(options.revision_file, 'rb').read().strip()
+    _LOGGER.debug('Parsed revision "%s" from file "%s".',
+                 options.revision, options.revision_file)
+
+  # Ensure that the specified SVN revision or GIT hash is valid.
+  if not _REVISION_RE.match(options.revision):
+    option_parser.error('Must specify a valid SVN or GIT revision.')
+
+  # This just makes output prettier to read.
+  options.output_dir = os.path.normpath(options.output_dir)
+
+  return options
+
+
+def _RemoveOrphanedFiles(options):
+  """This is run on non-Windows systems to remove orphaned files that may have
+  been downloaded by a previous version of this script.
+  """
+  # Reconfigure logging to output info messages. This will allow inspection of
+  # cleanup status on non-Windows buildbots.
+  _LOGGER.setLevel(logging.INFO)
+
+  output_dir = os.path.abspath(options.output_dir)
+
+  # We only want to clean up the folder in 'src/third_party/syzygy', and we
+  # expect to be called with that as an output directory. This is an attempt to
+  # not start deleting random things if the script is run from an alternate
+  # location, or not called from the gclient hooks.
+  expected_syzygy_dir = os.path.abspath(os.path.join(
+      os.path.dirname(__file__), '..', 'third_party', 'syzygy'))
+  expected_output_dir = os.path.join(expected_syzygy_dir, 'binaries')
+  if expected_output_dir != output_dir:
+    _LOGGER.info('Unexpected output directory, skipping cleanup.')
+    return
+
+  if not os.path.isdir(expected_syzygy_dir):
+    _LOGGER.info('Output directory does not exist, skipping cleanup.')
+    return
+
+  def OnError(function, path, excinfo):
+    """Logs error encountered by shutil.rmtree."""
+    _LOGGER.error('Error when running %s(%s)', function, path, exc_info=excinfo)
+
+  _LOGGER.info('Removing orphaned files from %s', expected_syzygy_dir)
+  if not options.dry_run:
+    shutil.rmtree(expected_syzygy_dir, True, OnError)
+
+
+def main():
+  options = _ParseCommandLine()
+
+  if options.dry_run:
+    _LOGGER.debug('Performing a dry-run.')
+
+  # We only care about Windows platforms, as the Syzygy binaries aren't used
+  # elsewhere. However, there was a short period of time where this script
+  # wasn't gated on OS types, and those OSes downloaded and installed binaries.
+  # This will cleanup orphaned files on those operating systems.
+  if sys.platform not in ('win32', 'cygwin'):
+    if options.no_cleanup:
+      _LOGGER.debug('Skipping usual cleanup for non-Windows platforms.')
+    else:
+      return _RemoveOrphanedFiles(options)
+
+  # Load the current installation state, and validate it against the
+  # requested installation.
+  state, is_consistent = _GetCurrentState(options.revision, options.output_dir)
+
+  # Decide whether or not an install is necessary.
+  if options.force:
+    _LOGGER.debug('Forcing reinstall of binaries.')
+  elif is_consistent:
+    # Avoid doing any work if the contents of the directory are consistent.
+    _LOGGER.debug('State unchanged, no reinstall necessary.')
+    return
+
+  # Under normal logging this is the only message that will be reported.
+  _LOGGER.info('Installing revision %s Syzygy binaries.',
+               options.revision[0:12])
+
+  # Clean up the old state to begin with.
+  deleted = []
+  if options.overwrite:
+    if os.path.exists(options.output_dir):
+      # If overwrite was specified then take a heavy-handed approach.
+      _LOGGER.debug('Deleting entire installation directory.')
+      if not options.dry_run:
+        _RmTree(options.output_dir)
+  else:
+    # Otherwise only delete things that the previous installation put in place,
+    # and take care to preserve any local changes.
+    deleted = _CleanState(options.output_dir, state, options.dry_run)
+
+  # Install the new binaries. In a dry-run this will actually download the
+  # archives, but it won't write anything to disk.
+  state = _InstallBinaries(options, deleted)
+
+  # Build and save the state for the directory.
+  _SaveState(options.output_dir, state, options.dry_run)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/git-hooks/OWNERS b/build/git-hooks/OWNERS
new file mode 100644
index 0000000..3e327dc
--- /dev/null
+++ b/build/git-hooks/OWNERS
@@ -0,0 +1,3 @@
+set noparent
+szager@chromium.org
+cmp@chromium.org
diff --git a/build/git-hooks/pre-commit b/build/git-hooks/pre-commit
new file mode 100755
index 0000000..41b5963
--- /dev/null
+++ b/build/git-hooks/pre-commit
@@ -0,0 +1,60 @@
+#!/bin/sh
+
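+# Detects staged submodule (gitlink) changes, i.e. mode-160000 entries in the
+# raw diff output, whose lines look like (illustrative):
+#   :160000 160000 <old-sha> <new-sha> M path/to/submodule
+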
+submodule_diff() {
+  if test -n "$2"; then
+    git diff-tree -r --ignore-submodules=dirty "$1" "$2" | grep -e '^:160000' -e '^:...... 160000' | xargs
+  else
+    git diff-index --cached --ignore-submodules=dirty "$1" | grep -e '^:160000' -e '^:...... 160000' | xargs
+  fi
+}
+
+if git rev-parse --verify --quiet --no-revs MERGE_HEAD; then
+  merge_base=$(git merge-base HEAD MERGE_HEAD)
+  if test -z "$(submodule_diff $merge_base HEAD)"; then
+    # Most up-to-date submodules are in MERGE_HEAD.
+    head_ref=MERGE_HEAD
+  else
+    # Most up-to-date submodules are in HEAD.
+    head_ref=HEAD
+  fi
+else
+  # No merge in progress. Submodules must match HEAD.
+  head_ref=HEAD
+fi
+
+submods=$(submodule_diff $head_ref)
+if test "$submods"; then
+  echo "You are trying to commit changes to the following submodules:" 1>&2
+  echo 1>&2
+  echo $submods | cut -d ' ' -f 6 | sed 's/^/  /g' 1>&2
+  cat <<EOF 1>&2
+
+Submodule commits are not allowed.  Please run:
+
+  git status --ignore-submodules=dirty
+
+and/or:
+
+  git diff-index --cached --ignore-submodules=dirty HEAD
+
+... to see what's in your index.
+
+If you're really and truly trying to roll the version of a submodule, you should
+commit the new version to DEPS, instead.
+EOF
+  exit 1
+fi
+
+gitmodules_diff() {
+  git diff-index --cached "$1" .gitmodules
+}
+
+if [ "$(git ls-files .gitmodules)" ] && [ "$(gitmodules_diff $head_ref)" ]; then
+  cat <<EOF 1>&2
+You are trying to commit a change to .gitmodules.  That is not allowed.
+To make changes to submodule names/paths, edit DEPS.
+EOF
+  exit 1
+fi
+
+exit 0
diff --git a/build/gn_helpers.py b/build/gn_helpers.py
new file mode 100644
index 0000000..3b0647d
--- /dev/null
+++ b/build/gn_helpers.py
@@ -0,0 +1,39 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions useful when writing scripts that are run from GN's
+exec_script function."""
+
+class GNException(Exception):
+  pass
+
+
+def ToGNString(value, allow_dicts = True):
+  """Prints the given value to stdout.
+
+  allow_dicts indicates if this function will allow converting dictionaries
+  to GN scopes. This is only possible at the top level, you can't nest a
+  GN scope in a list, so this should be set to False for recursive calls."""
+  if isinstance(value, str):
+    if value.find('\n') >= 0:
+      raise GNException("Trying to print a string with a newline in it.")
+    return '"' + value.replace('"', '\\"') + '"'
+
+  if isinstance(value, list):
+    return '[ %s ]' % ', '.join(ToGNString(v) for v in value)
+
+  if isinstance(value, dict):
+    if not allow_dicts:
+      raise GNException("Attempting to recursively print a dictionary.")
+    result = ""
+    for key in value:
+      if not isinstance(key, str):
+        raise GNException("Dictionary key is not a string.")
+      result += "%s = %s\n" % (key, ToGNString(value[key], False))
+    return result
+
+  if isinstance(value, int):
+    return str(value)
+
+  raise GNException("Unsupported type when printing to GN.")
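+
+# Example (illustrative): ToGNString({'foo': ['a', 'b']}) returns
+# 'foo = [ "a", "b" ]\n', which GN can interpret as a scope.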
diff --git a/build/gn_migration.gypi b/build/gn_migration.gypi
new file mode 100644
index 0000000..2527b2e
--- /dev/null
+++ b/build/gn_migration.gypi
@@ -0,0 +1,726 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file defines four targets that we are using to track the progress of the
+# GYP->GN migration:
+#
+# 'both_gn_and_gyp' lists what GN is currently capable of building and should
+# match the 'both_gn_and_gyp' target in //BUILD.gn.
+#
+# 'gyp_all' should include everything built when building "all"; i.e., if you
+# type 'ninja gyp_all' and then 'ninja all', the second build should do
+# nothing. 'gyp_all' should just depend on the other three targets.
+#
+# 'gyp_only' lists any targets that are not meant to be ported over to the GN
+# build.
+#
+# 'gyp_remaining' lists all of the targets that still need to be converted,
+# i.e., all of the other (non-empty) targets that a GYP build will build.
+#
+# TODO(GYP): crbug.com/481694. Add a build step to the bot that enforces the
+# above contracts.
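+#
+# For example, the 'gyp_all' contract can be checked with (illustrative):
+#   ninja -C out/Debug gyp_all
+#   ninja -C out/Debug -n all   # should report "ninja: no work to do."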
+
+{
+  'targets': [
+    {
+      'target_name': 'gyp_all',
+      'type': 'none',
+      'dependencies': [
+        'both_gn_and_gyp',
+        'gyp_only',
+        'gyp_remaining',
+      ]
+    },
+    {
+      # This target should mirror the structure of //:both_gn_and_gyp
+      # in src/BUILD.gn as closely as possible, for ease of comparison.
+      'target_name': 'both_gn_and_gyp',
+      'type': 'none',
+      'dependencies': [
+        '../base/base.gyp:base_i18n_perftests',
+        '../base/base.gyp:base_perftests',
+        '../base/base.gyp:base_unittests',
+        '../base/base.gyp:build_utf8_validator_tables#host',
+        '../base/base.gyp:check_example',
+        '../cc/cc_tests.gyp:cc_perftests',
+        '../cc/cc_tests.gyp:cc_unittests',
+        '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+        '../chrome/chrome.gyp:chrome',
+        '../chrome/chrome.gyp:browser_tests',
+        '../chrome/chrome.gyp:chrome_app_unittests',
+        '../chrome/chrome.gyp:chromedriver',
+        '../chrome/chrome.gyp:chromedriver_tests',
+        '../chrome/chrome.gyp:chromedriver_unittests',
+        '../chrome/chrome.gyp:interactive_ui_tests',
+        '../chrome/chrome.gyp:load_library_perf_tests',
+        '../chrome/chrome.gyp:performance_browser_tests',
+        '../chrome/chrome.gyp:sync_integration_tests',
+        '../chrome/chrome.gyp:sync_performance_tests',
+        '../chrome/chrome.gyp:unit_tests',
+        '../chrome/tools/profile_reset/jtl_compiler.gyp:jtl_compiler',
+        '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+        '../components/components.gyp:network_hints_browser',
+        '../components/components.gyp:policy_templates',
+        '../components/components_tests.gyp:components_browsertests',
+        '../components/components_tests.gyp:components_perftests',
+        '../components/components_tests.gyp:components_unittests',
+        '../content/content.gyp:content_app_browser',
+        '../content/content.gyp:content_app_child',
+        '../content/content_shell_and_tests.gyp:content_browsertests',
+        '../content/content_shell_and_tests.gyp:content_gl_benchmark',
+        '../content/content_shell_and_tests.gyp:content_gl_tests',
+        '../content/content_shell_and_tests.gyp:content_perftests',
+        '../content/content_shell_and_tests.gyp:content_shell',
+        '../content/content_shell_and_tests.gyp:content_unittests',
+        '../courgette/courgette.gyp:courgette',
+        '../courgette/courgette.gyp:courgette_fuzz',
+        '../courgette/courgette.gyp:courgette_minimal_tool',
+        '../courgette/courgette.gyp:courgette_unittests',
+        '../crypto/crypto.gyp:crypto_unittests',
+        '../extensions/extensions_tests.gyp:extensions_browsertests',
+        '../extensions/extensions_tests.gyp:extensions_unittests',
+        '../device/device_tests.gyp:device_unittests',
+        '../gin/gin.gyp:gin_v8_snapshot_fingerprint',
+        '../gin/gin.gyp:gin_shell',
+        '../gin/gin.gyp:gin_unittests',
+        '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+        '../google_apis/gcm/gcm.gyp:mcs_probe',
+        '../google_apis/google_apis.gyp:google_apis_unittests',
+        '../gpu/gpu.gyp:angle_unittests',
+        '../gpu/gpu.gyp:gl_tests',
+        '../gpu/gpu.gyp:gpu_perftests',
+        '../gpu/gpu.gyp:gpu_unittests',
+        '../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support',  # TODO(GYP) crbug.com/471920
+        '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test',  # TODO(GYP) crbug.com/471920
+        '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test',  # TODO(GYP) crbug.com/471903 to make this complete.
+        '../ipc/ipc.gyp:ipc_perftests',
+        '../ipc/ipc.gyp:ipc_tests',
+        '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+        '../jingle/jingle.gyp:jingle_unittests',
+        '../media/media.gyp:ffmpeg_regression_tests',  # TODO(GYP) this should be conditional on media_use_ffmpeg
+        '../media/media.gyp:media_perftests',
+        '../media/media.gyp:media_unittests',
+        '../media/midi/midi.gyp:midi_unittests',
+        '../media/cast/cast.gyp:cast_benchmarks',
+        '../media/cast/cast.gyp:cast_unittests',
+        '../media/cast/cast.gyp:generate_barcode_video',
+        '../media/cast/cast.gyp:generate_timecode_audio',
+        '../mojo/mojo.gyp:mojo',
+        '../mojo/mojo_base.gyp:mojo_application_base',
+        '../mojo/mojo_base.gyp:mojo_common_unittests',
+        '../net/net.gyp:crash_cache',
+        '../net/net.gyp:crl_set_dump',
+        '../net/net.gyp:dns_fuzz_stub',
+        '../net/net.gyp:dump_cache',
+        '../net/net.gyp:gdig',
+        '../net/net.gyp:get_server_time',
+        '../net/net.gyp:hpack_example_generator',
+        '../net/net.gyp:hpack_fuzz_mutator',
+        '../net/net.gyp:hpack_fuzz_wrapper',
+        '../net/net.gyp:net_perftests',
+        '../net/net.gyp:net_unittests',
+        '../net/net.gyp:net_watcher',  # TODO(GYP): This should be conditional on use_v8_in_net
+        '../net/net.gyp:run_testserver',
+        '../net/net.gyp:stress_cache',
+        '../net/net.gyp:tld_cleanup',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_audio',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_audio_input',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_c_stub',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_cc_stub',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_compositor',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_crxfs',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_enumerate_devices',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_file_chooser',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_flash_topmost',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_gamepad',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_gles2',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_gles2_spinning_cube',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_graphics_2d',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_ime',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_input',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_media_stream_audio',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_media_stream_video',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_mouse_cursor',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_mouse_lock',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_paint_manager',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_post_message',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_printing',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_scaling',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_scroll',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_simple_font',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_threading',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_url_loader',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_url_loader_file',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_vc',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_video_decode',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_video_decode_dev',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_video_effects',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_video_encode',
+        '../ppapi/ppapi_internal.gyp:ppapi_tests',
+        '../ppapi/ppapi_internal.gyp:ppapi_perftests',
+        '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+        '../ppapi/tools/ppapi_tools.gyp:pepper_hash_for_uma',
+        '../printing/printing.gyp:printing_unittests',
+        '../skia/skia_tests.gyp:skia_unittests',
+        '../skia/skia.gyp:filter_fuzz_stub',
+        '../skia/skia.gyp:image_operations_bench',
+        '../sql/sql.gyp:sql_unittests',
+        '../sync/sync.gyp:run_sync_testserver',
+        '../sync/sync.gyp:sync_unit_tests',
+        '../sync/tools/sync_tools.gyp:sync_client',
+        '../sync/tools/sync_tools.gyp:sync_listen_notifications',
+        '../testing/gmock.gyp:gmock_main',
+        '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests',
+        '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests',
+        '../third_party/WebKit/Source/web/web_tests.gyp:webkit_unit_tests',
+        '../third_party/WebKit/Source/wtf/wtf_tests.gyp:wtf_unittests',
+        '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+        '../third_party/codesighs/codesighs.gyp:codesighs',
+        '../third_party/codesighs/codesighs.gyp:maptsvdifftool',
+        '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+        '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+        '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_system_unittests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_bindings_unittests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_environment_unittests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_system_perftests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_system_unittests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_utility_unittests',
+        '../third_party/pdfium/samples/samples.gyp:pdfium_diff',
+        '../third_party/pdfium/samples/samples.gyp:pdfium_test',
+        '../third_party/smhasher/smhasher.gyp:pmurhash',
+        '../tools/gn/gn.gyp:gn',
+        '../tools/gn/gn.gyp:generate_test_gn_data',
+        '../tools/gn/gn.gyp:gn_unittests',
+        '../tools/imagediff/image_diff.gyp:image_diff',
+        '../tools/perf/clear_system_cache/clear_system_cache.gyp:clear_system_cache',
+        '../tools/telemetry/telemetry.gyp:bitmaptools#host',
+        '../ui/accessibility/accessibility.gyp:accessibility_unittests',
+        '../ui/app_list/app_list.gyp:app_list_unittests',
+        '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+        '../ui/compositor/compositor.gyp:compositor_unittests',
+        '../ui/display/display.gyp:display_unittests',
+        '../ui/events/events.gyp:events_unittests',
+        '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+        '../ui/gl/gl_tests.gyp:gl_unittests',
+        '../ui/message_center/message_center.gyp:message_center_unittests',
+        '../ui/snapshot/snapshot.gyp:snapshot_unittests',
+        '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+        '../ui/views/examples/examples.gyp:views_examples_with_content_exe',
+        '../url/url.gyp:url_unittests',
+        '../v8/tools/gyp/v8.gyp:v8_snapshot',
+        '../v8/tools/gyp/v8.gyp:postmortem-metadata',
+      ],
+      'conditions': [
+        ['clang==1', {
+          'dependencies': [
+            '../build/sanitizers/sanitizers.gyp:llvm-symbolizer',
+          ],
+        }],
+        ['disable_nacl==0 and disable_nacl_untrusted==0', {
+          'dependencies': [
+            '../components/nacl.gyp:nacl_loader_unittests',
+          ]
+        }],
+        ['enable_extensions==1 and OS!="mac"', {
+          'dependencies': [
+            '../extensions/shell/app_shell.gyp:app_shell',
+            '../extensions/shell/app_shell.gyp:app_shell_unittests',
+          ],
+        }],
+        ['enable_mdns==1', {
+          'dependencies': [
+            '../chrome/chrome.gyp:service_discovery_sniffer',
+          ]
+        }],
+        ['remoting==1', {
+          'dependencies': [
+            '../remoting/remoting_all.gyp:remoting_all',
+          ],
+        }],
+        ['remoting==1 and chromeos==0 and use_x11==1', {
+          'dependencies': [
+            '../remoting/remoting.gyp:remoting_me2me_host',
+            '../remoting/remoting.gyp:remoting_me2me_native_messaging_host',
+          ],
+        }],
+        ['toolkit_views==1', {
+          'dependencies': [
+            '../ui/app_list/app_list.gyp:app_list_demo',
+            '../ui/views/views.gyp:views_unittests',
+          ],
+        }],
+        ['use_ash==1', {
+          'dependencies': [
+            '../ash/ash.gyp:ash_shell',
+            '../ash/ash.gyp:ash_shell_unittests',
+            '../ash/ash.gyp:ash_unittests',
+          ],
+        }],
+        ['use_ash==1 or chromeos==1', {
+          'dependencies': [
+            '../components/components.gyp:session_manager_component',
+          ]
+        }],
+        ['use_aura==1', {
+          'dependencies': [
+            '../ui/aura/aura.gyp:aura_bench',
+            '../ui/aura/aura.gyp:aura_demo',
+            '../ui/aura/aura.gyp:aura_unittests',
+            '../ui/keyboard/keyboard.gyp:keyboard_unittests',
+            '../ui/wm/wm.gyp:wm_unittests',
+          ],
+        }],
+        ['use_ozone==1', {
+          'dependencies': [
+            '../ui/ozone/ozone.gyp:ozone',
+          ],
+        }],
+        ['use_x11==1', {
+          'dependencies': [
+            '../tools/xdisplaycheck/xdisplaycheck.gyp:xdisplaycheck',
+          ],
+          'conditions': [
+            ['target_arch!="arm"', {
+              'dependencies': [
+                '../gpu/tools/tools.gyp:compositor_model_bench',
+              ],
+            }],
+          ],
+        }],
+        ['OS=="android"', {
+          'dependencies': [
+            '../base/base.gyp:chromium_android_linker',
+            '../breakpad/breakpad.gyp:dump_syms',
+            '../build/android/rezip.gyp:rezip_apk_jar',
+            '../chrome/chrome.gyp:chrome_public_apk',
+            '../chrome/chrome.gyp:chrome_public_test_apk',
+            '../chrome/chrome.gyp:chrome_shell_apk',
+            '../chrome/chrome.gyp:chromedriver_webview_shell_apk',
+            #"//clank" TODO(GYP) - conditional somehow?
+            '../tools/imagediff/image_diff.gyp:image_diff#host',
+            '../tools/telemetry/telemetry.gyp:bitmaptools#host',
+
+            # TODO(GYP): Remove these when the components_unittests work.
+            #"//components/history/core/test:test",
+            #"//components/policy:policy_component_test_support",
+            #"//components/policy:test_support",
+            #"//components/rappor:test_support",
+            #"//components/signin/core/browser:test_support",
+            #"//components/sync_driver:test_support",
+            #"//components/user_manager",
+            #"//components/wallpaper",
+
+            '../content/content_shell_and_tests.gyp:content_shell_apk',
+
+            '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests_apk',
+            '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests_apk',
+            '../third_party/WebKit/Source/web/web_tests.gyp:webkit_unit_tests_apk',
+            '../third_party/WebKit/Source/wtf/wtf_tests.gyp:wtf_unittests_apk',
+            # TODO(GYP): Are these needed, or will they be pulled in automatically?
+            #"//third_party/android_tools:android_gcm_java",
+            #"//third_party/android_tools:uiautomator_java",
+            #"//third_party/android_tools:android_support_v13_java",
+            #"//third_party/android_tools:android_support_v7_appcompat_java",
+            #"//third_party/android_tools:android_support_v7_mediarouter_java",
+            #"//third_party/mesa",
+            #"//third_party/mockito:mockito_java",
+            #"//third_party/openmax_dl/dl",
+            #"//third_party/speex",
+            #"//ui/android:ui_java",
+
+            # TODO(GYP): Are these needed?
+            #"//chrome/test:test_support_unit",
+            #"//third_party/smhasher:murmurhash3",
+            #"//ui/message_center:test_support",
+          ],
+          'dependencies!': [
+            '../breakpad/breakpad.gyp:symupload',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chromedriver',
+            '../chrome/chrome.gyp:chromedriver_unittests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../extensions/extensions_tests.gyp:extensions_browsertests',
+            '../extensions/extensions_tests.gyp:extensions_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../net/net.gyp:net_unittests',
+            #"//ppapi/examples",
+            '../third_party/pdfium/samples/samples.gyp:pdfium_test',
+            '../tools/gn/gn.gyp:gn',
+            '../tools/gn/gn.gyp:gn_unittests',
+            '../tools/imagediff/image_diff.gyp:image_diff',
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        }],
+        ['OS=="android" or OS=="linux"', {
+          'dependencies': [
+            '../net/net.gyp:disk_cache_memory_test',
+          ],
+        }],
+        ['chromeos==1', {
+          'dependencies': [
+            '../chromeos/chromeos.gyp:chromeos_unittests',
+            '../ui/chromeos/ui_chromeos.gyp:ui_chromeos_unittests',
+          ]
+        }],
+        ['chromeos==1 or OS=="win" or OS=="mac"', {
+          'dependencies': [
+            '../rlz/rlz.gyp:rlz_id',
+            '../rlz/rlz.gyp:rlz_lib',
+            '../rlz/rlz.gyp:rlz_unittests',
+          ],
+        }],
+        ['OS=="android" or OS=="linux" or os_bsd==1', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:core-2-minidump',
+            '../breakpad/breakpad.gyp:microdump_stackwalk',
+            '../breakpad/breakpad.gyp:minidump_dump',
+            '../breakpad/breakpad.gyp:minidump_stackwalk',
+            '../breakpad/breakpad.gyp:symupload',
+            '../third_party/codesighs/codesighs.gyp:nm2tsv',
+          ],
+        }],
+        ['OS=="linux"', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:breakpad_unittests',
+            '../breakpad/breakpad.gyp:dump_syms#host',
+            '../breakpad/breakpad.gyp:generate_test_dump',
+            '../breakpad/breakpad.gyp:minidump-2-core',
+            '../dbus/dbus.gyp:dbus_test_server',
+            '../dbus/dbus.gyp:dbus_unittests',
+            '../media/cast/cast.gyp:tap_proxy',
+            '../net/net.gyp:disk_cache_memory_test',
+            '../net/net.gyp:flip_in_mem_edsm_server',
+            '../net/net.gyp:flip_in_mem_edsm_server_unittests',
+            '../net/net.gyp:epoll_quic_client',
+            '../net/net.gyp:epoll_quic_server',
+            '../net/net.gyp:hpack_example_generator',
+            '../net/net.gyp:hpack_fuzz_mutator',
+            '../net/net.gyp:hpack_fuzz_wrapper',
+            '../net/net.gyp:net_perftests',
+            '../net/net.gyp:quic_client',
+            '../net/net.gyp:quic_server',
+            '../sandbox/sandbox.gyp:chrome_sandbox',
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests',
+            '../sandbox/sandbox.gyp:sandbox_linux_jni_unittests',
+            '../third_party/sqlite/sqlite.gyp:sqlite_shell',
+          ],
+        }],
+        ['OS=="mac"', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:crash_inspector',
+            '../breakpad/breakpad.gyp:dump_syms',
+            '../breakpad/breakpad.gyp:symupload',
+            '../third_party/apple_sample_code/apple_sample_code.gyp:apple_sample_code',
+            '../third_party/molokocacao/molokocacao.gyp:molokocacao',
+
+            # TODO(GYP): remove these when the corresponding root targets work.
+            #"//cc/blink",
+            #"//components/ui/zoom:ui_zoom",
+            #"//content",
+            #"//content/test:test_support",
+            #"//device/battery",
+            #"//device/bluetooth",
+            #"//device/nfc",
+            #"//device/usb",
+            #"//device/vibration",
+            #"//media/blink",
+            #"//pdf",
+            #"//storage/browser",
+            #"//third_party/brotli",
+            #"//third_party/flac",
+            #"//third_party/hunspell",
+            #//third_party/iccjpeg",
+            #"//third_party/libphonenumber",
+            #"//third_party/ots",
+            #"//third_party/qcms",
+            #"//third_party/smhasher:murmurhash3",
+            #"//third_party/speex",
+            #"//third_party/webrtc/system_wrappers",
+            #"//ui/native_theme",
+            #"//ui/snapshot",
+            #"//ui/surface",
+          ],
+          'dependencies!': [
+            #"//chrome",  # TODO(GYP)
+            #"//chrome/test:browser_tests",  # TODO(GYP)
+            #"//chrome/test:interactive_ui_tests",  # TODO(GYP)
+            #"//chrome/test:sync_integration_tests",  # TODO(GYP)
+            #"//chrome/test:unit_tests",  # TODO(GYP)
+            #"//components:components_unittests",  # TODO(GYP)
+            #"//content/test:content_browsertests",  # TODO(GYP)
+            #"//content/test:content_perftests",  # TODO(GYP)
+            #"//content/test:content_unittests",  # TODO(GYP)
+            #"//extensions:extensions_browsertests",  # TODO(GYP)
+            #"//extensions:extensions_unittests",  # TODO(GYP)
+            #"//net:net_unittests",  # TODO(GYP)
+            #"//third_party/usrsctp",  # TODO(GYP)
+            #"//ui/app_list:app_list_unittests",  # TODO(GYP)
+            #"//ui/gfx:gfx_unittests",  # TODO(GYP)
+          ],
+        }],
+        ['OS=="win"', {
+          'dependencies': [
+            '../base/base.gyp:pe_image_test',
+            '../chrome/chrome.gyp:crash_service',
+            '../chrome/chrome.gyp:setup_unittests',
+            '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests',
+            '../chrome_elf/chrome_elf.gyp:dll_hash_main',
+            '../components/components.gyp:wifi_test',
+            '../net/net.gyp:quic_client',
+            '../net/net.gyp:quic_server',
+            '../sandbox/sandbox.gyp:pocdll',
+            '../sandbox/sandbox.gyp:sandbox_poc',
+            '../sandbox/sandbox.gyp:sbox_integration_tests',
+            '../sandbox/sandbox.gyp:sbox_unittests',
+            '../sandbox/sandbox.gyp:sbox_validation_tests',
+            '../testing/gtest.gyp:gtest_main',
+            '../third_party/codesighs/codesighs.gyp:msdump2symdb',
+            '../third_party/codesighs/codesighs.gyp:msmap2tsv',
+            '../third_party/pdfium/samples/samples.gyp:pdfium_diff',
+            '../win8/win8.gyp:metro_viewer',
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'gyp_only',
+      'type': 'none',
+      'conditions': [
+        ['OS=="linux" or OS=="win"', {
+          'conditions': [
+            ['disable_nacl==0 and disable_nacl_untrusted==0', {
+              'dependencies': [
+                '../mojo/mojo_nacl.gyp:monacl_shell',  # This should not be built in chromium.
+              ]
+            }],
+          ]
+        }],
+      ],
+    },
+    {
+      'target_name': 'gyp_remaining',
+      'type': 'none',
+      'conditions': [
+        ['remoting==1', {
+          'dependencies': [
+            '../remoting/app_remoting_webapp.gyp:ar_sample_app',  # crbug.com/471916
+          ],
+        }],
+        ['test_isolation_mode!="noop"', {
+          'dependencies': [
+            '../base/base.gyp:base_unittests_run',
+            '../cc/cc_tests.gyp:cc_unittests_run',
+            '../chrome/chrome.gyp:browser_tests_run',
+            '../chrome/chrome.gyp:chrome_run',
+            '../chrome/chrome.gyp:interactive_ui_tests_run',
+            '../chrome/chrome.gyp:sync_integration_tests_run',
+            '../chrome/chrome.gyp:unit_tests_run',
+            '../components/components_tests.gyp:components_browsertests_run',
+            '../components/components_tests.gyp:components_unittests_run',
+            '../content/content_shell_and_tests.gyp:content_browsertests_run',
+            '../content/content_shell_and_tests.gyp:content_unittests_run',
+            '../courgette/courgette.gyp:courgette_unittests_run',
+            '../crypto/crypto.gyp:crypto_unittests_run',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests_run',
+            '../gpu/gpu.gyp:gpu_unittests_run',
+            '../ipc/ipc.gyp:ipc_tests_run',
+            '../media/cast/cast.gyp:cast_unittests_run',
+            '../media/media.gyp:media_unittests_run',
+            '../media/midi/midi.gyp:midi_unittests_run',
+            '../net/net.gyp:net_unittests_run',
+            '../printing/printing.gyp:printing_unittests_run',
+            '../remoting/remoting.gyp:remoting_unittests_run',
+            '../skia/skia_tests.gyp:skia_unittests_run',
+            '../sql/sql.gyp:sql_unittests_run',
+            '../sync/sync.gyp:sync_unit_tests_run',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests_run',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_bindings_unittests_run',
+            '../tools/gn/gn.gyp:gn_unittests_run',
+            '../ui/accessibility/accessibility.gyp:accessibility_unittests_run',
+            '../ui/app_list/app_list.gyp:app_list_unittests_run',
+            '../ui/compositor/compositor.gyp:compositor_unittests_run',
+            '../ui/events/events.gyp:events_unittests_run',
+            '../ui/gl/gl_tests.gyp:gl_unittests_run',
+            '../ui/message_center/message_center.gyp:message_center_unittests_run',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests_run',
+            '../url/url.gyp:url_unittests_run',
+          ],
+          'conditions': [
+            ['OS=="linux"', {
+              'dependencies': [
+                '../sandbox/sandbox.gyp:sandbox_linux_unittests_run',
+                '../ui/display/display.gyp:display_unittests_run',
+              ],
+            }],
+            ['OS=="mac"', {
+              'dependencies': [
+                '../sandbox/sandbox.gyp:sandbox_mac_unittests_run',
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:installer_util_unittests_run',
+                '../chrome/chrome.gyp:setup_unittests_run',
+                '../sandbox/sandbox.gyp:sbox_integration_tests',
+                '../sandbox/sandbox.gyp:sbox_unittests',
+                '../sandbox/sandbox.gyp:sbox_validation_tests',
+              ],
+            }],
+            ['use_ash==1', {
+              'dependencies': [
+                '../ash/ash.gyp:ash_unittests_run',
+              ],
+            }],
+            ['use_aura==1', {
+              'dependencies': [
+                '../ui/aura/aura.gyp:aura_unittests_run',
+                '../ui/wm/wm.gyp:wm_unittests_run',
+              ],
+            }],
+            ['enable_webrtc==1 or OS!="android"', {
+              'dependencies': [
+                '../jingle/jingle.gyp:jingle_unittests_run',
+              ],
+            }],
+            ['disable_nacl==0 and disable_nacl_untrusted==0', {
+              'dependencies': [
+                '../components/nacl.gyp:nacl_loader_unittests_run',
+              ]
+            }],
+          ],
+        }],
+        ['use_openssl==1', {
+          'dependencies': [
+            # TODO(GYP): All of these targets still need to be converted.
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_ecdsa_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_bn_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_pqueue_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_digest_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_cipher_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_hkdf_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_constant_time_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_thread_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_base64_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_gcm_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_bytestring_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_evp_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_dsa_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_rsa_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_hmac_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_aead_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_ssl_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_err_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_lhash_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_pbkdf_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_dh_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_pkcs12_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_example_mul',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_ec_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_bio_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_pkcs7_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_unittests',
+          ],
+        }],
+        ['chromeos==1', {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:jpeg_decode_accelerator_unittest',
+            '../content/content_shell_and_tests.gyp:video_encode_accelerator_unittest',
+          ],
+        }],
+        ['chromeos==1 and target_arch != "arm"', {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:vaapi_jpeg_decoder_unittest',
+          ],
+        }],
+        ['chromeos==1 or OS=="win" or OS=="android"', {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:video_decode_accelerator_unittest',
+          ],
+        }],
+        ['OS=="linux" or OS=="win"', {
+          'dependencies': [
+            # TODO(GYP): Figure out which of these run on android/mac/win/ios/etc.
+            '../net/net.gyp:net_docs',
+            '../remoting/remoting.gyp:ar_sample_test_driver',
+
+            # TODO(GYP): in progress - see tfarina.
+            '../third_party/webrtc/tools/tools.gyp:frame_analyzer',
+            '../third_party/webrtc/tools/tools.gyp:rgba_to_i420_converter',
+          ],
+        }],
+        ['OS=="win"', {
+          'dependencies': [
+            # TODO(GYP): All of these targets still need to be converted.
+            '../base/base.gyp:debug_message',
+            '../chrome/chrome.gyp:app_shim',
+            '../chrome/chrome.gyp:gcapi_dll',
+            '../chrome/chrome.gyp:gcapi_test',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:pack_policy_templates',
+            '../chrome/chrome.gyp:sb_sigutil',
+            '../chrome/chrome.gyp:setup',
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../chrome/tools/crash_service/caps/caps.gyp:caps',
+            '../cloud_print/gcp20/prototype/gcp20_device.gyp:gcp20_device',
+            '../cloud_print/gcp20/prototype/gcp20_device.gyp:gcp20_device_unittests',
+            '../cloud_print/service/win/service.gyp:cloud_print_service',
+            '../cloud_print/service/win/service.gyp:cloud_print_service_config',
+            '../cloud_print/service/win/service.gyp:cloud_print_service_setup',
+            '../cloud_print/virtual_driver/win/install/virtual_driver_install.gyp:virtual_driver_setup',
+            '../cloud_print/virtual_driver/win/virtual_driver.gyp:gcp_portmon',
+            '../components/test_runner/test_runner.gyp:layout_test_helper',
+            '../content/content_shell_and_tests.gyp:content_shell_crash_service',
+            '../gpu/gpu.gyp:angle_end2end_tests',
+            '../gpu/gpu.gyp:angle_perftests',
+            '../net/net.gyp:net_docs',
+            '../ppapi/ppapi_internal.gyp:ppapi_perftests',
+            '../remoting/remoting.gyp:ar_sample_test_driver',
+            '../remoting/remoting.gyp:remoting_breakpad_tester',
+            '../remoting/remoting.gyp:remoting_console',
+            '../remoting/remoting.gyp:remoting_desktop',
+            '../rlz/rlz.gyp:rlz',
+            '../tools/win/static_initializers/static_initializers.gyp:static_initializers',
+          ],
+        }],
+        ['OS=="win" and win_use_allocator_shim==1', {
+          'dependencies': [
+            '../base/allocator/allocator.gyp:allocator_unittests',
+          ]
+        }],
+        ['OS=="win" and target_arch=="ia32"', {
+          'dependencies': [
+            # TODO(GYP): All of these targets need to be ported over.
+            '../base/base.gyp:base_win64',
+            '../base/base.gyp:base_i18n_nacl_win64',
+            '../chrome/chrome.gyp:crash_service_win64',
+            '../chrome/chrome.gyp:launcher_support64',
+            '../components/components.gyp:breakpad_win64',
+            '../courgette/courgette.gyp:courgette64',
+            '../crypto/crypto.gyp:crypto_nacl_win64',
+            '../ipc/ipc.gyp:ipc_win64',
+            '../sandbox/sandbox.gyp:sandbox_win64',
+            '../cloud_print/virtual_driver/win/virtual_driver64.gyp:gcp_portmon64',
+            '../cloud_print/virtual_driver/win/virtual_driver64.gyp:virtual_driver_lib64',
+          ],
+        }],
+        ['OS=="win" and target_arch=="ia32" and configuration_policy==1', {
+          'dependencies': [
+            # TODO(GYP): All of these targets need to be ported over.
+            '../components/components.gyp:policy_win64',
+          ]
+        }],
+      ],
+    },
+  ]
+}
+
diff --git a/build/gn_run_binary.py b/build/gn_run_binary.py
new file mode 100644
index 0000000..7d83f61
--- /dev/null
+++ b/build/gn_run_binary.py
@@ -0,0 +1,22 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script for GN to run an arbitrary binary. See compiled_action.gni.
+
+Run with:
+  python gn_run_binary.py <binary_name> [args ...]
+"""
+
+import sys
+import subprocess
+
+# This script is designed to run binaries produced by the current build. We
+# always prefix the binary name with "./" to avoid picking up system versions
+# that might also be on the path.
+path = './' + sys.argv[1]
+
+# The rest of the arguments are passed directly to the executable.
+args = [path] + sys.argv[2:]
+
+sys.exit(subprocess.call(args))
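+
+# Illustrative sketch, not executed by this script (the tool name below is
+# hypothetical): run from an output directory containing a freshly built
+# ./my_tool,
+#   python gn_run_binary.py my_tool --version
+# ends up calling subprocess.call(['./my_tool', '--version']), so a my_tool
+# found elsewhere on $PATH is never picked up.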
diff --git a/build/grit_action.gypi b/build/grit_action.gypi
new file mode 100644
index 0000000..b24f0f8
--- /dev/null
+++ b/build/grit_action.gypi
@@ -0,0 +1,71 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit in a
+# consistent manner. To use this the following variables need to be
+# defined:
+#   grit_grd_file: string: grd file path
+#   grit_out_dir: string: the output directory path
+
+# It would be really nice to do this with a rule instead of actions, but it
+# would need to determine inputs and outputs via grit_info on a per-file
+# basis. GYP rules don't currently support that. They could be extended to
+# do this, but then every generator would need to be updated to handle this.
+
+{
+  'variables': {
+    'grit_cmd': ['python', '<(DEPTH)/tools/grit/grit.py'],
+    'grit_resource_ids%': '<(DEPTH)/tools/gritsettings/resource_ids',
+    # This makes it possible to add more defines in specific targets,
+    # instead of in build/common.gypi.
+    'grit_additional_defines%': [],
+    'grit_rc_header_format%': [],
+    'grit_whitelist%': '',
+
+    'conditions': [
+      # These scripts can skip writing generated files if they are identical
+      # to the already existing files, which avoids further build steps, like
+      # recompilation. However, a dependency (earlier build step) having a
+      # newer timestamp than an output (later build step) confuses some build
+      # systems, so only use this on ninja, which explicitly supports this use
+      # case (gyp turns all actions into ninja restat rules).
+      ['"<(GENERATOR)"=="ninja"', {
+        'write_only_new': '1',
+      }, {
+        'write_only_new': '0',
+      }],
+    ],
+  },
+  'conditions': [
+    ['"<(grit_whitelist)"==""', {
+      'variables': {
+        'grit_whitelist_flag': [],
+      }
+    }, {
+      'variables': {
+        'grit_whitelist_flag': ['-w', '<(grit_whitelist)'],
+      }
+    }]
+  ],
+  'inputs': [
+    '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+        '<@(grit_whitelist_flag) --inputs <(grit_grd_file) '
+        '-f "<(grit_resource_ids)")',
+  ],
+  'outputs': [
+    '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+        '<@(grit_whitelist_flag) --outputs \'<(grit_out_dir)\' '
+        '<(grit_grd_file) -f "<(grit_resource_ids)")',
+  ],
+  'action': ['<@(grit_cmd)',
+             '-i', '<(grit_grd_file)', 'build',
+             '-f', '<(grit_resource_ids)',
+             '-o', '<(grit_out_dir)',
+             '--write-only-new=<(write_only_new)',
+             '<@(grit_defines)',
+             '<@(grit_whitelist_flag)',
+             '<@(grit_additional_defines)',
+             '<@(grit_rc_header_format)'],
+  'message': 'Generating resources from <(grit_grd_file)',
+}
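+
+# Illustrative sketch of a consuming target (names are hypothetical): the
+# target sets grit_out_dir, and each action sets grit_grd_file and includes
+# this file:
+# {
+#   'target_name': 'my_resources',
+#   'type': 'none',
+#   'variables': { 'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/my' },
+#   'actions': [
+#     {
+#       'action_name': 'generate_my_resources',
+#       'variables': { 'grit_grd_file': 'my_resources.grd' },
+#       'includes': [ '../build/grit_action.gypi' ],
+#     },
+#   ],
+# }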
diff --git a/build/grit_target.gypi b/build/grit_target.gypi
new file mode 100644
index 0000000..179f986
--- /dev/null
+++ b/build/grit_target.gypi
@@ -0,0 +1,31 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target that will have one or more
+# uses of grit_action.gypi. To use this the following variables need to be
+# defined:
+#   grit_out_dir: string: the output directory path
+
+# DO NOT USE THIS FILE. Instead, use qualified includes.
+# TODO: Convert everything to qualified includes, and delete this file,
+# http://crbug.com/401588
+{
+  'conditions': [
+    # If the target is a direct binary, it needs to be able to find the header;
+    # otherwise it is probably a supporting target just for grit, so the
+    # include dir needs to be set on anything that depends on this action.
+    ['_type=="executable" or _type=="shared_library" or \
+      _type=="loadable_module" or _type=="static_library"', {
+      'include_dirs': [
+        '<(grit_out_dir)',
+      ],
+    }, {
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(grit_out_dir)',
+        ],
+      },
+    }],
+  ],
+}
diff --git a/build/gyp_chromium b/build/gyp_chromium
new file mode 100755
index 0000000..9dac871
--- /dev/null
+++ b/build/gyp_chromium
@@ -0,0 +1,333 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is a wrapper for Chromium that adds some support for how GYP
+# is invoked by Chromium beyond what can be done in the gclient hooks.
+
+import argparse
+import glob
+import gyp_environment
+import os
+import re
+import shlex
+import subprocess
+import string
+import sys
+import vs_toolchain
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+
+sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+import gyp
+
+# Assume this file is in a one-level-deep subdirectory of the source root.
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# Add paths so that pymod_do_main(...) can import files.
+sys.path.insert(1, os.path.join(chrome_src, 'android_webview', 'tools'))
+sys.path.insert(1, os.path.join(chrome_src, 'build', 'android', 'gyp'))
+sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'chromecast', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'ios', 'chrome', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'native_client', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'native_client_sdk', 'src',
+    'build_tools'))
+sys.path.insert(1, os.path.join(chrome_src, 'remoting', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'liblouis'))
+sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'WebKit',
+    'Source', 'build', 'scripts'))
+sys.path.insert(1, os.path.join(chrome_src, 'tools'))
+sys.path.insert(1, os.path.join(chrome_src, 'tools', 'generate_shim_headers'))
+sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit'))
+
+# On Windows, Psyco shortens warm runs of build/gyp_chromium by about
+# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70
+# seconds.  Conversely, memory usage of build/gyp_chromium with Psyco
+# maxes out at about 158 MB vs. 132 MB without it.
+#
+# Psyco uses native libraries, so we need to load a different
+# installation depending on which OS we are running under. It has not
+# been tested whether using Psyco on our Mac and Linux builds is worth
+# it (the GYP running time is a lot shorter, so the JIT startup cost
+# may not be worth it).
+if sys.platform == 'win32':
+  try:
+    sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32'))
+    import psyco
+  except:
+    psyco = None
+else:
+  psyco = None
+
+
+def GetSupplementalFiles():
+  """Returns a list of the supplemental files that are included in all GYP
+  sources."""
+  return glob.glob(os.path.join(chrome_src, '*', 'supplement.gypi'))
+
+
+def ProcessGypDefinesItems(items):
+  """Converts a list of strings to a list of key-value pairs."""
+  result = []
+  for item in items:
+    tokens = item.split('=', 1)
+    # Some GYP variables have hyphens, which we don't support.
+    if len(tokens) == 2:
+      result += [(tokens[0], tokens[1])]
+    else:
+      # No value supplied, treat it as a boolean and set it. Note that we
+      # use the string '1' here so we have a consistent definition whether
+      # you do 'foo=1' or 'foo'.
+      result += [(tokens[0], '1')]
+  return result
+
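+# Illustrative sketch of the mapping above (the define names are
+# hypothetical):
+#   ProcessGypDefinesItems(['component=shared_library', 'clang'])
+#     -> [('component', 'shared_library'), ('clang', '1')]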
+
+def GetGypVars(supplemental_files):
+  """Returns a dictionary of all GYP vars."""
+  # Find the .gyp directory in the user's home directory.
+  home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
+  if home_dot_gyp:
+    home_dot_gyp = os.path.expanduser(home_dot_gyp)
+  if not home_dot_gyp:
+    home_vars = ['HOME']
+    if sys.platform in ('cygwin', 'win32'):
+      home_vars.append('USERPROFILE')
+    for home_var in home_vars:
+      home = os.getenv(home_var)
+      if home != None:
+        home_dot_gyp = os.path.join(home, '.gyp')
+        if not os.path.exists(home_dot_gyp):
+          home_dot_gyp = None
+        else:
+          break
+
+  if home_dot_gyp:
+    include_gypi = os.path.join(home_dot_gyp, "include.gypi")
+    if os.path.exists(include_gypi):
+      supplemental_files += [include_gypi]
+
+  # GYP defines from the supplemental.gypi files.
+  supp_items = []
+  for supplement in supplemental_files:
+    with open(supplement, 'r') as f:
+      try:
+        file_data = eval(f.read(), {'__builtins__': None}, None)
+      except SyntaxError, e:
+        e.filename = os.path.abspath(supplement)
+        raise
+      variables = file_data.get('variables', [])
+      for v in variables:
+        supp_items += [(v, str(variables[v]))]
+
+  # GYP defines from the environment.
+  env_items = ProcessGypDefinesItems(
+      shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+  # GYP defines from the command line.
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-D', dest='defines', action='append', default=[])
+  cmdline_input_items = parser.parse_known_args()[0].defines
+  cmdline_items = ProcessGypDefinesItems(cmdline_input_items)
+
+  vars_dict = dict(supp_items + env_items + cmdline_items)
+  return vars_dict
+
+
+def GetOutputDirectory():
+  """Returns the output directory that GYP will use."""
+
+  # Handle command line generator flags.
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-G', dest='genflags', default=[], action='append')
+  genflags = parser.parse_known_args()[0].genflags
+
+  # Handle generator flags from the environment.
+  genflags += shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', ''))
+
+  needle = 'output_dir='
+  for item in genflags:
+    if item.startswith(needle):
+      return item[len(needle):]
+
+  return 'out'
+
+
+def additional_include_files(supplemental_files, args=[]):
+  """
+  Returns a list of additional (.gypi) files to include, without duplicating
+  ones that are already specified on the command line. The list of supplemental
+  include files is passed in as an argument.
+  """
+  # Determine the include files specified on the command line.
+  # This doesn't cover all the different option formats you can use,
+  # but it's mainly intended to avoid duplicating flags on the automatic
+  # makefile regeneration which only uses this format.
+  specified_includes = set()
+  for arg in args:
+    if arg.startswith('-I') and len(arg) > 2:
+      specified_includes.add(os.path.realpath(arg[2:]))
+
+  result = []
+  def AddInclude(path):
+    if os.path.realpath(path) not in specified_includes:
+      result.append(path)
+
+  if os.environ.get('GYP_INCLUDE_FIRST') != None:
+    AddInclude(os.path.join(chrome_src, os.environ.get('GYP_INCLUDE_FIRST')))
+
+  # Always include common.gypi.
+  AddInclude(os.path.join(script_dir, 'common.gypi'))
+
+  # Optionally add supplemental .gypi files if present.
+  for supplement in supplemental_files:
+    AddInclude(supplement)
+
+  if os.environ.get('GYP_INCLUDE_LAST') != None:
+    AddInclude(os.path.join(chrome_src, os.environ.get('GYP_INCLUDE_LAST')))
+
+  return result
+
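+# Illustrative sketch (paths are hypothetical): with foo.gypi already given
+# on the command line,
+#   additional_include_files(['foo.gypi', 'sup.gypi'], ['-Ifoo.gypi'])
+# returns common.gypi (from this script's directory) and 'sup.gypi', but
+# skips foo.gypi since it was already specified.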
+
+if __name__ == '__main__':
+  # Disabling garbage collection saves about 1 second out of 16 on a Linux
+  # z620 workstation. Since this is a short-lived process, it's not a problem
+  # to leak a few cyclic references in order to spare the CPU cycles for
+  # scanning the heap.
+  import gc
+  gc.disable()
+
+  args = sys.argv[1:]
+
+  use_analyzer = len(args) and args[0] == '--analyzer'
+  if use_analyzer:
+    args.pop(0)
+    os.environ['GYP_GENERATORS'] = 'analyzer'
+    args.append('-Gconfig_path=' + args.pop(0))
+    args.append('-Ganalyzer_output_path=' + args.pop(0))
+
+  if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
+    print 'Skipping gyp_chromium due to GYP_CHROMIUM_NO_ACTION env var.'
+    sys.exit(0)
+
+  # Use the Psyco JIT if available.
+  if psyco:
+    psyco.profile()
+    print "Enabled Psyco JIT."
+
+  # Fall back on hermetic python if we happen to get run under cygwin.
+  # TODO(bradnelson): take this out once this issue is fixed:
+  #    http://code.google.com/p/gyp/issues/detail?id=177
+  if sys.platform == 'cygwin':
+    import find_depot_tools
+    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
+    python_dir = sorted(glob.glob(os.path.join(depot_tools_path,
+                                               'python2*_bin')))[-1]
+    env = os.environ.copy()
+    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
+    cmd = [os.path.join(python_dir, 'python.exe')] + sys.argv
+    sys.exit(subprocess.call(cmd, env=env))
+
+  # This could give false positives since it doesn't actually do real option
+  # parsing.  Oh well.
+  gyp_file_specified = any(arg.endswith('.gyp') for arg in args)
+
+  gyp_environment.SetEnvironment()
+
+  # If we didn't get a file, check an env var, and then fall back to
+  # assuming 'all.gyp' from the same directory as the script.
+  if not gyp_file_specified:
+    gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
+    if gyp_file:
+      # Note that CHROMIUM_GYP_FILE values can't have backslashes as
+      # path separators even on Windows due to the use of shlex.split().
+      args.extend(shlex.split(gyp_file))
+    else:
+      args.append(os.path.join(script_dir, 'all.gyp'))
+
+  supplemental_includes = GetSupplementalFiles()
+  gyp_vars_dict = GetGypVars(supplemental_includes)
+  # There shouldn't be a circular dependency relationship between .gyp files,
+  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
+  # currently exist.  The check for circular dependencies is currently
+  # bypassed on other platforms, but is left enabled on iOS, where a violation
+  # of the rule causes Xcode to misbehave badly.
+  # TODO(mark): Find and kill remaining circular dependencies, and remove this
+  # option.  http://crbug.com/35878.
+  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
+  # list.
+  if gyp_vars_dict.get('OS') != 'ios':
+    args.append('--no-circular-check')
+
+  # libtool on Mac warns about duplicate basenames in static libraries, so
+  # they're disallowed in general by gyp. We are lax on this point, so disable
+  # this check other than on Mac. GN does not use static libraries as heavily,
+  # so over time this restriction will mostly go away anyway, even on Mac.
+  # https://code.google.com/p/gyp/issues/detail?id=384
+  if sys.platform != 'darwin':
+    args.append('--no-duplicate-basename-check')
+
+  # We explicitly don't support the make gyp generator (crbug.com/348686). Be
+  # nice and fail here, rather than choking in gyp.
+  if re.search(r'(^|,|\s)make($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
+    print 'Error: make gyp generator not supported (check GYP_GENERATORS).'
+    sys.exit(1)
+
+  # We explicitly don't support the native msvs gyp generator. Be nice and
+  # fail here, rather than generating broken projects.
+  if re.search(r'(^|,|\s)msvs($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
+    print 'Error: msvs gyp generator not supported (check GYP_GENERATORS).'
+    print 'Did you mean to use the `msvs-ninja` generator?'
+    sys.exit(1)
+
+  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
+  # to enforce syntax checking.
+  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
+  if syntax_check and int(syntax_check):
+    args.append('--check')
+
+  # TODO(dmikurube): Remove these checks and messages after a while.
+  if ('linux_use_tcmalloc' in gyp_vars_dict or
+      'android_use_tcmalloc' in gyp_vars_dict):
+    print '*****************************************************************'
+    print '"linux_use_tcmalloc" and "android_use_tcmalloc" are deprecated!'
+    print '-----------------------------------------------------------------'
+    print 'You specified "linux_use_tcmalloc" or "android_use_tcmalloc" in'
+    print 'your GYP_DEFINES. Please switch them to "use_allocator" now.'
+    print 'See http://crbug.com/345554 for the details.'
+    print '*****************************************************************'
+
+  # Automatically turn on crosscompile support for platforms that need it.
+  # (The Chrome OS build sets CC_host / CC_target which implicitly enables
+  # this mode.)
+  if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
+          gyp_vars_dict.get('OS') in ['android', 'ios'],
+          'GYP_CROSSCOMPILE' not in os.environ)):
+    os.environ['GYP_CROSSCOMPILE'] = '1'
+  if gyp_vars_dict.get('OS') == 'android':
+    args.append('--check')
+
+  args.extend(
+      ['-I' + i for i in additional_include_files(supplemental_includes, args)])
+
+  args.extend(['-D', 'gyp_output_dir=' + GetOutputDirectory()])
+
+  if not use_analyzer:
+    print 'Updating projects from gyp files...'
+    sys.stdout.flush()
+
+  # Off we go...
+  gyp_rc = gyp.main(args)
+
+  if not use_analyzer:
+    vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
+    if vs2013_runtime_dll_dirs:
+      x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
+      vs_toolchain.CopyVsRuntimeDlls(
+        os.path.join(chrome_src, GetOutputDirectory()),
+        (x86_runtime, x64_runtime))
+
+  sys.exit(gyp_rc)
diff --git a/build/gyp_chromium.py b/build/gyp_chromium.py
new file mode 100644
index 0000000..f9e8ac8
--- /dev/null
+++ b/build/gyp_chromium.py
@@ -0,0 +1,18 @@
+# Copyright 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is (possibly, depending on python version) imported by
+# gyp_chromium when GYP_PARALLEL=1 and it creates sub-processes
+# through the multiprocessing library.
+
+# Importing in Python 2.6 (fixed in 2.7) on Windows doesn't search for
+# imports that don't end in .py (and aren't directories with an
+# __init__.py). This wrapper makes "import gyp_chromium" work with
+# those old versions and makes it possible to execute gyp_chromium.py
+# directly on Windows where the extension is useful.
+
+import os
+
+path = os.path.abspath(os.path.split(__file__)[0])
+execfile(os.path.join(path, 'gyp_chromium'))
diff --git a/build/gyp_chromium_test.py b/build/gyp_chromium_test.py
new file mode 100755
index 0000000..0c0e479
--- /dev/null
+++ b/build/gyp_chromium_test.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+import unittest
+
+SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
+SRC_DIR = os.path.dirname(SCRIPT_DIR)
+
+sys.path.append(os.path.join(SRC_DIR, 'third_party', 'pymock'))
+
+import mock
+
+# TODO(sbc): Make gyp_chromium more testable by putting the code in
+# a .py file.
+gyp_chromium = __import__('gyp_chromium')
+
+
+class TestGetOutputDirectory(unittest.TestCase):
+  @mock.patch('os.environ', {})
+  @mock.patch('sys.argv', [__file__])
+  def testDefaultValue(self):
+    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'out')
+
+  @mock.patch('os.environ', {'GYP_GENERATOR_FLAGS': 'output_dir=envfoo'})
+  @mock.patch('sys.argv', [__file__])
+  def testEnvironment(self):
+    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'envfoo')
+
+  @mock.patch('os.environ', {'GYP_GENERATOR_FLAGS': 'output_dir=envfoo'})
+  @mock.patch('sys.argv', [__file__, '-Goutput_dir=cmdfoo'])
+  def testGFlagOverridesEnv(self):
+    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'cmdfoo')
+
+  @mock.patch('os.environ', {})
+  @mock.patch('sys.argv', [__file__, '-G', 'output_dir=foo'])
+  def testGFlagWithSpace(self):
+    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'foo')
+
+
+class TestGetGypVars(unittest.TestCase):
+  @mock.patch('os.environ', {})
+  def testDefault(self):
+    self.assertEqual(gyp_chromium.GetGypVars([]), {})
+
+  @mock.patch('os.environ', {})
+  @mock.patch('sys.argv', [__file__, '-D', 'foo=bar'])
+  def testDFlags(self):
+    self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': 'bar'})
+
+  @mock.patch('os.environ', {})
+  @mock.patch('sys.argv', [__file__, '-D', 'foo'])
+  def testDFlagsNoValue(self):
+    self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': '1'})
+
+  @mock.patch('os.environ', {})
+  @mock.patch('sys.argv', [__file__, '-D', 'foo=bar', '-Dbaz'])
+  def testDFlagMulti(self):
+    self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': 'bar', 'baz': '1'})
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/gyp_environment.py b/build/gyp_environment.py
new file mode 100644
index 0000000..fb50645
--- /dev/null
+++ b/build/gyp_environment.py
@@ -0,0 +1,33 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Sets up various automatic gyp environment variables. These are used by
+gyp_chromium and landmines.py which run at different stages of runhooks. To
+make sure settings are consistent between them, all setup should happen here.
+"""
+
+import gyp_helper
+import os
+import sys
+import vs_toolchain
+
+def SetEnvironment():
+  """Sets defaults for GYP_* variables."""
+  gyp_helper.apply_chromium_gyp_env()
+
+  # Default to ninja on linux and windows, but only if no generator has
+  # explicitly been set.
+  # Also default to ninja on mac, but only when not building chrome/ios.
+  # . -f / --format has precedence over the env var, no need to check for it
+  # . set the env var only if it hasn't been set yet
+  # . chromium.gyp_env has been applied to os.environ at this point already
+  if sys.platform.startswith(('linux', 'win', 'freebsd')) and \
+      not os.environ.get('GYP_GENERATORS'):
+    os.environ['GYP_GENERATORS'] = 'ninja'
+  elif sys.platform == 'darwin' and not os.environ.get('GYP_GENERATORS') and \
+      'OS=ios' not in os.environ.get('GYP_DEFINES', ''):
+    os.environ['GYP_GENERATORS'] = 'ninja'
+
+  vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
diff --git a/build/gyp_helper.py b/build/gyp_helper.py
new file mode 100644
index 0000000..c840f2d
--- /dev/null
+++ b/build/gyp_helper.py
@@ -0,0 +1,68 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file helps gyp_chromium and landmines correctly set up the gyp
+# environment from chromium.gyp_env on disk
+
+import os
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.dirname(SCRIPT_DIR)
+
+
+def apply_gyp_environment_from_file(file_path):
+  """Reads in a *.gyp_env file and applies the valid keys to os.environ."""
+  if not os.path.exists(file_path):
+    return
+  with open(file_path, 'rU') as f:
+    file_contents = f.read()
+  try:
+    file_data = eval(file_contents, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = os.path.abspath(file_path)
+    raise
+  supported_vars = (
+      'CC',
+      'CC_wrapper',
+      'CC.host_wrapper',
+      'CHROMIUM_GYP_FILE',
+      'CHROMIUM_GYP_SYNTAX_CHECK',
+      'CXX',
+      'CXX_wrapper',
+      'CXX.host_wrapper',
+      'GYP_DEFINES',
+      'GYP_GENERATOR_FLAGS',
+      'GYP_CROSSCOMPILE',
+      'GYP_GENERATOR_OUTPUT',
+      'GYP_GENERATORS',
+      'GYP_INCLUDE_FIRST',
+      'GYP_INCLUDE_LAST',
+      'GYP_MSVS_VERSION',
+  )
+  for var in supported_vars:
+    file_val = file_data.get(var)
+    if file_val:
+      if var in os.environ:
+        behavior = 'replaces'
+        if var == 'GYP_DEFINES':
+          result = file_val + ' ' + os.environ[var]
+          behavior = 'merges with, and individual components override,'
+        else:
+          result = os.environ[var]
+        print 'INFO: Environment value for "%s" %s value in %s' % (
+            var, behavior, os.path.abspath(file_path)
+        )
+        string_padding = max(len(var), len(file_path), len('result'))
+        print '      %s: %s' % (var.rjust(string_padding), os.environ[var])
+        print '      %s: %s' % (file_path.rjust(string_padding), file_val)
+        os.environ[var] = result
+      else:
+        os.environ[var] = file_val
+
+
+def apply_chromium_gyp_env():
+  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
+    # Update the environment based on chromium.gyp_env
+    path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
+    apply_gyp_environment_from_file(path)
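+
+# Illustrative sketch of a chromium.gyp_env file (the values are
+# hypothetical): the file is a Python dict literal, and only keys listed in
+# supported_vars above are applied to os.environ:
+#   { 'GYP_DEFINES': 'OS=android', 'GYP_GENERATORS': 'ninja' }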
diff --git a/build/gypi_to_gn.py b/build/gypi_to_gn.py
new file mode 100644
index 0000000..a107f94
--- /dev/null
+++ b/build/gypi_to_gn.py
@@ -0,0 +1,167 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Converts a given gypi file to a python scope and writes the result to stdout.
+
+It is assumed that the file contains a toplevel dictionary, and this script
+will return that dictionary as a GN "scope" (see example below). This script
+does not know anything about GYP and it will not expand variables or execute
+conditions.
+
+It will strip conditions blocks.
+
+A variables block at the top level will be flattened so that the variables
+appear in the root dictionary. This way they can be returned to the GN code.
+
+Say your_file.gypi looked like this:
+  {
+     'sources': [ 'a.cc', 'b.cc' ],
+     'defines': [ 'ENABLE_DOOM_MELON' ],
+  }
+
+You would call it like this:
+  gypi_values = exec_script("//build/gypi_to_gn.py",
+                            [ rebase_path("your_file.gypi") ],
+                            "scope",
+                            [ "your_file.gypi" ])
+
+Notes:
+ - The rebase_path call converts the gypi file from being relative to the
+   current build file to being system absolute for calling the script, which
+   will have a different current directory than this file.
+
+ - The "scope" parameter tells GN to interpret the result as a series of GN
+   variable assignments.
+
+ - The last file argument to exec_script tells GN that the given file is a
+   dependency of the build so Ninja can automatically re-run GN if the file
+   changes.
+
+Read the values into a target like this:
+  component("mycomponent") {
+    sources = gypi_values.sources
+    defines = gypi_values.defines
+  }
+
+Sometimes your .gypi file will include paths relative to a different
+directory than the current .gn file. In this case, you can rebase them to
+be relative to the current directory.
+  sources = rebase_path(gypi_values.sources, ".",
+                        "//path/gypi/input/values/are/relative/to")
+
+This script tolerates a toplevel dictionary with or without a 'variables'
+entry. If the toplevel dictionary contains just one item called 'variables',
+it will be collapsed away and the result will be the contents of that
+dictionary. Some
+.gypi files are written with or without this, depending on how they expect to
+be embedded into a .gyp file.
+
+This script also has the ability to replace certain substrings in the input.
+Generally this is used to emulate GYP variable expansion. If you passed the
+argument "--replace=<(foo)=bar" then all instances of "<(foo)" in strings in
+the input will be replaced with "bar":
+
+  gypi_values = exec_script("//build/gypi_to_gn.py",
+                            [ rebase_path("your_file.gypi"),
+                              "--replace=<(foo)=bar"],
+                            "scope",
+                            [ "your_file.gypi" ])
+
+"""
+
+import gn_helpers
+from optparse import OptionParser
+import sys
+
+def LoadPythonDictionary(path):
+  file_string = open(path).read()
+  try:
+    file_data = eval(file_string, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = path
+    raise
+  except Exception, e:
+    raise Exception("Unexpected error while reading %s: %s" % (path, str(e)))
+
+  assert isinstance(file_data, dict), "%s does not eval to a dictionary" % path
+
+  # Flatten any variables to the top level.
+  if 'variables' in file_data:
+    file_data.update(file_data['variables'])
+    del file_data['variables']
+
+  # Strip any conditions.
+  if 'conditions' in file_data:
+    del file_data['conditions']
+  if 'target_conditions' in file_data:
+    del file_data['target_conditions']
+
+  # Strip targets in the toplevel, since some files define these and we can't
+  # slurp them in.
+  if 'targets' in file_data:
+    del file_data['targets']
+
+  return file_data
+
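+# Illustrative sketch (file contents are hypothetical): a .gypi containing
+#   { 'variables': { 'foo': 1 }, 'conditions': [ '...' ] }
+# loads as { 'foo': 1 }: the 'variables' block is flattened into the top
+# level and the 'conditions' block is stripped, as described above.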
+
+def ReplaceSubstrings(values, search_for, replace_with):
+  """Recursively replaces substrings in a value.
+
+  Replaces all occurrences of "search_for" with "replace_with" in all
+  strings occurring in "values". This is done by recursively iterating into
+  lists as well as the keys and values of dictionaries."""
+  if isinstance(values, str):
+    return values.replace(search_for, replace_with)
+
+  if isinstance(values, list):
+    return [ReplaceSubstrings(v, search_for, replace_with) for v in values]
+
+  if isinstance(values, dict):
+    # For dictionaries, do the search for both the key and values.
+    result = {}
+    for key, value in values.items():
+      new_key = ReplaceSubstrings(key, search_for, replace_with)
+      new_value = ReplaceSubstrings(value, search_for, replace_with)
+      result[new_key] = new_value
+    return result
+
+  # Assume everything else is unchanged.
+  return values
+
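+# Illustrative sketch (the values are hypothetical):
+#   ReplaceSubstrings({'sources': ['<(foo)/a.cc']}, '<(foo)', 'bar')
+#     -> {'sources': ['bar/a.cc']}
+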
+def main():
+  parser = OptionParser()
+  parser.add_option("-r", "--replace", action="append",
+    help="Replaces substrings. If passed a=b, replaces all substrs a with b.")
+  (options, args) = parser.parse_args()
+
+  if len(args) != 1:
+    raise Exception("Need one argument which is the .gypi file to read.")
+
+  data = LoadPythonDictionary(args[0])
+  if options.replace:
+    # Do replacements for all specified patterns.
+    for replace in options.replace:
+      split = replace.split('=')
+      # Allow "foo=" to replace with nothing.
+      if len(split) == 1:
+        split.append('')
+      assert len(split) == 2, "Replacement must be of the form 'key=value'."
+      data = ReplaceSubstrings(data, split[0], split[1])
+
+  # Sometimes .gypi files use the GYP syntax with percents at the end of the
+  # variable name (to indicate not to overwrite a previously-defined value):
+  #   'foo%': 'bar',
+  # Convert these to regular variables.
+  # Iterate over a copy of the keys, since the loop mutates the dictionary.
+  for key in data.keys():
+    if len(key) > 1 and key[-1] == '%':
+      data[key[:-1]] = data[key]
+      del data[key]
+
+  print gn_helpers.ToGNString(data)
+
+if __name__ == '__main__':
+  try:
+    main()
+  except Exception, e:
+    print str(e)
+    sys.exit(1)
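+
+# Illustrative sketch of the '%'-suffix handling in main() (values are
+# hypothetical): an input of {'foo%': 'bar'} is emitted the same as
+# {'foo': 'bar'}, i.e. as foo = "bar" in the resulting GN scope.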
diff --git a/build/host_jar.gypi b/build/host_jar.gypi
new file mode 100644
index 0000000..a47f6bb
--- /dev/null
+++ b/build/host_jar.gypi
@@ -0,0 +1,146 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule to build
+# a JAR file for use on a host in a consistent manner. If a main class is
+# specified, this file will also generate an executable to run the jar in the
+# output folder's /bin/ directory.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_jar',
+#   'type': 'none',
+#   'variables': {
+#     'src_paths': [
+#       'path/to/directory',
+#       'path/to/other/directory',
+#       'path/to/individual_file.java',
+#       ...
+#     ],
+#   },
+#   'includes': [ 'path/to/this/gypi/file' ],
+# }
+#
+# Required variables:
+#   src_paths - A list of all paths containing java files that should be
+#     included in the jar. Paths can be either directories or files.
+# Optional/automatic variables:
+#   excluded_src_paths - A list of all paths that should be excluded from
+#     the jar.
+#   generated_src_dirs - Directories containing additional .java files
+#     generated at build time.
+#   input_jars_paths - A list of paths to the jars that should be included
+#     in the classpath.
+#   main_class - The class containing the main() function that should be called
+#     when running the jar file.
+#   jar_excluded_classes - A list of .class files that should be excluded
+#     from the jar.
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/setup.gyp:build_output_dirs',
+  ],
+  'variables': {
+    'classes_dir': '<(intermediate_dir)/classes',
+    'excluded_src_paths': [],
+    'generated_src_dirs': [],
+    'input_jars_paths': [],
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+    'jar_dir': '<(PRODUCT_DIR)/lib.java',
+    'jar_excluded_classes': [],
+    'jar_name': '<(_target_name).jar',
+    'jar_path': '<(jar_dir)/<(jar_name)',
+    'main_class%': '',
+    'stamp': '<(intermediate_dir)/jar.stamp',
+    'enable_errorprone%': '0',
+    'errorprone_exe_path': '<(PRODUCT_DIR)/bin.java/chromium_errorprone',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['<(jar_path)']
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'javac_<(_target_name)',
+      'message': 'Compiling <(_target_name) java sources',
+      'variables': {
+        'extra_args': [],
+        'extra_inputs': [],
+        'java_sources': [ '<!@(find <@(src_paths) -name "*.java")' ],
+        'conditions': [
+          ['"<(excluded_src_paths)" != ""', {
+            'java_sources!': ['<!@(find <@(excluded_src_paths) -name "*.java")']
+          }],
+          ['"<(jar_excluded_classes)" != ""', {
+            'extra_args': ['--jar-excluded-classes=<(jar_excluded_classes)']
+          }],
+          ['main_class != ""', {
+            'extra_args': ['--main-class=>(main_class)']
+          }],
+          ['enable_errorprone == 1', {
+            'extra_inputs': [
+              '<(errorprone_exe_path)',
+            ],
+            'extra_args': [ '--use-errorprone-path=<(errorprone_exe_path)' ],
+          }],
+        ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/javac.py',
+        '^@(java_sources)',
+        '>@(input_jars_paths)',
+        '<@(extra_inputs)',
+      ],
+      'outputs': [
+        '<(jar_path)',
+        '<(stamp)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/javac.py',
+        '--classpath=>(input_jars_paths)',
+        '--src-gendirs=>(generated_src_dirs)',
+        '--chromium-code=<(chromium_code)',
+        '--stamp=<(stamp)',
+        '--jar-path=<(jar_path)',
+        '<@(extra_args)',
+        '^@(java_sources)',
+      ],
+    },
+  ],
+  'conditions': [
+    ['main_class != ""', {
+      'actions': [
+        {
+          'action_name': 'create_java_binary_script_<(_target_name)',
+          'message': 'Creating java binary script <(_target_name)',
+          'variables': {
+            'output': '<(PRODUCT_DIR)/bin/<(_target_name)',
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/create_java_binary_script.py',
+            '<(jar_path)',
+          ],
+          'outputs': [
+            '<(output)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/create_java_binary_script.py',
+            '--classpath=>(input_jars_paths)',
+            '--jar-path=<(jar_path)',
+            '--output=<(output)',
+            '--main-class=>(main_class)',
+          ]
+        }
+      ]
+    }],
+    ['enable_errorprone == 1', {
+      'dependencies': [
+        '<(DEPTH)/third_party/errorprone/errorprone.gyp:chromium_errorprone',
+      ],
+    }],
+  ]
+}
+
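+# Illustrative sketch of a consuming target (names are hypothetical),
+# following the template above and adding the optional main_class so a
+# launcher script is also generated under <(PRODUCT_DIR)/bin/:
+# {
+#   'target_name': 'my_tool_java',
+#   'type': 'none',
+#   'variables': {
+#     'src_paths': [ 'java/src' ],
+#     'main_class': 'org.example.mytool.Main',
+#   },
+#   'includes': [ '../build/host_jar.gypi' ],
+# }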
diff --git a/build/host_prebuilt_jar.gypi b/build/host_prebuilt_jar.gypi
new file mode 100644
index 0000000..feed5ca
--- /dev/null
+++ b/build/host_prebuilt_jar.gypi
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule to
+# copy a prebuilt JAR for use on a host to the output directory.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_prebuilt_jar',
+#   'type': 'none',
+#   'variables': {
+#     'jar_path': 'path/to/prebuilt.jar',
+#   },
+#   'includes': [ 'path/to/this/gypi/file' ],
+# }
+#
+# Required variables:
+#   jar_path - The path to the prebuilt jar.
+
+{
+  'dependencies': [
+  ],
+  'variables': {
+    'dest_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).jar',
+    'src_path': '<(jar_path)',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': [
+        '<(dest_path)',
+      ]
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'copy_prebuilt_jar',
+      'message': 'Copy <(src_path) to <(dest_path)',
+      'inputs': [
+        '<(src_path)',
+      ],
+      'outputs': [
+        '<(dest_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/cp.py', '<(src_path)', '<(dest_path)',
+      ],
+    }
+  ]
+}
diff --git a/build/install-android-sdks.sh b/build/install-android-sdks.sh
new file mode 100755
index 0000000..1119b7d
--- /dev/null
+++ b/build/install-android-sdks.sh
@@ -0,0 +1,20 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install SDKs needed to build chromium on android.
+# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions
+
+echo 'checking for sdk packages install'
+# Use absolute path to call 'android' so script can be run from any directory.
+cwd=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+# Get the SDK extras packages to install from the DEPS file 'sdkextras' hook.
+packages="$(python ${cwd}/get_sdk_extras_packages.py)"
+if [[ -n "${packages}" ]]; then
+  ${cwd}/../third_party/android_tools/sdk/tools/android update sdk --no-ui \
+      --filter ${packages}
+fi
+
+echo "install-android-sdks.sh complete."
diff --git a/build/install-build-deps-android.sh b/build/install-build-deps-android.sh
new file mode 100755
index 0000000..cf87381
--- /dev/null
+++ b/build/install-build-deps-android.sh
@@ -0,0 +1,100 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium on android, including
+# items requiring sudo privileges.
+# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions
+
+# Despite the references to sun-java6 in some messages below, this script
+# installs the openjdk-7 packages (jre and jdk) and switches the default
+# Java to OpenJDK 7.
+
+args="$@"
+if test "$1" = "--skip-sdk-packages"; then
+  skip_inst_sdk_packages=1
+  args="${@:2}"
+else
+  skip_inst_sdk_packages=0
+fi
+
+if ! uname -m | egrep -q "i686|x86_64"; then
+  echo "Only x86 architectures are currently supported" >&2
+  exit
+fi
+
+# Install first the default Linux build deps.
+"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \
+  --no-syms --lib32 --no-arm --no-chromeos-fonts --no-nacl --no-prompt "${args}"
+
+lsb_release=$(lsb_release --codename --short)
+
+# The temporary directory used to store output of update-java-alternatives
+TEMPDIR=$(mktemp -d)
+cleanup() {
+  local status=${?}
+  trap - EXIT
+  rm -rf "${TEMPDIR}"
+  exit ${status}
+}
+trap cleanup EXIT
+
+# Fix deps
+sudo apt-get -f install
+
+# Install deps
+# This step differs depending on what Ubuntu release we are running
+# on since the package names are different, and Sun's Java must
+# be installed manually on late-model versions.
+
+# common
+sudo apt-get -y install lighttpd python-pexpect xvfb x11-utils
+
+# Some binaries in the Android SDK require 32-bit libraries on the host.
+# See https://developer.android.com/sdk/installing/index.html?pkg=tools
+if [[ $lsb_release == "precise" ]]; then
+  sudo apt-get -y install ia32-libs
+else
+  sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386
+fi
+
+sudo apt-get -y install ant
+
+# Install openjdk and openjre 7 stuff
+sudo apt-get -y install openjdk-7-jre openjdk-7-jdk
+
+# Switch version of Java to openjdk 7.
+# Some Java plugins (e.g. for firefox, mozilla) are not required to build, and
+# thus are treated only as warnings. Any errors in updating java alternatives
+# which are not '*-javaplugin.so' will cause errors and stop the script from
+# completing successfully.
+if ! sudo update-java-alternatives -s java-1.7.0-openjdk-amd64 \
+           >& "${TEMPDIR}"/update-java-alternatives.out
+then
+  # Check that there are the expected javaplugin.so errors for the update
+  if grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out >& \
+      /dev/null
+  then
+    # Print as warnings all the javaplugin.so errors
+    echo 'WARNING: java-6-sun has no alternatives for the following plugins:'
+    grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
+  fi
+  # Check if there are any errors that are not javaplugin.so
+  if grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out \
+      >& /dev/null
+  then
+    # If there are non-javaplugin.so errors, treat as errors and exit
+    echo 'ERRORS: Failed to update alternatives for java-6-sun:'
+    grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
+    exit 1
+  fi
+fi
+
+# Install SDK packages for android
+if test "$skip_inst_sdk_packages" != 1; then
+  "$(dirname "${BASH_SOURCE[0]}")/install-android-sdks.sh"
+fi
+
+echo "install-build-deps-android.sh complete."
diff --git a/build/install-build-deps.py b/build/install-build-deps.py
new file mode 100755
index 0000000..7cc3760
--- /dev/null
+++ b/build/install-build-deps.py
@@ -0,0 +1,430 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import operator
+import os
+import platform
+import re
+import subprocess
+import sys
+
+
+SUPPORTED_UBUNTU_VERSIONS = (
+  {'number': '12.04', 'codename': 'precise'},
+  {'number': '14.04', 'codename': 'trusty'},
+  {'number': '14.10', 'codename': 'utopic'},
+  {'number': '15.04', 'codename': 'vivid'},
+)
+
+
+# Packages needed for chromeos only.
+_packages_chromeos_dev = (
+  'libbluetooth-dev',
+  'libxkbcommon-dev',
+  'realpath',
+)
+
+
+# Packages needed for development.
+_packages_dev = (
+  'apache2.2-bin',
+  'bison',
+  'cdbs',
+  'curl',
+  'devscripts',
+  'dpkg-dev',
+  'elfutils',
+  'fakeroot',
+  'flex',
+  'fonts-thai-tlwg',
+  'g++',
+  'git-core',
+  'git-svn',
+  'gperf',
+  'language-pack-da',
+  'language-pack-fr',
+  'language-pack-he',
+  'language-pack-zh-hant',
+  'libapache2-mod-php5',
+  'libasound2-dev',
+  'libav-tools',
+  'libbrlapi-dev',
+  'libbz2-dev',
+  'libcairo2-dev',
+  'libcap-dev',
+  'libcups2-dev',
+  'libcurl4-gnutls-dev',
+  'libdrm-dev',
+  'libelf-dev',
+  'libexif-dev',
+  'libgconf2-dev',
+  'libglib2.0-dev',
+  'libglu1-mesa-dev',
+  'libgnome-keyring-dev',
+  'libgtk2.0-dev',
+  'libkrb5-dev',
+  'libnspr4-dev',
+  'libnss3-dev',
+  'libpam0g-dev',
+  'libpci-dev',
+  'libpulse-dev',
+  'libsctp-dev',
+  'libspeechd-dev',
+  'libsqlite3-dev',
+  'libssl-dev',
+  'libudev-dev',
+  'libwww-perl',
+  'libxslt1-dev',
+  'libxss-dev',
+  'libxt-dev',
+  'libxtst-dev',
+  'openbox',
+  'patch',
+  'perl',
+  'php5-cgi',
+  'pkg-config',
+  'python',
+  'python-cherrypy3',
+  'python-crypto',
+  'python-dev',
+  'python-numpy',
+  'python-opencv',
+  'python-openssl',
+  'python-psutil',
+  'python-yaml',
+  'rpm',
+  'ruby',
+  'subversion',
+  'ttf-dejavu-core',
+  'ttf-indic-fonts',
+  'ttf-kochi-gothic',
+  'ttf-kochi-mincho',
+  'wdiff',
+  'xfonts-mathml',
+  'zip',
+)
+
+
+# Run-time libraries required by chromeos only.
+_packages_chromeos_lib = (
+  'libbz2-1.0',
+  'libpulse0',
+)
+
+
+# Full list of required run-time libraries.
+_packages_lib = (
+  'libasound2',
+  'libatk1.0-0',
+  'libc6',
+  'libcairo2',
+  'libcap2',
+  'libcups2',
+  'libexif12',
+  'libexpat1',
+  'libfontconfig1',
+  'libfreetype6',
+  'libglib2.0-0',
+  'libgnome-keyring0',
+  'libgtk2.0-0',
+  'libpam0g',
+  'libpango1.0-0',
+  'libpci3',
+  'libpcre3',
+  'libpixman-1-0',
+  'libpng12-0',
+  'libspeechd2',
+  'libsqlite3-0',
+  'libstdc++6',
+  'libx11-6',
+  'libxau6',
+  'libxcb1',
+  'libxcomposite1',
+  'libxcursor1',
+  'libxdamage1',
+  'libxdmcp6',
+  'libxext6',
+  'libxfixes3',
+  'libxi6',
+  'libxinerama1',
+  'libxrandr2',
+  'libxrender1',
+  'libxtst6',
+  'zlib1g',
+)
+
+
+# Debugging symbols for all of the run-time libraries.
+_packages_dbg = (
+  'libatk1.0-dbg',
+  'libc6-dbg',
+  'libcairo2-dbg',
+  'libfontconfig1-dbg',
+  'libglib2.0-0-dbg',
+  'libgtk2.0-0-dbg',
+  'libpango1.0-0-dbg',
+  'libpcre3-dbg',
+  'libpixman-1-0-dbg',
+  'libsqlite3-0-dbg',
+  'libx11-6-dbg',
+  'libxau6-dbg',
+  'libxcb1-dbg',
+  'libxcomposite1-dbg',
+  'libxcursor1-dbg',
+  'libxdamage1-dbg',
+  'libxdmcp6-dbg',
+  'libxext6-dbg',
+  'libxfixes3-dbg',
+  'libxi6-dbg',
+  'libxinerama1-dbg',
+  'libxrandr2-dbg',
+  'libxrender1-dbg',
+  'libxtst6-dbg',
+  'zlib1g-dbg',
+)
+
+
+# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf.
+_packages_lib32 = (
+  'linux-libc-dev:i386',
+)
+
+
+# arm cross toolchain packages needed to build chrome on armhf.
+_packages_arm = (
+  'g++-arm-linux-gnueabihf',
+  'libc6-dev-armhf-cross',
+  'linux-libc-dev-armhf-cross',
+)
+
+
+# Packages to build NaCl, its toolchains, and its ports.
+_packages_naclports = (
+  'ant',
+  'autoconf',
+  'bison',
+  'cmake',
+  'gawk',
+  'intltool',
+  'xsltproc',
+  'xutils-dev',
+)
+_packages_nacl = (
+  'g++-mingw-w64-i686',
+  'lib32ncurses5-dev',
+  'lib32z1-dev',
+  'libasound2:i386',
+  'libcap2:i386',
+  'libelf-dev:i386',
+  'libexif12:i386',
+  'libfontconfig1:i386',
+  'libgconf-2-4:i386',
+  'libglib2.0-0:i386',
+  'libgpm2:i386',
+  'libgtk2.0-0:i386',
+  'libncurses5:i386',
+  'libnss3:i386',
+  'libpango1.0-0:i386',
+  'libssl1.0.0:i386',
+  'libtinfo-dev',
+  'libtinfo-dev:i386',
+  'libtool',
+  'libxcomposite1:i386',
+  'libxcursor1:i386',
+  'libxdamage1:i386',
+  'libxi6:i386',
+  'libxrandr2:i386',
+  'libxss1:i386',
+  'libxtst6:i386',
+  'texinfo',
+  'xvfb',
+)
+
+
+def is_userland_64_bit():
+  return platform.architecture()[0] == '64bit'
+
+
+def package_exists(pkg):
+  return pkg in subprocess.check_output(['apt-cache', 'pkgnames']).splitlines()
+
+
+def lsb_release_short_codename():
+  return subprocess.check_output(
+      ['lsb_release', '--codename', '--short']).strip()
+
+
+def write_error(message):
+  sys.stderr.write('ERROR: %s\n' % message)
+  sys.stderr.flush()
+
+
+def nonfatal_get_output(*popenargs, **kwargs):
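+  """Runs a command, returning (returncode, stdout, stderr) instead of
+  raising an exception on failure."""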
+  process = subprocess.Popen(
+      stdout=subprocess.PIPE, stderr=subprocess.PIPE, *popenargs, **kwargs)
+  stdout, stderr = process.communicate()
+  retcode = process.poll()
+  return retcode, stdout, stderr
+
+
+def compute_dynamic_package_lists():
+  global _packages_arm
+  global _packages_dbg
+  global _packages_dev
+  global _packages_lib
+  global _packages_lib32
+  global _packages_nacl
+
+  if is_userland_64_bit():
+    # 64-bit systems need a minimum set of 32-bit compat packages
+    # for the pre-built NaCl binaries.
+    _packages_dev += (
+      'lib32gcc1',
+      'lib32stdc++6',
+      'libc6-i386',
+    )
+
+    # When cross building for arm/Android on 64-bit systems the host binaries
+    # that are part of v8 need to be compiled with -m32 which means
+    # that basic multilib support is needed.
+    # gcc-multilib conflicts with the arm cross compiler (at least in trusty)
+    # but g++-X.Y-multilib gives us the 32-bit support that we need. Find out
+    # the appropriate value of X and Y by seeing what version the current
+    # distribution's g++-multilib package depends on.
+    output = subprocess.check_output(['apt-cache', 'depends', 'g++-multilib'])
+    multilib_package = re.search(r'g\+\+-[0-9.]+-multilib', output).group()
+    _packages_lib32 += (multilib_package,)
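+    # For illustration: on trusty, g++-multilib typically depends on
+    # g++-4.8-multilib, which is the package that ends up appended here.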
+
+  lsb_codename = lsb_release_short_codename()
+
+  # Find the proper version of libstdc++6-4.x-dbg.
+  if lsb_codename == 'precise':
+    _packages_dbg += ('libstdc++6-4.6-dbg',)
+  elif lsb_codename == 'trusty':
+    _packages_dbg += ('libstdc++6-4.8-dbg',)
+  else:
+    _packages_dbg += ('libstdc++6-4.9-dbg',)
+
+  # Workaround for dependency issue on Ubuntu/Trusty: http://crbug.com/435056 .
+  if lsb_codename == 'trusty':
+    _packages_arm += (
+      'g++-4.8-multilib-arm-linux-gnueabihf',
+      'gcc-4.8-multilib-arm-linux-gnueabihf',
+    )
+
+  # Find the proper version of libgbm-dev. We can't just install libgbm-dev as
+  # it depends on mesa, and only one version of mesa can exist on the system.
+  # Hence we must match the same version or this entire script will fail.
+  mesa_variant = ''
+  for variant in ('-lts-trusty', '-lts-utopic'):
+    rc, stdout, stderr = nonfatal_get_output(
+        ['dpkg-query', '-Wf${Status}', 'libgl1-mesa-glx' + variant])
+    if 'ok installed' in stdout:
+      mesa_variant = variant
+  _packages_dev += (
+    'libgbm-dev' + mesa_variant,
+    'libgl1-mesa-dev' + mesa_variant,
+    'libgles2-mesa-dev' + mesa_variant,
+    'mesa-common-dev' + mesa_variant,
+  )
+
+  if package_exists('ttf-mscorefonts-installer'):
+    _packages_dev += ('ttf-mscorefonts-installer',)
+  else:
+    _packages_dev += ('msttcorefonts',)
+
+  if package_exists('libnspr4-dbg'):
+    _packages_dbg += ('libnspr4-dbg', 'libnss3-dbg')
+    _packages_lib += ('libnspr4', 'libnss3')
+  else:
+    _packages_dbg += ('libnspr4-0d-dbg', 'libnss3-1d-dbg')
+    _packages_lib += ('libnspr4-0d', 'libnss3-1d')
+
+  if package_exists('libjpeg-dev'):
+    _packages_dev += ('libjpeg-dev',)
+  else:
+    _packages_dev += ('libjpeg62-dev',)
+
+  if package_exists('libudev1'):
+    _packages_dev += ('libudev1',)
+    _packages_nacl += ('libudev1:i386',)
+  else:
+    _packages_dev += ('libudev0',)
+    _packages_nacl += ('libudev0:i386',)
+
+  if package_exists('libbrlapi0.6'):
+    _packages_dev += ('libbrlapi0.6',)
+  else:
+    _packages_dev += ('libbrlapi0.5',)
+
+  # Some packages are only needed if the distribution actually supports
+  # installing them.
+  if package_exists('appmenu-gtk'):
+    _packages_lib += ('appmenu-gtk',)
+
+  _packages_dev += _packages_chromeos_dev
+  _packages_lib += _packages_chromeos_lib
+  _packages_nacl += _packages_naclports
+
+
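+# dpkg-query prints one "<package>:<status>" line per requested package, e.g.
+# "libasound2:install ok installed" (illustrative); any stderr output or a
+# non-zero exit status indicates a missing or unknown package.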
+def quick_check(packages):
+  rc, stdout, stderr = nonfatal_get_output([
+      'dpkg-query', '-W', '-f', '${PackageSpec}:${Status}\n'] + list(packages))
+  if rc == 0 and not stderr:
+    return 0
+  print stderr
+  return 1
+
+
+def main(argv):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--quick-check', action='store_true',
+                      help='quickly try to determine if dependencies are '
+                           'installed (this avoids interactive prompts and '
+                           'sudo commands so might not be 100% accurate)')
+  parser.add_argument('--unsupported', action='store_true',
+                      help='attempt installation even on unsupported systems')
+  args = parser.parse_args(argv)
+
+  lsb_codename = lsb_release_short_codename()
+  if not args.unsupported and not args.quick_check:
+    if lsb_codename not in map(
+        operator.itemgetter('codename'), SUPPORTED_UBUNTU_VERSIONS):
+      supported_ubuntus = ['%(number)s (%(codename)s)' % v
+                           for v in SUPPORTED_UBUNTU_VERSIONS]
+      write_error('Only Ubuntu %s are currently supported.' %
+                  ', '.join(supported_ubuntus))
+      return 1
+
+    if platform.machine() not in ('i686', 'x86_64'):
+      write_error('Only x86 architectures are currently supported.')
+      return 1
+
+  if os.geteuid() != 0 and not args.quick_check:
+    print 'Running as non-root user.'
+    print 'You might have to enter your password one or more times'
+    print 'for \'sudo\'.'
+    print
+
+  compute_dynamic_package_lists()
+
+  packages = (_packages_dev + _packages_lib + _packages_dbg + _packages_lib32 +
+              _packages_arm + _packages_nacl)
+  def packages_key(pkg):
+    # Sort by (name, architecture) so that arch-qualified packages such as
+    # "libfoo:i386" sort next to their base package.
+    s = pkg.rsplit(':', 1)
+    if len(s) == 1:
+      return (s[0], '')
+    return tuple(s)
+  packages = sorted(set(packages), key=packages_key)
+
+  if args.quick_check:
+    return quick_check(packages)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh
new file mode 100755
index 0000000..57f7216
--- /dev/null
+++ b/build/install-build-deps.sh
@@ -0,0 +1,477 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium (well, ideally, anyway)
+# See http://code.google.com/p/chromium/wiki/LinuxBuildInstructions
+# and http://code.google.com/p/chromium/wiki/LinuxBuild64Bit
+
+usage() {
+  echo "Usage: $0 [--options]"
+  echo "Options:"
+  echo "--[no-]syms: enable or disable installation of debugging symbols"
+  echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot"
+  echo "--[no-]arm: enable or disable installation of arm cross toolchain"
+  echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
+       "fonts"
+  echo "--[no-]nacl: enable or disable installation of prerequisites for"\
+       "building standalone NaCl and all its toolchains"
+  echo "--no-prompt: silently select standard options/defaults"
+  echo "--quick-check: quickly try to determine if dependencies are installed"
+  echo "               (this avoids interactive prompts and sudo commands,"
+  echo "               so might not be 100% accurate)"
+  echo "--unsupported: attempt installation even on unsupported systems"
+  echo "Script will prompt interactively if options not given."
+  exit 1
+}
+
+# Checks whether a particular package is available in the repos.
+# USAGE: $ package_exists <package name>
+package_exists() {
+  apt-cache pkgnames | grep -x "$1" > /dev/null 2>&1
+}
+
+# These default to on because (some) bots need them and it keeps things
+# simple for the bot setup if all bots just run the script in its default
+# mode.  Developers who don't want stuff they don't need installed on their
+# own workstations can pass --no-arm --no-nacl when running the script.
+do_inst_arm=1
+do_inst_nacl=1
+
+while test "$1" != ""
+do
+  case "$1" in
+  --syms)                   do_inst_syms=1;;
+  --no-syms)                do_inst_syms=0;;
+  --lib32)                  do_inst_lib32=1;;
+  --arm)                    do_inst_arm=1;;
+  --no-arm)                 do_inst_arm=0;;
+  --chromeos-fonts)         do_inst_chromeos_fonts=1;;
+  --no-chromeos-fonts)      do_inst_chromeos_fonts=0;;
+  --nacl)                   do_inst_nacl=1;;
+  --no-nacl)                do_inst_nacl=0;;
+  --no-prompt)              do_default=1
+                            do_quietly="-qq --assume-yes"
+    ;;
+  --quick-check)            do_quick_check=1;;
+  --unsupported)            do_unsupported=1;;
+  *) usage;;
+  esac
+  shift
+done
+
+if test "$do_inst_arm" = "1"; then
+  do_inst_lib32=1
+fi
+
+# Check for lsb_release command in $PATH
+if ! which lsb_release > /dev/null; then
+  echo "ERROR: lsb_release not found in \$PATH" >&2
+  exit 1;
+fi
+
+distro=$(lsb_release --id --short)
+codename=$(lsb_release --codename --short)
+ubuntu_codenames="(precise|trusty|utopic|vivid)"
+debian_codenames="(stretch)"
+if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
+  if [[ ! $codename =~ $ubuntu_codenames && ! $codename =~ $debian_codenames ]]; then
+    echo "ERROR: Only Ubuntu 12.04 (precise), 14.04 (trusty), " \
+      "14.10 (utopic) and 15.04 (vivid), and Debian Testing (stretch) are currently supported" >&2
+    exit 1
+  fi
+
+  if ! uname -m | egrep -q "i686|x86_64"; then
+    echo "Only x86 architectures are currently supported" >&2
+    exit
+  fi
+fi
+
+if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then
+  echo "Running as non-root user."
+  echo "You might have to enter your password one or more times for 'sudo'."
+  echo
+fi
+
+# Packages needed for chromeos only
+chromeos_dev_list="libbluetooth-dev libxkbcommon-dev realpath"
+
+# Packages needed for development
+if [[ $distro = Debian ]] ; then
+  # Debian-specific package names
+  dev_list="apache2-bin fonts-indic fonts-lyx"
+else
+  # Ubuntu-specific package names
+  dev_list="apache2.2-bin ttf-indic-fonts xfonts-mathml language-pack-da
+            language-pack-fr language-pack-he language-pack-zh-hant"
+fi
+dev_list="$dev_list bison cdbs curl dpkg-dev elfutils devscripts fakeroot
+          flex fonts-thai-tlwg g++ git-core git-svn gperf libapache2-mod-php5
+          libasound2-dev libbrlapi-dev libav-tools
+          libbz2-dev libcairo2-dev libcap-dev libcups2-dev libcurl4-gnutls-dev
+          libdrm-dev libelf-dev libexif-dev libgconf2-dev libglib2.0-dev
+          libglu1-mesa-dev libgnome-keyring-dev libgtk2.0-dev libkrb5-dev
+          libnspr4-dev libnss3-dev libpam0g-dev libpci-dev libpulse-dev
+          libsctp-dev libspeechd-dev libsqlite3-dev libssl-dev libudev-dev
+          libwww-perl libxslt1-dev libxss-dev libxt-dev libxtst-dev openbox
+          patch perl php5-cgi pkg-config python python-cherrypy3 python-crypto
+          python-dev python-numpy python-opencv python-openssl python-psutil
+          python-yaml rpm ruby subversion ttf-dejavu-core
+          ttf-kochi-gothic ttf-kochi-mincho wdiff zip
+          $chromeos_dev_list"
+
+# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
+# NaCl binaries.
+if file /sbin/init | grep -q 'ELF 64-bit'; then
+  dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
+fi
+
+# Run-time libraries required by chromeos only
+chromeos_lib_list="libpulse0 libbz2-1.0"
+
+# Full list of required run-time libraries
+lib_list="libatk1.0-0 libc6 libasound2 libcairo2 libcap2 libcups2 libexpat1
+          libexif12 libfontconfig1 libfreetype6 libglib2.0-0 libgnome-keyring0
+          libgtk2.0-0 libpam0g libpango1.0-0 libpci3 libpcre3 libpixman-1-0
+          libpng12-0 libspeechd2 libstdc++6 libsqlite3-0 libx11-6
+          libxau6 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxdmcp6
+          libxext6 libxfixes3 libxi6 libxinerama1 libxrandr2 libxrender1
+          libxtst6 zlib1g $chromeos_lib_list"
+
+# Debugging symbols for all of the run-time libraries
+dbg_list="libatk1.0-dbg libc6-dbg libcairo2-dbg libfontconfig1-dbg
+          libglib2.0-0-dbg libgtk2.0-0-dbg libpango1.0-0-dbg libpcre3-dbg
+          libpixman-1-0-dbg libsqlite3-0-dbg libx11-6-dbg libxau6-dbg
+          libxcb1-dbg libxcomposite1-dbg libxcursor1-dbg libxdamage1-dbg
+          libxdmcp6-dbg libxext6-dbg libxfixes3-dbg libxi6-dbg libxinerama1-dbg
+          libxrandr2-dbg libxrender1-dbg libxtst6-dbg zlib1g-dbg"
+
+# Find the proper version of libstdc++6-4.x-dbg.
+if [ "x$codename" = "xprecise" ]; then
+  dbg_list="${dbg_list} libstdc++6-4.6-dbg"
+elif [ "x$codename" = "xtrusty" ]; then
+  dbg_list="${dbg_list} libstdc++6-4.8-dbg"
+else
+  dbg_list="${dbg_list} libstdc++6-4.9-dbg"
+fi
+
+# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf
+lib32_list="linux-libc-dev:i386"
+
+# arm cross toolchain packages needed to build chrome on armhf
+arm_list="libc6-dev-armhf-cross
+          linux-libc-dev-armhf-cross"
+
+# Workaround for dependency issue on Debian/Stretch
+if [ "x$codename" = "xstretch" ]; then
+  arm_list+=" g++-5-arm-linux-gnueabihf"
+else
+  arm_list+=" g++-arm-linux-gnueabihf"
+fi
+
+# Workaround for dependency issue on Ubuntu/Trusty: http://crbug.com/435056
+if [ "x$codename" = "xtrusty" ]; then
+  arm_list+=" g++-4.8-multilib-arm-linux-gnueabihf
+              gcc-4.8-multilib-arm-linux-gnueabihf"
+fi
+
+# Packages to build NaCl, its toolchains, and its ports.
+naclports_list="ant autoconf bison cmake gawk intltool xutils-dev xsltproc"
+nacl_list="g++-mingw-w64-i686 lib32z1-dev
+           libasound2:i386 libcap2:i386 libelf-dev:i386 libexif12:i386
+           libfontconfig1:i386 libgconf-2-4:i386 libglib2.0-0:i386 libgpm2:i386
+           libgtk2.0-0:i386 libncurses5:i386 lib32ncurses5-dev
+           libnss3:i386 libpango1.0-0:i386
+           libssl1.0.0:i386 libtinfo-dev libtinfo-dev:i386 libtool
+           libxcomposite1:i386 libxcursor1:i386 libxdamage1:i386 libxi6:i386
+           libxrandr2:i386 libxss1:i386 libxtst6:i386 texinfo xvfb
+           ${naclports_list}"
+
+# Find the proper version of libgbm-dev. We can't just install libgbm-dev as
+# it depends on mesa, and only one version of mesa can exist on the system.
+# Hence we must match the same version or this entire script will fail.
+mesa_variant=""
+for variant in "-lts-trusty" "-lts-utopic"; do
+  if $(dpkg-query -Wf'${Status}' libgl1-mesa-glx${variant} 2>/dev/null | \
+       grep -q " ok installed"); then
+    mesa_variant="${variant}"
+  fi
+done
+dev_list="${dev_list} libgbm-dev${mesa_variant}
+          libgles2-mesa-dev${mesa_variant} libgl1-mesa-dev${mesa_variant}
+          mesa-common-dev${mesa_variant}"
+nacl_list="${nacl_list} libgl1-mesa-glx${mesa_variant}:i386"
+
+# Some package names have changed over time
+if package_exists ttf-mscorefonts-installer; then
+  dev_list="${dev_list} ttf-mscorefonts-installer"
+else
+  dev_list="${dev_list} msttcorefonts"
+fi
+if package_exists libnspr4-dbg; then
+  dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg"
+  lib_list="${lib_list} libnspr4 libnss3"
+else
+  dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg"
+  lib_list="${lib_list} libnspr4-0d libnss3-1d"
+fi
+if package_exists libjpeg-dev; then
+  dev_list="${dev_list} libjpeg-dev"
+else
+  dev_list="${dev_list} libjpeg62-dev"
+fi
+if package_exists libudev1; then
+  dev_list="${dev_list} libudev1"
+  nacl_list="${nacl_list} libudev1:i386"
+else
+  dev_list="${dev_list} libudev0"
+  nacl_list="${nacl_list} libudev0:i386"
+fi
+if package_exists libbrlapi0.6; then
+  dev_list="${dev_list} libbrlapi0.6"
+else
+  dev_list="${dev_list} libbrlapi0.5"
+fi
+
+
+# Some packages are only needed if the distribution actually supports
+# installing them.
+if package_exists appmenu-gtk; then
+  lib_list="$lib_list appmenu-gtk"
+fi
+
+# When cross building for arm/Android on 64-bit systems the host binaries
+# that are part of v8 need to be compiled with -m32 which means
+# that basic multilib support is needed.
+if file /sbin/init | grep -q 'ELF 64-bit'; then
+  # gcc-multilib conflicts with the arm cross compiler (at least in trusty) but
+  # g++-X.Y-multilib gives us the 32-bit support that we need. Find out the
+  # appropriate value of X and Y by seeing what version the current
+  # distribution's g++-multilib package depends on.
+  multilib_package=$(apt-cache depends g++-multilib --important | \
+      grep -E --color=never --only-matching '\bg\+\+-[0-9.]+-multilib\b')
+  lib32_list="$lib32_list $multilib_package"
+fi
+
+# Waits for the user to press 'Y' or 'N'. Either uppercase or lowercase is
+# accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has
+# been provided to yes_no(), the function also accepts RETURN as user input.
+# The parameter specifies the exit code that should be returned in that case.
+# The function will echo the user's selection followed by a newline character.
+# Users can abort the function by pressing CTRL-C. This will call "exit 1".
+yes_no() {
+  if [ 0 -ne "${do_default-0}" ] ; then
+    [ $1 -eq 0 ] && echo "Y" || echo "N"
+    return $1
+  fi
+  local c
+  while :; do
+    c="$(trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT
+         stty -echo iuclc -icanon 2>/dev/null
+         dd count=1 bs=1 2>/dev/null | od -An -tx1)"
+    case "$c" in
+      " 0a") if [ -n "$1" ]; then
+               [ $1 -eq 0 ] && echo "Y" || echo "N"
+               return $1
+             fi
+             ;;
+      " 79") echo "Y"
+             return 0
+             ;;
+      " 6e") echo "N"
+             return 1
+             ;;
+      "")    echo "Aborted" >&2
+             exit 1
+             ;;
+      *)     # The user pressed an unrecognized key. As we are not echoing
+             # any incorrect user input, alert the user by ringing the bell.
+             (tput bel) 2>/dev/null
+             ;;
+    esac
+  done
+}
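+# A minimal usage sketch (mirroring the call below): default to "no" when the
+# user just presses RETURN:
+#   echo -n "Do you want me to do it (y/N) "
+#   if yes_no 1; then do_it=1; fi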
+
+if test "$do_inst_syms" = "" && test 0 -eq ${do_quick_check-0}
+then
+  echo "This script installs all tools and libraries needed to build Chromium."
+  echo ""
+  echo "For most of the libraries, it can also install debugging symbols, which"
+  echo "will allow you to debug code in the system libraries. Most developers"
+  echo "won't need these symbols."
+  echo -n "Do you want me to install them for you (y/N) "
+  if yes_no 1; then
+    do_inst_syms=1
+  fi
+fi
+if test "$do_inst_syms" = "1"; then
+  echo "Including debugging symbols."
+else
+  echo "Skipping debugging symbols."
+  dbg_list=
+fi
+
+if test "$do_inst_lib32" = "1" ; then
+  echo "Including 32-bit libraries for ARM/Android."
+else
+  echo "Skipping 32-bit libraries for ARM/Android."
+  lib32_list=
+fi
+
+if test "$do_inst_arm" = "1" ; then
+  echo "Including ARM cross toolchain."
+else
+  echo "Skipping ARM cross toolchain."
+  arm_list=
+fi
+
+if test "$do_inst_nacl" = "1"; then
+  echo "Including NaCl, NaCl toolchain, NaCl ports dependencies."
+else
+  echo "Skipping NaCl, NaCl toolchain, NaCl ports dependencies."
+  nacl_list=
+fi
+
+# The `sort -r -s -t: -k2` sorts all the :i386 packages to the front, to avoid
+# confusing dpkg-query (crbug.com/446172).
+packages="$(
+  echo "${dev_list} ${lib_list} ${dbg_list} ${lib32_list} ${arm_list}"\
+       "${nacl_list}" | tr " " "\n" | sort -u | sort -r -s -t: -k2 | tr "\n" " "
+)"
+
+if [ 1 -eq "${do_quick_check-0}" ] ; then
+  failed_check="$(dpkg-query -W -f '${PackageSpec}:${Status}\n' \
+    ${packages} 2>&1 | grep -v "ok installed" || :)"
+  if [ -n "${failed_check}" ]; then
+    echo
+    nomatch="$(echo "${failed_check}" | \
+      sed -e "s/^No packages found matching \(.*\).$/\1/;t;d")"
+    missing="$(echo "${failed_check}" | \
+      sed -e "/^No packages found matching/d;s/^\(.*\):.*$/\1/")"
+    if [ "$nomatch" ]; then
+      # Distinguish between packages that actually aren't available to the
+      # system (i.e. not in any repo) and packages that just aren't known to
+      # dpkg (i.e. managed by apt).
+      unknown=""
+      for p in ${nomatch}; do
+        if apt-cache show ${p} > /dev/null 2>&1; then
+          missing="${p}\n${missing}"
+        else
+          unknown="${p}\n${unknown}"
+        fi
+      done
+      if [ -n "${unknown}" ]; then
+        echo "WARNING: The following packages are unknown to your system"
+        echo "(maybe missing a repo or need to 'sudo apt-get update'):"
+        echo -e "${unknown}" | sed -e "s/^/  /"
+      fi
+    fi
+    if [ -n "${missing}" ]; then
+      echo "WARNING: The following packages are not installed:"
+      echo -e "${missing}" | sed -e "s/^/  /"
+    fi
+    exit 1
+  fi
+  exit 0
+fi
+
+if test "$do_inst_lib32" = "1" || test "$do_inst_nacl" = "1"; then
+  if [[ ! $codename =~ (precise) ]]; then
+    sudo dpkg --add-architecture i386
+  fi
+fi
+sudo apt-get update
+
+# We initially run "apt-get" with the --reinstall option and parse its output.
+# This way, we can find all the packages that need to be newly installed
+# without accidentally promoting any packages from "auto" to "manual".
+# We then re-run "apt-get" with just the list of missing packages.
+echo "Finding missing packages..."
+# Intentionally leaving $packages unquoted so it's more readable.
+echo "Packages required: " $packages
+echo
+new_list_cmd="sudo apt-get install --reinstall $(echo $packages)"
+if new_list="$(yes n | LANGUAGE=en LANG=C $new_list_cmd)"; then
+  # We probably never hit this following line.
+  echo "No missing packages, and the packages are up-to-date."
+elif [ $? -eq 1 ]; then
+  # We expect apt-get to have exit status of 1.
+  # This indicates that we cancelled the install with "yes n|".
+  new_list=$(echo "$new_list" |
+    sed -e '1,/The following NEW packages will be installed:/d;s/^  //;t;d')
+  new_list=$(echo "$new_list" | sed 's/ *$//')
+  if [ -z "$new_list" ] ; then
+    echo "No missing packages, and the packages are up-to-date."
+  else
+    echo "Installing missing packages: $new_list."
+    sudo apt-get install ${do_quietly-} ${new_list}
+  fi
+  echo
+else
+  # An apt-get exit status of 100 indicates that a real error has occurred.
+
+  # I am intentionally leaving out the '"'s around new_list_cmd,
+  # as this makes it easier to cut and paste the output
+  echo "The following command failed: " ${new_list_cmd}
+  echo
+  echo "It produces the following output:"
+  yes n | $new_list_cmd || true
+  echo
+  echo "You will have to install the above packages yourself."
+  echo
+  exit 100
+fi
+
+# Install the Chrome OS default fonts. This must go after running
+# apt-get, since install-chromeos-fonts depends on curl.
+if test "$do_inst_chromeos_fonts" != "0"; then
+  echo
+  echo "Installing Chrome OS fonts."
+  dir=`echo $0 | sed -r -e 's/\/[^/]+$//'`
+  if ! sudo $dir/linux/install-chromeos-fonts.py; then
+    echo "ERROR: The installation of the Chrome OS default fonts failed."
+    if [ `stat -f -c %T $dir` == "nfs" ]; then
+      echo "The reason is that your repo is installed on a remote file system."
+    else
+      echo "This is expected if your repo is installed on a remote file system."
+    fi
+    echo "It is recommended to install your repo on a local file system."
+    echo "You can skip the installation of the Chrome OS default founts with"
+    echo "the command line option: --no-chromeos-fonts."
+    exit 1
+  fi
+else
+  echo "Skipping installation of Chrome OS fonts."
+fi
+
+# $1 - target name
+# $2 - link name
+create_library_symlink() {
+  target=$1
+  linkname=$2
+  if [ -L $linkname ]; then
+    if [ "$(basename $(readlink $linkname))" != "$(basename $target)" ]; then
+      sudo rm $linkname
+    fi
+  fi
+  if [ ! -r $linkname ]; then
+    echo "Creating link: $linkname"
+    sudo ln -fs $target $linkname
+  fi
+}
+
+if test "$do_inst_nacl" = "1"; then
+  echo "Installing symbolic links for NaCl."
+  # naclports needs to cross build python for i386, but libssl1.0.0:i386
+  # only contains libcrypto.so.1.0.0 and not the symlink needed for
+  # linking (libcrypto.so).
+  create_library_symlink /lib/i386-linux-gnu/libcrypto.so.1.0.0 \
+      /usr/lib/i386-linux-gnu/libcrypto.so
+
+  create_library_symlink /lib/i386-linux-gnu/libssl.so.1.0.0 \
+      /usr/lib/i386-linux-gnu/libssl.so
+else
+  echo "Skipping symbolic links for NaCl."
+fi
diff --git a/build/install-chroot.sh b/build/install-chroot.sh
new file mode 100755
index 0000000..99451ed
--- /dev/null
+++ b/build/install-chroot.sh
@@ -0,0 +1,888 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script installs Debian-derived distributions in a chroot environment.
+# It can for example be used to have an accurate 32bit build and test
+# environment when otherwise working on a 64bit machine.
+# N. B. it is unlikely that this script will ever work on anything other than a
+# Debian-derived system.
+
+# Older Debian based systems had both "admin" and "adm" groups, with "admin"
+# apparently being used in more places. Newer distributions have standardized
+# on just the "adm" group. Check /etc/group for the preferred name of the
+# administrator group.
+admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm)
+
+usage() {
+  echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]"
+  echo "-b dir       additional directories that should be bind mounted,"
+  echo '             or "NONE".'
+  echo "             Default: if local filesystems present, ask user for help"
+  echo "-g group,... groups that can use the chroot unauthenticated"
+  echo "             Default: '${admin}' and current user's group ('$(id -gn)')"
+  echo "-l           List all installed chroot environments"
+  echo "-m mirror    an alternate repository mirror for package downloads"
+  echo "-s           configure default deb-srcs"
+  echo "-c           always copy 64bit helper binaries to 32bit chroot"
+  echo "-h           this help message"
+}
+
+process_opts() {
+  local OPTNAME OPTIND OPTERR OPTARG
+  while getopts ":b:g:lm:sch" OPTNAME; do
+    case "$OPTNAME" in
+      b)
+        if [ "${OPTARG}" = "NONE" -a -z "${bind_mounts}" ]; then
+          bind_mounts="${OPTARG}"
+        else
+          if [ "${bind_mounts}" = "NONE" -o "${OPTARG}" = "${OPTARG#/}" -o \
+               ! -d "${OPTARG}" ]; then
+            echo "Invalid -b option(s)"
+            usage
+            exit 1
+          fi
+          bind_mounts="${bind_mounts}
+${OPTARG} ${OPTARG} none rw,bind 0 0"
+        fi
+        ;;
+      g)
+        [ -n "${OPTARG}" ] &&
+          chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}"
+        ;;
+      l)
+        list_all_chroots
+        exit
+        ;;
+      m)
+        if [ -n "${mirror}" ]; then
+          echo "You can only specify exactly one mirror location"
+          usage
+          exit 1
+        fi
+        mirror="$OPTARG"
+        ;;
+      s)
+        add_srcs="y"
+        ;;
+      c)
+        copy_64="y"
+        ;;
+      h)
+        usage
+        exit 0
+        ;;
+      \:)
+        echo "'-$OPTARG' needs an argument."
+        usage
+        exit 1
+        ;;
+      *)
+        echo "invalid command-line option: $OPTARG"
+        usage
+        exit 1
+        ;;
+    esac
+  done
+
+  if [ $# -ge ${OPTIND} ]; then
+    eval echo "Unexpected command line argument: \${${OPTIND}}"
+    usage
+    exit 1
+  fi
+}
+
+list_all_chroots() {
+  for i in /var/lib/chroot/*; do
+    i="${i##*/}"
+    [ "${i}" = "*" ] && continue
+    [ -x "/usr/local/bin/${i%bit}" ] || continue
+    grep -qs "^\[${i%bit}\]\$" /etc/schroot/schroot.conf || continue
+    [ -r "/etc/schroot/script-${i}" -a \
+      -r "/etc/schroot/mount-${i}" ] || continue
+    echo "${i%bit}"
+  done
+}
+
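+# Reads a single raw keypress from the terminal without echoing it.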
+getkey() {
+  (
+    trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT HUP
+    stty -echo iuclc -icanon 2>/dev/null
+    dd count=1 bs=1 2>/dev/null
+  )
+}
+
+chr() {
+  printf "\\$(printf '%03o' "$1")"
+}
+
+ord() {
+  printf '%d' $(printf '%c' "$1" | od -tu1 -An)
+}
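+# For example, "chr 65" prints "A" and "ord A" prints 65; these helpers map
+# menu indices to letters and back further below.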
+
+is_network_drive() {
+  stat -c %T -f "$1/" 2>/dev/null |
+    egrep -qs '^nfs|cifs|smbfs'
+}
+
+# Check that we are running as a regular user
+[ "$(id -nu)" = root ] && {
+  echo "Run this script as a regular user and provide your \"sudo\""           \
+       "password if requested" >&2
+  exit 1
+}
+
+process_opts "$@"
+
+echo "This script will help you through the process of installing a"
+echo "Debian or Ubuntu distribution in a chroot environment. You will"
+echo "have to provide your \"sudo\" password when requested."
+echo
+
+# Error handler
+trap 'exit 1' INT TERM QUIT HUP
+trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT
+
+# Install any missing applications that this script relies on. If these packages
+# are already installed, don't force another "apt-get install". That would
+# prevent them from being auto-removed, if they ever become eligible for that.
+# And as this script only needs the packages once, there is no good reason to
+# introduce a hard dependency on things such as dchroot and debootstrap.
+dep=
+for i in dchroot debootstrap libwww-perl; do
+  [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+done
+[ -n "$dep" ] && sudo apt-get -y install $dep
+sudo apt-get -y install schroot
+
+# Create directory for chroot
+sudo mkdir -p /var/lib/chroot
+
+# Find chroot environments that can be installed with debootstrap
+targets="$(cd /usr/share/debootstrap/scripts
+           ls | grep '^[a-z]*$')"
+
+# Ask user to pick one of the available targets
+echo "The following targets are available to be installed in a chroot:"
+j=1; for i in $targets; do
+  printf '%4d: %s\n' "$j" "$i"
+  j=$(($j+1))
+done
+while :; do
+  printf "Which target would you like to install: "
+  read n
+  [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break
+done
+j=1; for i in $targets; do
+  [ "$j" -eq "$n" ] && { distname="$i"; break; }
+  j=$(($j+1))
+done
+echo
+
+# On x86-64, ask whether the user wants to install x86-32 or x86-64
+archflag=
+arch=
+if [ "$(uname -m)" = x86_64 ]; then
+  while :; do
+    echo "You are running a 64bit kernel. This allows you to install either a"
+    printf "32bit or a 64bit chroot environment. %s"                           \
+           "Which one do you want (32, 64) "
+    read arch
+    [ "${arch}" == 32 -o "${arch}" == 64 ] && break
+  done
+  [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64"
+  arch="${arch}bit"
+  echo
+fi
+target="${distname}${arch}"
+
+# Don't accidentally overwrite an existing installation
+[ -d /var/lib/chroot/"${target}" ] && {
+  while :; do
+    echo "This chroot already exists on your machine."
+    if schroot -l --all-sessions 2>&1 |
+       sed 's/^session://' |
+       grep -qs "^${target%bit}-"; then
+      echo "And it appears to be in active use. Terminate all programs that"
+      echo "are currently using the chroot environment and then re-run this"
+      echo "script."
+      echo "If you still get an error message, you might have stale mounts"
+      echo "that you forgot to delete. You can always clean up mounts by"
+      echo "executing \"${target%bit} -c\"."
+      exit 1
+    fi
+    echo "I can abort installation, I can overwrite the existing chroot,"
+    echo "or I can delete the old one and then exit. What would you like to"
+    printf "do (a/o/d)? "
+    read choice
+    case "${choice}" in
+      a|A) exit 1;;
+      o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;;
+      d|D) sudo rm -rf "/var/lib/chroot/${target}"      \
+                       "/usr/local/bin/${target%bit}"   \
+                       "/etc/schroot/mount-${target}"   \
+                       "/etc/schroot/script-${target}"  \
+                       "/etc/schroot/${target}"
+           sudo sed -ni '/^[[]'"${target%bit}"']$/,${
+                         :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+                       "/etc/schroot/schroot.conf"
+           trap '' INT TERM QUIT HUP
+           trap '' EXIT
+           echo "Deleted!"
+           exit 0;;
+    esac
+  done
+  echo
+}
+sudo mkdir -p /var/lib/chroot/"${target}"
+
+# Offer to include additional standard repositories for Ubuntu-based chroots.
+alt_repos=
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" && {
+  while :; do
+    echo "Would you like to add ${distname}-updates and ${distname}-security "
+    printf "to the chroot's sources.list (y/n)? "
+    read alt_repos
+    case "${alt_repos}" in
+      y|Y)
+        alt_repos="y"
+        break
+      ;;
+      n|N)
+        break
+      ;;
+    esac
+  done
+  echo
+}
+
+# Check for non-standard file system mount points and ask the user whether
+# they should be imported into the chroot environment.
+# We limit to the first 26 mount points that match some basic heuristics,
+# because a) that allows us to enumerate choices with a single character,
+# and b) if we find more than 26 mount points, then these are probably
+# false-positives and something is very unusual about the system's
+# configuration. No need to spam the user with even more information that
+# is likely completely irrelevant.
+if [ -z "${bind_mounts}" ]; then
+  mounts="$(awk '$2 != "/" && $2 !~ "^/boot" && $2 !~ "^/home" &&
+                 $2 !~ "^/media" && $2 !~ "^/run" &&
+                 ($3 ~ "ext[2-4]" || $3 == "reiserfs" || $3 == "btrfs" ||
+                 $3 == "xfs" || $3 == "jfs" || $3 == "u?msdos" ||
+                 $3 == "v?fat" || $3 == "hfs" || $3 == "ntfs" ||
+                 $3 ~ "nfs[4-9]?" || $3 == "smbfs" || $3 == "cifs") {
+                   print $2
+                 }' /proc/mounts |
+            head -n26)"
+  if [ -n "${mounts}" ]; then
+    echo "You appear to have non-standard mount points that you"
+    echo "might want to import into the chroot environment:"
+    echo
+    sel=
+    while :; do
+      # Print a menu, listing all non-default mounts of local or network
+      # file systems.
+      j=1; for m in ${mounts}; do
+        c="$(printf $(printf '\\%03o' $((64+$j))))"
+        echo "$sel" | grep -qs $c &&
+          state="mounted in chroot" || state="$(tput el)"
+        printf "   $c) %-40s${state}\n" "$m"
+        j=$(($j+1))
+      done
+      # Allow user to interactively (de-)select any of the entries
+      echo
+      printf "Select mount points that you want to be included or press %s" \
+             "SPACE to continue"
+      c="$(getkey | tr a-z A-Z)"
+      [ "$c" == " " ] && { echo; echo; break; }
+      if [ -z "$c" ] ||
+         [ "$c" '<' 'A' -o $(ord "$c") -gt $((64 + $(ord "$j"))) ]; then
+          # Invalid input, ring the console bell
+          tput bel
+      else
+        # Toggle the selection for the given entry
+        if echo "$sel" | grep -qs $c; then
+          sel="$(printf "$sel" | sed "s/$c//")"
+        else
+          sel="$sel$c"
+        fi
+      fi
+      # Reposition cursor to the top of the list of entries
+      tput cuu $(($j + 1))
+      echo
+    done
+  fi
+  j=1; for m in ${mounts}; do
+    c="$(chr $(($j + 64)))"
+    if echo "$sel" | grep -qs $c; then
+      bind_mounts="${bind_mounts}$m $m none rw,bind 0 0
+"
+    fi
+    j=$(($j+1))
+  done
+fi
+
+# Remove stale entry from /etc/schroot/schroot.conf. Entries start
+# with the target name in square brackets, followed by an arbitrary
+# number of lines. The entry stops when either the end of file has
+# been reached, or when the beginning of a new target is encountered.
+# This means we cannot easily match for a range of lines in
+# "sed". Instead, we actually have to iterate over each line and check
+# whether it is the beginning of a new entry.
+sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p'       \
+         /etc/schroot/schroot.conf
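+# For illustration: if schroot.conf contained a "[trusty32]" section followed
+# by an "[other]" section, the command above would delete everything from the
+# "[trusty32]" line up to (but not including) the "[other]" line.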
+
+# Download base system. This takes some time
+if [ -z "${mirror}" ]; then
+ grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+   mirror="http://archive.ubuntu.com/ubuntu" ||
+   mirror="http://ftp.us.debian.org/debian"
+fi
+
+sudo ${http_proxy:+http_proxy="${http_proxy}"} debootstrap ${archflag} \
+    "${distname}" "/var/lib/chroot/${target}"  "$mirror"
+
+# Add new entry to /etc/schroot/schroot.conf
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+  brand="Ubuntu" || brand="Debian"
+if [ -z "${chroot_groups}" ]; then
+  chroot_groups="${admin},$(id -gn)"
+fi
+
+if [ -d '/etc/schroot/default' ]; then
+  new_version=1
+  fstab="/etc/schroot/${target}/fstab"
+else
+  new_version=0
+  fstab="/etc/schroot/mount-${target}"
+fi
+
+if [ "$new_version" = "1" ]; then
+  sudo cp -ar /etc/schroot/default /etc/schroot/${target}
+
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+profile=${target}
+
+EOF
+  [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+    printf "${bind_mounts}" |
+      sudo sh -c "cat >>${fstab}"
+else
+  # Older versions of schroot wanted a "priority=" line, whereas recent
+  # versions deprecate "priority=" and warn if they see it. We don't have
+  # a good feature test, but scanning for the string "priority=" in the
+  # existing "schroot.conf" file is a good indication of what to do.
+  priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
+           echo 'priority=3' || :)
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+script-config=script-${target}
+${priority}
+
+EOF
+
+  # Set up a list of mount points that is specific to this
+  # chroot environment.
+  sed '/^FSTAB=/s,"[^"]*","'"${fstab}"'",' \
+           /etc/schroot/script-defaults |
+    sudo sh -c 'cat >/etc/schroot/script-'"${target}"
+  sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
+    /etc/schroot/mount-defaults |
+    sudo sh -c "cat > ${fstab}"
+fi
+
+# Add the extra mount points that the user told us about
+[ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+  printf "${bind_mounts}" |
+    sudo sh -c 'cat >>'"${fstab}"
+
+# If this system has a "/media" mountpoint, import it into the chroot
+# environment. Most modern distributions use this mount point to
+# automatically mount devices such as CDROMs, USB sticks, etc...
+if [ -d /media ] &&
+   ! grep -qs '^/media' "${fstab}"; then
+  echo '/media /media none rw,rbind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Share /dev/shm, /run and /run/shm.
+grep -qs '^/dev/shm' "${fstab}" ||
+  echo '/dev/shm /dev/shm none rw,bind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+if [ ! -d "/var/lib/chroot/${target}/run" ] &&
+   ! grep -qs '^/run' "${fstab}"; then
+  echo '/run /run none rw,bind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+if ! grep -qs '^/run/shm' "${fstab}"; then
+  { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
+                   echo '/dev/shm /run/shm none rw,bind 0 0'; } |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Set up a special directory that changes contents depending on the target
+# that is executing.
+d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
+s="${d}/.${target}"
+echo "${s} ${d} none rw,bind 0 0" |
+  sudo sh -c 'cat >>'"${fstab}"
+mkdir -p "${s}"
+
+# Install a helper script to launch commands in the chroot
+sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
+#!/bin/bash
+
+chroot="${0##*/}"
+
+wrap() {
+  # Word-wrap the text passed-in on stdin. Optionally, on continuation lines
+  # insert the same number of spaces as the number of characters in the
+  # parameter(s) passed to this function.
+  # If the "fold" program cannot be found, or if the actual width of the
+  # terminal cannot be determined, this function doesn't attempt to do any
+  # wrapping.
+  local f="$(type -P fold)"
+  [ -z "${f}" ] && { cat; return; }
+  local c="$(stty -a </dev/tty 2>/dev/null |
+             sed 's/.*columns[[:space:]]*\([0-9]*\).*/\1/;t;d')"
+  [ -z "${c}" ] && { cat; return; }
+  local i="$(echo "$*"|sed 's/./ /g')"
+  local j="$(printf %s "${i}"|wc -c)"
+  if [ "${c}" -gt "${j}" ]; then
+    dd bs=1 count="${j}" 2>/dev/null
+    "${f}" -sw "$((${c}-${j}))" | sed '2,$s/^/'"${i}"'/'
+  else
+    "${f}" -sw "${c}"
+  fi
+}
+
+help() {
+  echo "Usage ${0##*/} [-h|--help] [-c|--clean] [-C|--clean-all] [-l|--list] [--] args" | wrap "Usage ${0##*/} "
+  echo "  help:      print this message"                                                | wrap "             "
+  echo "  list:      list all known chroot environments"                                | wrap "             "
+  echo "  clean:     remove all old chroot sessions for \"${chroot}\""                  | wrap "             "
+  echo "  clean-all: remove all old chroot sessions for all environments"               | wrap "             "
+  exit 0
+}
+
+clean() {
+  local s t rc
+  rc=0
+  for s in $(schroot -l --all-sessions); do
+    if [ -n "$1" ]; then
+      t="${s#session:}"
+      [ "${t#${chroot}-}" == "${t}" ] && continue
+    fi
+    if ls -l /proc/*/{cwd,fd} 2>/dev/null |
+       fgrep -qs "/var/lib/schroot/mount/${t}"; then
+      echo "Session \"${t}\" still has active users, not cleaning up" | wrap
+      rc=1
+      continue
+    fi
+    sudo schroot -c "${s}" -e || rc=1
+  done
+  exit ${rc}
+}
+
+list() {
+  for e in $(schroot -l); do
+    e="${e#chroot:}"
+    [ -x "/usr/local/bin/${e}" ] || continue
+    if schroot -l --all-sessions 2>/dev/null |
+       sed 's/^session://' |
+       grep -qs "^${e}-"; then
+      echo "${e} is currently active"
+    else
+      echo "${e}"
+    fi
+  done
+  exit 0
+}
+
+while [ "$#" -ne 0 ]; do
+  case "$1" in
+    --)             shift; break;;
+    -h|--help)      shift; help;;
+    -l|--list)      shift; list;;
+    -c|--clean)     shift; clean "${chroot}";;
+    -C|--clean-all) shift; clean;;
+    *)              break;;
+  esac
+done
+
+# Start a new chroot session and keep track of the session id. We inject this
+# id into all processes that run inside the chroot. Unless they go out of their
+# way to clear their environment, we can then later identify our child and
+# grand-child processes by scanning their environment.
+session="$(schroot -c "${chroot}" -b)"
+export CHROOT_SESSION_ID="${session}"
+
+# Set GOMA_TMP_DIR for better handling of goma inside chroot.
+export GOMA_TMP_DIR="/tmp/goma_tmp_$CHROOT_SESSION_ID"
+mkdir -p "$GOMA_TMP_DIR"
+
+if [ $# -eq 0 ]; then
+  # Run an interactive shell session
+  schroot -c "${session}" -r -p
+else
+  # Run a command inside of the chroot environment
+  p="$1"; shift
+  schroot -c "${session}" -r -p "$p" -- "$@"
+fi
+rc=$?
+
+# Compute the inode of the root directory inside of the chroot environment.
+i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. |
+     awk '{ print $1 }') 2>/dev/null
+other_pids=
+while [ -n "$i" ]; do
+  # Identify processes by the inode number of their root directory. Then
+  # remove all processes that we know belong to other sessions. We use
+  # "sort | uniq -u" to do what amounts to a "set substraction operation".
+  pids=$({ ls -id1 /proc/*/root/. 2>/dev/null |
+         sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
+                 t
+                 d';
+         echo "${other_pids}";
+         echo "${other_pids}"; } | sort | uniq -u) >/dev/null 2>&1
+  # Kill all processes that are still left running in the session. This is
+  # typically an assortment of daemon processes that were started
+  # automatically. They result in us being unable to tear down the session
+  # cleanly.
+  [ -z "${pids}" ] && break
+  for j in $pids; do
+    # Unfortunately, the way that schroot sets up sessions has the
+    # side-effect of being unable to tell one session apart from another.
+    # This can result in us attempting to kill processes in other sessions.
+    # We make a best-effort to avoid doing so.
+    k="$( ( xargs -0 -n1 </proc/$j/environ ) 2>/dev/null |
+         sed 's/^CHROOT_SESSION_ID=/x/;t1;d;:1;q')"
+    if [ -n "${k}" -a "${k#x}" != "${session}" ]; then
+      other_pids="${other_pids}
+${j}"
+      continue
+    fi
+    kill -9 $j
+  done
+done
+# End the chroot session. This should clean up all temporary files. But if we
+# earlier failed to terminate all (daemon) processes inside of the session,
+# deleting the session could fail. When that happens, the user has to manually
+# clean up the stale files by invoking us with "--clean" after having killed
+# all running processes.
+schroot -c "${session}" -e
+# Since no goma processes are running, we can remove goma directory.
+rm -rf "$GOMA_TMP_DIR"
+exit $rc
+EOF
+sudo chown root:root /usr/local/bin/"${target%bit}"
+sudo chmod 755 /usr/local/bin/"${target%bit}"
+
+# Add the standard Ubuntu update repositories if requested.
+[ "${alt_repos}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb .* [^ -]\+ main$/p
+             s/^\(deb .* [^ -]\+\) main/\1-security main/
+             p
+             t1
+             d
+             :1;s/-security main/-updates main/
+             t
+             d' "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add a few more repositories to the chroot
+[ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i 's/ main$/ main restricted universe multiverse/' \
+         "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add the Ubuntu "partner" repository, if available
+if [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+   HEAD "http://archive.canonical.com/ubuntu/dists/${distname}/partner" \
+   >&/dev/null; then
+  sudo sh -c '
+    echo "deb http://archive.canonical.com/ubuntu" \
+         "'"${distname}"' partner" \
+      >>"/var/lib/chroot/'"${target}"'/etc/apt/sources.list"'
+fi
+
+# Add source repositories, if the user requested we do so
+[ "${add_srcs}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb[^-]/p
+             s/^deb\([^-]\)/deb-src\1/' \
+         "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Set apt proxy if host has set http_proxy
+if [ -n "${http_proxy}" ]; then
+  sudo sh -c '
+    echo "Acquire::http::proxy \"'"${http_proxy}"'\";" \
+        >>"/var/lib/chroot/'"${target}"'/etc/apt/apt.conf"'
+fi
+
+# Update packages
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  apt-get update; apt-get -y dist-upgrade' || :
+
+# Install a couple of missing packages
+for i in debian-keyring ubuntu-keyring locales sudo; do
+  [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] ||
+    sudo "/usr/local/bin/${target%bit}" apt-get -y install "$i" || :
+done
+
+# Configure locales
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  l='"${LANG:-en_US}"'; l="${l%%.*}"
+  [ -r /etc/locale.gen ] &&
+    sed -i "s/^# \($l\)/\1/" /etc/locale.gen
+  locale-gen $LANG en_US en_US.UTF-8' || :
+
+# Enable multi-arch support, if available
+sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null &&
+  [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && {
+  sudo sed -i 's/ / [arch=amd64,i386] /' \
+              "/var/lib/chroot/${target}/etc/apt/sources.list"
+  [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] &&
+  sudo "/usr/local/bin/${target%bit}" dpkg --add-architecture \
+      $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) >&/dev/null ||
+    echo foreign-architecture \
+        $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) |
+      sudo sh -c \
+        "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'"
+}
+
+# Configure "sudo" package
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  egrep -qs '"'^$(id -nu) '"' /etc/sudoers ||
+  echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers'
+
+# Install a few more commonly used packages
+sudo "/usr/local/bin/${target%bit}" apt-get -y install                         \
+  autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool     \
+  lsof strace
+
+# If running a 32bit environment on a 64bit machine, install a few binaries
+# as 64bit. This is only done automatically if the chroot distro is the same as
+# the host, otherwise there might be incompatibilities in build settings or
+# runtime dependencies. The user can force it with the '-c' flag.
+host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \
+  cut -d "=" -f 2)
+if [ "${copy_64}" = "y" -o \
+    "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
+    file /bin/bash 2>/dev/null | grep -q x86-64; then
+  readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \
+    'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
+  sudo "/usr/local/bin/${target%bit}" apt-get -y install                       \
+    lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1 lib64stdc++6
+  dep=
+  for i in binutils gdb; do
+    [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+  done
+  [ -n "$dep" ] && sudo apt-get -y install $dep
+  sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64"
+  for i in libbfd libpython; do
+    lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
+           grep -s "$i" | awk '{ print $3 }')"
+    if [ -n "$lib" -a -r "$lib" ]; then
+      sudo cp "$lib" "/var/lib/chroot/${target}/usr/local/lib/amd64"
+    fi
+  done
+  for lib in libssl libcrypt; do
+    for path in /usr/lib /usr/lib/x86_64-linux-gnu; do
+      sudo cp $path/$lib* \
+              "/var/lib/chroot/${target}/usr/local/lib/amd64/" >&/dev/null || :
+    done
+  done
+  for i in gdb ld; do
+    sudo cp /usr/bin/$i "/var/lib/chroot/${target}/usr/local/lib/amd64/"
+    sudo sh -c "cat >'/var/lib/chroot/${target}/usr/local/bin/$i'" <<EOF
+#!/bin/sh
+exec /lib64/ld-linux-x86-64.so.2 --library-path /usr/local/lib/amd64 \
+  /usr/local/lib/amd64/$i "\$@"
+EOF
+    sudo chmod 755 "/var/lib/chroot/${target}/usr/local/bin/$i"
+  done
+fi
+
+
+# If the install-build-deps.sh script can be found, offer to run it now
+script="$(dirname $(readlink -f "$0"))/install-build-deps.sh"
+if [ -x "${script}" ]; then
+  while :; do
+    echo
+    echo "If you plan on building Chrome inside of the new chroot environment,"
+    echo "you now have to install the build dependencies. Do you want me to"
+    printf "start the script that does this for you (y/n)? "
+    read install_deps
+    case "${install_deps}" in
+      y|Y)
+        echo
+        # We prefer running the script in-place, but this might not be
+        # possible, if it lives on a network filesystem that denies
+        # access to root.
+        tmp_script=
+        if ! sudo /usr/local/bin/"${target%bit}" \
+            sh -c "[ -x '${script}' ]" >&/dev/null; then
+          tmp_script="/tmp/${script##*/}"
+          cp "${script}" "${tmp_script}"
+        fi
+        # Some distributions automatically start an instance of the system-
+        # wide dbus daemon, cron daemon, or logging daemon when installing
+        # the Chrome build dependencies. This prevents the chroot session
+        # from being closed. So, we always try to shut down any running
+        # instance of dbus and rsyslog.
+        sudo /usr/local/bin/"${target%bit}" sh -c "${script};
+              rc=$?;
+              /etc/init.d/cron stop >/dev/null 2>&1 || :;
+              /etc/init.d/rsyslog stop >/dev/null 2>&1 || :;
+              /etc/init.d/dbus stop >/dev/null 2>&1 || :;
+              exit $rc"
+        rc=$?
+        [ -n "${tmp_script}" ] && rm -f "${tmp_script}"
+        [ $rc -ne 0 ] && exit $rc
+        break
+      ;;
+      n|N)
+        break
+      ;;
+    esac
+  done
+  echo
+fi
+
+# Check whether ~/chroot is on a (slow) network file system and offer to
+# relocate it. Also offer relocation, if the user appears to have multiple
+# spindles (as indicated by "${bind_mounts}" being non-empty).
+# We only offer this option, if it doesn't look as if a chroot environment
+# is currently active. Otherwise, relocation is unlikely to work and it
+# can be difficult for the user to recover from the failed attempt to relocate
+# the ~/chroot directory.
+# We don't aim to solve this problem for every configuration,
+# but try to help with the common cases. For more advanced configuration
+# options, the user can always manually adjust things.
+mkdir -p "${HOME}/chroot/"
+if [ ! -h "${HOME}/chroot" ] &&
+   ! egrep -qs '^[^[:space:]]*/chroot' /etc/fstab &&
+   { [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] ||
+     is_network_drive "${HOME}/chroot"; } &&
+   ! egrep -qs '/var/lib/[^/]*chroot/.*/chroot' /proc/mounts; then
+  echo "${HOME}/chroot is currently located on the same device as your"
+  echo "home directory."
+  echo "This might not be what you want. Do you want me to move it somewhere"
+  echo "else?"
+  # If the computer has multiple spindles, many users configure all or part of
+  # the secondary hard disk to be writable by the primary user of this machine.
+  # Make some reasonable effort to detect this type of configuration and
+  # then offer a good location for where to put the ~/chroot directory.
+  suggest=
+  for i in $(echo "${bind_mounts}"|cut -d ' ' -f 1); do
+    if [ -d "$i" -a -w "$i" -a \( ! -a "$i/chroot" -o -w "$i/chroot/." \) ] &&
+       ! is_network_drive "$i"; then
+      suggest="$i"
+    else
+      for j in "$i/"*; do
+        if [ -d "$j" -a -w "$j" -a \
+             \( ! -a "$j/chroot" -o -w "$j/chroot/." \) ] &&
+           ! is_network_drive "$j"; then
+          suggest="$j"
+        else
+          for k in "$j/"*; do
+            if [ -d "$k" -a -w "$k" -a \
+                 \( ! -a "$k/chroot" -o -w "$k/chroot/." \) ] &&
+               ! is_network_drive "$k"; then
+              suggest="$k"
+              break
+            fi
+          done
+        fi
+        [ -n "${suggest}" ] && break
+      done
+    fi
+    [ -n "${suggest}" ] && break
+  done
+  def_suggest="${HOME}"
+  if [ -n "${suggest}" ]; then
+    # For home directories that reside on network drives, make our suggestion
+    # the default option. For home directories that reside on a local drive,
+    # require that the user manually enters the new location.
+    if is_network_drive "${HOME}"; then
+      def_suggest="${suggest}"
+    else
+      echo "A good location would probably be in \"${suggest}\""
+    fi
+  fi
+  while :; do
+    printf "Physical location [${def_suggest}]: "
+    read dir
+    [ -z "${dir}" ] && dir="${def_suggest}"
+    [ "${dir%%/}" == "${HOME%%/}" ] && break
+    if ! [ -d "${dir}" -a -w "${dir}" ] ||
+       [ -a "${dir}/chroot" -a ! -w "${dir}/chroot/." ]; then
+      echo "Cannot write to ${dir}/chroot. Please try again"
+    else
+      mv "${HOME}/chroot" "${dir}/chroot"
+      ln -s "${dir}/chroot" "${HOME}/chroot"
+      for i in $(list_all_chroots); do
+        sudo "$i" mkdir -p "${dir}/chroot"
+      done
+      sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-*
+      break
+    fi
+  done
+fi
+
+# Clean up package files
+sudo schroot -c "${target%bit}" -p -- apt-get clean
+sudo apt-get clean
+
+trap '' INT TERM QUIT HUP
+trap '' EXIT
+
+# Let the user know what we did
+cat <<EOF
+
+
+Successfully installed ${distname} ${arch}
+
+You can run programs inside of the chroot by invoking the
+"/usr/local/bin/${target%bit}" command.
+
+This command can be used with arguments, in order to just run a single
+program inside of the chroot environment (e.g. "${target%bit} make chrome")
+or without arguments, in order to run an interactive shell session inside
+of the chroot environment.
+
+If you need to run things as "root", you can use "sudo" (e.g. try
+"sudo ${target%bit} apt-get update").
+
+Your home directory is shared between the host and the chroot. But I
+configured "${HOME}/chroot" to be private to the chroot environment.
+You can use it for files that need to differ between environments. This
+would be a good place to store binaries that you have built from your
+source files.
+
+For Chrome, this probably means you want to make your "out" directory a
+symbolic link that points somewhere inside of "${HOME}/chroot".
+
+You still need to run "gclient runhooks" whenever you switch from building
+outside of the chroot to inside of the chroot. But you will find that you
+don't have to repeatedly erase and then completely rebuild all your object
+and binary files.
+
+EOF
diff --git a/build/internal/README.chromium b/build/internal/README.chromium
new file mode 100644
index 0000000..4624830
--- /dev/null
+++ b/build/internal/README.chromium
@@ -0,0 +1,24 @@
+Internal property sheets:
+  essential.vsprops
+    Contains the common settings used throughout the projects. Is included by either ..\debug.vsprops or ..\release.vsprops, so in general, it is not included directly.
+
+  release_defaults.vsprops
+    Included by ..\release.vsprops. Its settings are overridden by release_impl$(CHROME_BUILD_TYPE).vsprops. Uses the default VS setting, which is "Maximize Speed". Results in a relatively fast build with a reasonable optimization level, but without whole program optimization, to reduce build time.
+
+  release_impl.vsprops
+    Included by ..\release.vsprops by default when CHROME_BUILD_TYPE is undefined. Includes release_defaults.vsprops.
+
+  release_impl_checksenabled.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_checksenabled. Matches what release_defaults.vsprops does, but doesn't actually inherit from it as we couldn't quite get that working. The only difference is that _DEBUG is set instead of NDEBUG. Used for keeping debug checks enabled with a build that is fast enough to dogfood with.
+
+  release_impl_official.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_official. Includes release_defaults.vsprops. Enables Whole Program Optimization (WPO), which doubles the build time. Results in a much more optimized build. Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_instrument.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_instrument. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) instrumentation (first pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_optimize.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_optimize. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) optimization (second pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_purify.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_purify. Includes release_defaults.vsprops. Disables optimizations. Used with Purify to test without debug tools and without optimization; i.e. NDEBUG is defined but the compiler doesn't optimize the binary.
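
Every release_impl*.vsprops variant listed above is selected by splicing
CHROME_BUILD_TYPE into the sheet name. A short illustration of the naming
scheme (hypothetical Python, for exposition only):

    import os

    # '' -> release_impl.vsprops, '_official' -> release_impl_official.vsprops,
    # '_purify' -> release_impl_purify.vsprops, and so on.
    build_type = os.environ.get('CHROME_BUILD_TYPE', '')
    print('release_impl%s.vsprops' % build_type)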
diff --git a/build/internal/release_defaults.gypi b/build/internal/release_defaults.gypi
new file mode 100644
index 0000000..1bf674a
--- /dev/null
+++ b/build/internal/release_defaults.gypi
@@ -0,0 +1,18 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'StringPooling': 'true',
+    },
+    'VCLinkerTool': {
+      # No incremental linking.
+      'LinkIncremental': '1',
+      # Eliminate Unreferenced Data (/OPT:REF).
+      'OptimizeReferences': '2',
+      # Folding on (/OPT:ICF).
+      'EnableCOMDATFolding': '2',
+    },
+  },
+}
diff --git a/build/internal/release_impl.gypi b/build/internal/release_impl.gypi
new file mode 100644
index 0000000..5ac0e09
--- /dev/null
+++ b/build/internal/release_impl.gypi
@@ -0,0 +1,17 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'includes': ['release_defaults.gypi'],
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'OmitFramePointers': 'false',
+      # The above is not sufficient (http://crbug.com/106711): it
+      # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+      # perform FPO regardless, so we must explicitly disable it.
+      # We still want the false setting above to avoid having
+      # "/Oy /Oy-" and warnings about overriding.
+      'AdditionalOptions': ['/Oy-'],
+    },
+  },
+}
diff --git a/build/internal/release_impl_official.gypi b/build/internal/release_impl_official.gypi
new file mode 100644
index 0000000..36d5d78
--- /dev/null
+++ b/build/internal/release_impl_official.gypi
@@ -0,0 +1,41 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'includes': ['release_defaults.gypi'],
+  'defines': ['OFFICIAL_BUILD'],
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'InlineFunctionExpansion': '2',
+      'EnableIntrinsicFunctions': 'true',
+      'OmitFramePointers': 'false',
+      # The above is not sufficient (http://crbug.com/106711): it
+      # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+      # perform FPO regardless, so we must explicitly disable it.
+      # We still want the false setting above to avoid having
+      # "/Oy /Oy-" and warnings about overriding.
+      'AdditionalOptions': ['/Oy-'],
+    },
+    'VCLibrarianTool': {
+      'AdditionalOptions': [
+        '/ltcg',
+        '/expectedoutputsize:120000000'
+      ],
+    },
+    'VCLinkerTool': {
+      'AdditionalOptions': [
+        '/time',
+        # This may reduce memory fragmentation during linking.
+        # The expected size is 40*1024*1024, which gives us about 10M of
+        # headroom as of Dec 16, 2011.
+        '/expectedoutputsize:41943040',
+      ],
+      # The /PROFILE flag causes the linker to add a "FIXUP" debug stream to
+      # the generated PDB. According to MSDN documentation, this flag is only
+      # available (or perhaps supported) in the Enterprise (team development)
+      # version of Visual Studio. If this blocks your official build, simply
+      # comment out this line, then re-run "gclient runhooks".
+      'Profile': 'true',
+    },
+  },
+}
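
The linker comment above already does the arithmetic; as a one-line check
(plain Python, nothing from the build is assumed):

    # 40 MiB expressed in bytes, the unit /expectedoutputsize takes.
    assert 40 * 1024 * 1024 == 41943040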
diff --git a/build/inverse_depth.py b/build/inverse_depth.py
new file mode 100755
index 0000000..ce7a6ab
--- /dev/null
+++ b/build/inverse_depth.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+
+def DoMain(argv):
+  depth = argv[0]
+  return os.path.relpath(os.getcwd(), os.path.abspath(depth))
+
+
+def main(argv):
+  if len(argv) < 2:
+    print "USAGE: inverse_depth.py depth"
+    return 1
+  print DoMain(argv[1:])
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
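
A usage sketch for inverse_depth.py (the /src checkout path is hypothetical):
run from two levels below the root with a depth of '../..', it prints the
path back down from that root.

    # Hypothetical layout: checkout root /src, current directory /src/build/ios.
    #
    #   $ python inverse_depth.py ../..
    #   build/ios
    #
    # because os.path.relpath('/src/build/ios', '/src') == 'build/ios' on a
    # POSIX host.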
diff --git a/build/ios/OWNERS b/build/ios/OWNERS
new file mode 100644
index 0000000..4caf405
--- /dev/null
+++ b/build/ios/OWNERS
@@ -0,0 +1,4 @@
+rohitrao@chromium.org
+stuartmorgan@chromium.org
+
+per-file grit_whitelist.txt=*
diff --git a/build/ios/PRESUBMIT.py b/build/ios/PRESUBMIT.py
new file mode 100644
index 0000000..bbd17b3
--- /dev/null
+++ b/build/ios/PRESUBMIT.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Chromium presubmit script for src/tools/ios.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into depot_tools.
+"""
+
+import os
+
+WHITELIST_FILE = 'build/ios/grit_whitelist.txt'
+
+def _CheckWhitelistSorted(input_api, output_api):
+  for path in input_api.LocalPaths():
+    if WHITELIST_FILE == path:
+      lines = open(os.path.join('../..', WHITELIST_FILE)).readlines()
+      i = 0
+      while i < len(lines) - 1 and lines[i] <= lines[i + 1]:
+        i += 1
+      if i < len(lines) - 1:
+        return [output_api.PresubmitError(
+            'The file ' + WHITELIST_FILE + ' must be sorted.  ' +
+            'First offending line: #' + str(i + 2))]
+  return []
+
+def _CommonChecks(input_api, output_api):
+  """Checks common to both upload and commit."""
+  results = []
+  results.extend(_CheckWhitelistSorted(input_api, output_api))
+  return results
+
+def CheckChangeOnUpload(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  return results
+
+def CheckChangeOnCommit(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  return results
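
The sortedness scan above advances through adjacent pairs and stops at the
first out-of-order neighbour; since i is the 0-based index of the last
in-order line, the report names source line i + 2. A self-contained
illustration of that arithmetic, mirroring the loop above:

    lines = ['a\n', 'c\n', 'b\n']  # 'c' > 'b', so the third line is misplaced
    i = 0
    while i < len(lines) - 1 and lines[i] <= lines[i + 1]:
        i += 1
    # The loop stops with i == 1, and the presubmit error names line i + 2 == 3.
    assert i + 2 == 3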
diff --git a/build/ios/chrome_ios.croc b/build/ios/chrome_ios.croc
new file mode 100644
index 0000000..938a2e9
--- /dev/null
+++ b/build/ios/chrome_ios.croc
@@ -0,0 +1,71 @@
+# -*- python -*-
+# Crocodile config file for Chromium iOS.
+#
+# Note that Chromium iOS also uses the config file at src/build/common.croc.
+#
+# See src/tools/code_coverage/example.croc for more info on config files.
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Exclude everything to negate whatever is in src/build/common.croc
+    {
+      'regexp' : '.*',
+      'include' : 0,
+    },
+
+    # Include all directories (but not the files in the directories).
+    # This is a workaround for how croc.py walks the directory tree. See the
+    # TODO in the AddFiles method of src/tools/code_coverage/croc.py
+    {
+      'regexp' : '.*/$',
+      'include' : 1,
+    },
+
+    # Include any file with an 'ios' directory in the path.
+    {
+      'regexp' : '.*/ios/.*',
+      'include' : 1,
+      'add_if_missing' : 1,
+    },
+
+    # Include any file that ends with _ios.
+    {
+      'regexp' : '.*_ios\\.(c|cc|m|mm)$',
+      'include' : 1,
+      'add_if_missing' : 1,
+    },
+
+    # Include any file that ends with _ios_unittest (and label it a test).
+    {
+      'regexp' : '.*_ios_unittest\\.(c|cc|m|mm)$',
+      'include' : 1,
+      'add_if_missing' : 1,
+      'group' : 'test',
+    },
+
+    # Don't scan for executable lines in uninstrumented header files.
+    {
+      'regexp' : '.*\\.(h|hpp)$',
+      'add_if_missing' : 0,
+    },
+
+    # Don't measure coverage of perftests.
+    {
+      'regexp' : '.*perftest\\.(c|cc|m|mm)$',
+      'include' : 0,
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.m$',
+      'language' : 'ObjC',
+    },
+    {
+      'regexp' : '.*\\.mm$',
+      'language' : 'ObjC++',
+    },
+  ],
+}
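
Because the rules above are applied in order, a later matching rule overrides
an earlier one; that is what lets the file exclude everything first and then
re-include iOS paths. A rough Python sketch of that evaluation model (an
approximation only; see src/tools/code_coverage/croc.py for the real logic):

    import re

    def should_include(path, rules):
        # Apply every matching rule in order; the last match wins.
        include = False
        for rule in rules:
            if re.match(rule['regexp'], path) and 'include' in rule:
                include = bool(rule['include'])
        return include

    rules = [
        {'regexp': '.*', 'include': 0},         # exclude everything...
        {'regexp': '.*/ios/.*', 'include': 1},  # ...then re-include iOS paths
    ]
    assert should_include('chrome/browser/ios/foo.mm', rules)
    assert not should_include('chrome/browser/mac/foo.mm', rules)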
diff --git a/build/ios/clean_env.py b/build/ios/clean_env.py
new file mode 100755
index 0000000..548e2b9
--- /dev/null
+++ b/build/ios/clean_env.py
@@ -0,0 +1,77 @@
+#!/usr/bin/python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+def Main(argv):
+  """This is like 'env -i', but it uses a whitelist of env variables to allow
+  through to the command being run.  It attempts to strip off Xcode-added
+  values from PATH.
+  """
+  # Note: An attempt was made to do something like: env -i bash -lc '[command]'
+  # but that fails to set the things set by login (USER, etc.), so instead
+  # the only approach that seems to work is to have a whitelist.
+  env_key_whitelist = (
+    'HOME',
+    'LOGNAME',
+    # 'PATH' added below (but filtered).
+    'PWD',
+    'SHELL',
+    'TEMP',
+    'TMPDIR',
+    'USER'
+  )
+
+  # Need something to run.
+  # TODO(lliabraa): Make this output a usage string and exit (here and below).
+  assert(len(argv) > 0)
+
+  add_to_path = []
+  first_entry = argv[0]
+  if first_entry.startswith('ADD_TO_PATH='):
+    argv = argv[1:]
+    add_to_path = first_entry.replace('ADD_TO_PATH=', '', 1).split(':')
+
+  # Still need something to run.
+  assert(len(argv) > 0)
+
+  clean_env = {}
+
+  # Pull over the whitelisted keys.
+  for key in env_key_whitelist:
+    val = os.environ.get(key, None)
+    if val is not None:
+      clean_env[key] = val
+
+  # Collect the developer dir as set via Xcode, defaulting it.
+  dev_prefix = os.environ.get('DEVELOPER_DIR', '/Developer/')
+  if dev_prefix[-1:] != '/':
+    dev_prefix += '/'
+
+  # Now pull in PATH, but remove anything Xcode might have added.
+  initial_path = os.environ.get('PATH', '')
+  filtered_chunks = \
+      [x for x in initial_path.split(':') if not x.startswith(dev_prefix)]
+  if filtered_chunks:
+    clean_env['PATH'] = ':'.join(add_to_path + filtered_chunks)
+
+  # Add any KEY=VALUE args before the command to the cleaned environment.
+  args = argv[:]
+  while '=' in args[0]:
+    (key, val) = args[0].split('=', 1)
+    clean_env[key] = val
+    args = args[1:]
+
+  # Still need something to run.
+  assert(len(args) > 0)
+
+  # Off it goes...
+  os.execvpe(args[0], args, clean_env)
+  # Should never get here, so return a distinctive, non-zero status code.
+  return 66
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
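
A usage sketch for clean_env.py (the command and paths are illustrative, not
from this change): prepend a directory to the filtered PATH, set one extra
variable, and exec the tool, e.g.

    clean_env.py ADD_TO_PATH=/usr/local/bin GYP_GENERATORS=ninja ninja -C out

The KEY=VALUE peeling in Main() then reduces to:

    # Mirrors the argument-peeling loop in Main() above.
    argv = ['GYP_GENERATORS=ninja', 'ninja', '-C', 'out']
    clean_env, args = {}, argv[:]
    while '=' in args[0]:
        key, val = args[0].split('=', 1)
        clean_env[key] = val
        args = args[1:]
    assert clean_env == {'GYP_GENERATORS': 'ninja'}
    assert args == ['ninja', '-C', 'out']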
diff --git a/build/ios/coverage.gypi b/build/ios/coverage.gypi
new file mode 100644
index 0000000..e822089
--- /dev/null
+++ b/build/ios/coverage.gypi
@@ -0,0 +1,32 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    'enable_coverage%': 0,
+  },
+  'conditions': [
+    ['enable_coverage', {
+        'target_defaults': {
+          'defines': [
+            'ENABLE_TEST_CODE_COVERAGE=1'
+          ],
+          'link_settings': {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-fprofile-arcs',
+              ],
+            },
+          },
+          'xcode_settings': {
+            'OTHER_CFLAGS': [
+              '-fprofile-arcs',
+              '-ftest-coverage',
+            ],
+          },
+        },
+    }],
+  ],
+}
+
diff --git a/build/ios/grit_whitelist.txt b/build/ios/grit_whitelist.txt
new file mode 100644
index 0000000..a976daf
--- /dev/null
+++ b/build/ios/grit_whitelist.txt
@@ -0,0 +1,1154 @@
+IDR_ABOUT_DOM_DISTILLER_CSS
+IDR_ABOUT_DOM_DISTILLER_HTML
+IDR_ABOUT_DOM_DISTILLER_JS
+IDR_ABOUT_STATS_HTML
+IDR_ABOUT_STATS_JS
+IDR_ABOUT_VERSION_CSS
+IDR_ABOUT_VERSION_HTML
+IDR_ABOUT_VERSION_JS
+IDR_CONTEXTUAL_SEARCH_PROMO_HTML
+IDR_CONTROLLED_SETTING_MANDATORY
+IDR_CRASHES_HTML
+IDR_CRASHES_JS
+IDR_CREDITS_HTML
+IDR_CREDITS_JS
+IDR_CREDIT_CARD_CVC_HINT
+IDR_CREDIT_CARD_CVC_HINT_AMEX
+IDR_DATA_REDUCTION_PROXY_INTERSTITIAL_HTML
+IDR_DEFAULT_FAVICON
+IDR_DEFAULT_FAVICON_32
+IDR_DEFAULT_FAVICON_64
+IDR_DIR_HEADER_HTML
+IDR_DISTILLABLE_PAGE_SERIALIZED_MODEL
+IDR_DISTILLER_CSS
+IDR_DISTILLER_IOS_CSS
+IDR_DISTILLER_JS
+IDR_DOM_DISTILLER_VIEWER_HTML
+IDR_DOM_DISTILLER_VIEWER_JS
+IDR_EXTRACT_PAGE_FEATURES_JS
+IDR_FLAGS_FAVICON
+IDR_FLAGS_HTML
+IDR_FLAGS_JS
+IDR_GCM_INTERNALS_CSS
+IDR_GCM_INTERNALS_HTML
+IDR_GCM_INTERNALS_JS
+IDR_HISTORY_FAVICON
+IDR_HISTORY_HTML
+IDR_HISTORY_JS
+IDR_INCOGNITO_TAB_HTML
+IDR_INFOBAR_AUTOFILL_CC
+IDR_INFOBAR_AUTOLOGIN
+IDR_INFOBAR_RESTORE_SESSION
+IDR_INFOBAR_SAVE_PASSWORD
+IDR_INFOBAR_TRANSLATE_IOS
+IDR_INFOBAR_WARNING
+IDR_IS_DISTILLABLE_JS
+IDR_LOCATION_BAR_HTTP
+IDR_NET_ERROR_HTML
+IDR_NET_EXPORT_HTML
+IDR_NET_EXPORT_JS
+IDR_NET_INTERNALS_INDEX_HTML
+IDR_NET_INTERNALS_INDEX_JS
+IDR_OMAHA_HTML
+IDR_OMAHA_JS
+IDR_OMNIBOX_CALCULATOR
+IDR_OMNIBOX_CLEAR_IOS
+IDR_OMNIBOX_CLEAR_OTR_IOS
+IDR_OMNIBOX_CLEAR_OTR_PRESSED_IOS
+IDR_OMNIBOX_CLEAR_PRESSED_IOS
+IDR_OMNIBOX_EXTENSION_APP
+IDR_OMNIBOX_HISTORY
+IDR_OMNIBOX_HISTORY_INCOGNITO
+IDR_OMNIBOX_HTTP
+IDR_OMNIBOX_HTTPS_INVALID
+IDR_OMNIBOX_HTTPS_POLICY_WARNING
+IDR_OMNIBOX_HTTPS_VALID
+IDR_OMNIBOX_HTTPS_WARNING
+IDR_OMNIBOX_HTTP_INCOGNITO
+IDR_OMNIBOX_KEYBOARD_VIEW_APPEND
+IDR_OMNIBOX_KEYBOARD_VIEW_APPEND_HIGHLIGHTED
+IDR_OMNIBOX_KEYBOARD_VIEW_APPEND_INCOGNITO
+IDR_OMNIBOX_KEYBOARD_VIEW_APPEND_INCOGNITO_HIGHLIGHTED
+IDR_OMNIBOX_SEARCH
+IDR_OMNIBOX_SEARCH_INCOGNITO
+IDR_OMNIBOX_SEARCH_SECURED
+IDR_OMNIBOX_STAR
+IDR_OMNIBOX_STAR_INCOGNITO
+IDR_OTHER_DEVICES_JS
+IDR_PAGEINFO_BAD
+IDR_PAGEINFO_GOOD
+IDR_PAGEINFO_INFO
+IDR_PAGEINFO_WARNING_MAJOR
+IDR_PAGEINFO_WARNING_MINOR
+IDR_POLICY_CSS
+IDR_POLICY_HTML
+IDR_POLICY_JS
+IDR_PRINTER_FAVICON
+IDR_SAD_FAVICON
+IDR_SAD_TAB
+IDR_SECURITY_INTERSTITIAL_HTML
+IDR_SIGNIN_INTERNALS_INDEX_HTML
+IDR_SIGNIN_INTERNALS_INDEX_JS
+IDR_SYNC_INTERNALS_ABOUT_JS
+IDR_SYNC_INTERNALS_CHROME_SYNC_JS
+IDR_SYNC_INTERNALS_DATA_JS
+IDR_SYNC_INTERNALS_EVENTS_JS
+IDR_SYNC_INTERNALS_INDEX_HTML
+IDR_SYNC_INTERNALS_INDEX_JS
+IDR_SYNC_INTERNALS_SEARCH_JS
+IDR_SYNC_INTERNALS_SYNC_LOG_JS
+IDR_SYNC_INTERNALS_SYNC_NODE_BROWSER_JS
+IDR_SYNC_INTERNALS_SYNC_SEARCH_JS
+IDR_SYNC_INTERNALS_TYPES_JS
+IDR_TOOLBAR_SHADOW_FULL_BLEED
+IDR_TRANSLATE_JS
+IDR_UBER_UTILS_JS
+IDR_WEBUI_CSS_TEXT_DEFAULTS
+IDR_WEBUI_I18N_TEMPLATE_JS
+IDR_WEBUI_JSTEMPLATE_JS
+IDR_WEBUI_JS_LOAD_TIME_DATA
+IDS_ABOUT_MAC
+IDS_ABOUT_VERSION_COMMAND_LINE
+IDS_ABOUT_VERSION_COMPANY_NAME
+IDS_ABOUT_VERSION_COPYRIGHT
+IDS_ABOUT_VERSION_EXECUTABLE_PATH
+IDS_ABOUT_VERSION_OFFICIAL
+IDS_ABOUT_VERSION_OS
+IDS_ABOUT_VERSION_PATH_NOTFOUND
+IDS_ABOUT_VERSION_PROFILE_PATH
+IDS_ABOUT_VERSION_REVISION
+IDS_ABOUT_VERSION_TITLE
+IDS_ABOUT_VERSION_UNOFFICIAL
+IDS_ABOUT_VERSION_USER_AGENT
+IDS_ABOUT_VERSION_VARIATIONS
+IDS_ACCEPT_LANGUAGES
+IDS_ACCNAME_BACK
+IDS_ACCNAME_CLEAR_TEXT
+IDS_ACCNAME_FORWARD
+IDS_ACCNAME_LOCATION
+IDS_ACCNAME_VOICE_SEARCH
+IDS_ALLOW_INSECURE_CONTENT_BUTTON
+IDS_ALTERNATE_NAV_URL_VIEW_LABEL
+IDS_ANNOTATED_SUGGESTION
+IDS_APP_CANCEL
+IDS_APP_OK
+IDS_APP_UNTITLED_SHORTCUT_FILE_NAME
+IDS_AUTOCOMPLETE_SEARCH_DESCRIPTION
+IDS_AUTOFILL_ADDRESS_LINE_SEPARATOR
+IDS_AUTOFILL_ADDRESS_SUMMARY_SEPARATOR
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_ERROR_NETWORK
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_ERROR_PERMANENT
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_ERROR_TRY_AGAIN
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_INSTRUCTIONS
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_INSTRUCTIONS_AMEX
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_INSTRUCTIONS_EXPIRED
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_INSTRUCTIONS_EXPIRED_AMEX
+IDS_AUTOFILL_CC_AMEX
+IDS_AUTOFILL_CC_AMEX_SHORT
+IDS_AUTOFILL_CC_DINERS
+IDS_AUTOFILL_CC_DISCOVER
+IDS_AUTOFILL_CC_GENERIC
+IDS_AUTOFILL_CC_INFOBAR_ACCEPT
+IDS_AUTOFILL_CC_INFOBAR_DENY
+IDS_AUTOFILL_CC_INFOBAR_TEXT
+IDS_AUTOFILL_CC_JCB
+IDS_AUTOFILL_CC_MASTERCARD
+IDS_AUTOFILL_CC_UNION_PAY
+IDS_AUTOFILL_CC_VISA
+IDS_AUTOFILL_CLEAR_FORM_MENU_ITEM
+IDS_AUTOFILL_DELETE_AUTOCOMPLETE_SUGGESTION_CONFIRMATION_BODY
+IDS_AUTOFILL_DELETE_CREDIT_CARD_SUGGESTION_CONFIRMATION_BODY
+IDS_AUTOFILL_DELETE_PROFILE_SUGGESTION_CONFIRMATION_BODY
+IDS_AUTOFILL_DIALOG_PRIVACY_POLICY_LINK
+IDS_AUTOFILL_FIELD_LABEL_AREA
+IDS_AUTOFILL_FIELD_LABEL_COUNTY
+IDS_AUTOFILL_FIELD_LABEL_DEPARTMENT
+IDS_AUTOFILL_FIELD_LABEL_DISTRICT
+IDS_AUTOFILL_FIELD_LABEL_EMIRATE
+IDS_AUTOFILL_FIELD_LABEL_ISLAND
+IDS_AUTOFILL_FIELD_LABEL_PARISH
+IDS_AUTOFILL_FIELD_LABEL_POSTAL_CODE
+IDS_AUTOFILL_FIELD_LABEL_PREFECTURE
+IDS_AUTOFILL_FIELD_LABEL_PROVINCE
+IDS_AUTOFILL_FIELD_LABEL_STATE
+IDS_AUTOFILL_FIELD_LABEL_ZIP_CODE
+IDS_AUTOFILL_OPTIONS_POPUP
+IDS_AUTOFILL_PASSWORD_FIELD_SUGGESTIONS_TITLE
+IDS_AUTOFILL_SCAN_CREDIT_CARD
+IDS_AUTOFILL_WARNING_FORM_DISABLED
+IDS_AUTOFILL_WARNING_INSECURE_CONNECTION
+IDS_AUTOLOGIN_INFOBAR_CANCEL_BUTTON
+IDS_AUTOLOGIN_INFOBAR_MESSAGE
+IDS_AUTOLOGIN_INFOBAR_OK_BUTTON
+IDS_BLOCKED_DISPLAYING_INSECURE_CONTENT
+IDS_BLOCK_INSECURE_CONTENT_BUTTON
+IDS_BOOKMARK_ADD_EDITOR_TITLE
+IDS_BOOKMARK_ALL_TABS_DIALOG_TITLE
+IDS_BOOKMARK_BAR_FOLDER_NAME
+IDS_BOOKMARK_BAR_MANAGED_FOLDER_DEFAULT_NAME
+IDS_BOOKMARK_BAR_MANAGED_FOLDER_DOMAIN_NAME
+IDS_BOOKMARK_BAR_MOBILE_FOLDER_NAME
+IDS_BOOKMARK_BAR_OTHER_FOLDER_NAME
+IDS_BOOKMARK_BAR_REDO
+IDS_BOOKMARK_BAR_REDO_ADD
+IDS_BOOKMARK_BAR_REDO_DELETE
+IDS_BOOKMARK_BAR_REDO_EDIT
+IDS_BOOKMARK_BAR_REDO_MOVE
+IDS_BOOKMARK_BAR_REDO_REORDER
+IDS_BOOKMARK_BAR_SUPERVISED_FOLDER_DEFAULT_NAME
+IDS_BOOKMARK_BAR_UNDO
+IDS_BOOKMARK_BAR_UNDO_ADD
+IDS_BOOKMARK_BAR_UNDO_DELETE
+IDS_BOOKMARK_BAR_UNDO_EDIT
+IDS_BOOKMARK_BAR_UNDO_MOVE
+IDS_BOOKMARK_BAR_UNDO_REORDER
+IDS_BOOKMARK_BUBBLE_CHOOSER_ANOTHER_FOLDER
+IDS_BOOKMARK_BUBBLE_REMOVE_BOOKMARK
+IDS_BOOKMARK_EDITOR_CONFIRM_DELETE
+IDS_BOOKMARK_EDITOR_NEW_FOLDER_NAME
+IDS_BOOKMARK_EDITOR_TITLE
+IDS_BOOKMARK_FOLDER_CHOOSER_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_WINDOW_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_WINDOW_TITLE_NEW
+IDS_BOOKMARK_MANAGER_FOLDER_SECTION
+IDS_BOOKMARK_MANAGER_FOLDER_TITLE
+IDS_BOOKMARK_MANAGER_NAME_INPUT_PLACE_HOLDER
+IDS_BOOKMARK_MANAGER_REMOVE_TITLE
+IDS_BOOKMARK_MANAGER_URL_INPUT_PLACE_HOLDER
+IDS_BOOKMARK_NEW_FOLDER_BUTTON_TITLE
+IDS_CANCEL
+IDS_CERT_ERROR_AUTHORITY_INVALID_DESCRIPTION
+IDS_CERT_ERROR_AUTHORITY_INVALID_DETAILS
+IDS_CERT_ERROR_AUTHORITY_INVALID_EXTRA_INFO_2
+IDS_CERT_ERROR_AUTHORITY_INVALID_TITLE
+IDS_CERT_ERROR_CHAIN_EXPIRED_DESCRIPTION
+IDS_CERT_ERROR_CHAIN_EXPIRED_DETAILS
+IDS_CERT_ERROR_COMMON_NAME_INVALID_DESCRIPTION
+IDS_CERT_ERROR_COMMON_NAME_INVALID_DETAILS
+IDS_CERT_ERROR_COMMON_NAME_INVALID_EXTRA_INFO_2
+IDS_CERT_ERROR_COMMON_NAME_INVALID_TITLE
+IDS_CERT_ERROR_CONTAINS_ERRORS_DESCRIPTION
+IDS_CERT_ERROR_CONTAINS_ERRORS_DETAILS
+IDS_CERT_ERROR_CONTAINS_ERRORS_EXTRA_INFO_2
+IDS_CERT_ERROR_CONTAINS_ERRORS_TITLE
+IDS_CERT_ERROR_EXPIRED_DESCRIPTION
+IDS_CERT_ERROR_EXPIRED_DETAILS
+IDS_CERT_ERROR_EXPIRED_DETAILS_EXTRA_INFO_2
+IDS_CERT_ERROR_EXPIRED_TITLE
+IDS_CERT_ERROR_EXTRA_INFO_1
+IDS_CERT_ERROR_EXTRA_INFO_TITLE
+IDS_CERT_ERROR_INVALID_CERT_DESCRIPTION
+IDS_CERT_ERROR_INVALID_CERT_DETAILS
+IDS_CERT_ERROR_INVALID_CERT_EXTRA_INFO_2
+IDS_CERT_ERROR_INVALID_CERT_TITLE
+IDS_CERT_ERROR_NAME_CONSTRAINT_VIOLATION_DESCRIPTION
+IDS_CERT_ERROR_NAME_CONSTRAINT_VIOLATION_DETAILS
+IDS_CERT_ERROR_NAME_CONSTRAINT_VIOLATION_TITLE
+IDS_CERT_ERROR_NOT_YET_VALID_DESCRIPTION
+IDS_CERT_ERROR_NOT_YET_VALID_DETAILS
+IDS_CERT_ERROR_NOT_YET_VALID_DETAILS_EXTRA_INFO_2
+IDS_CERT_ERROR_NOT_YET_VALID_TITLE
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_DESCRIPTION
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_DETAILS
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_TITLE
+IDS_CERT_ERROR_REVOKED_CERT_DESCRIPTION
+IDS_CERT_ERROR_REVOKED_CERT_DETAILS
+IDS_CERT_ERROR_REVOKED_CERT_EXTRA_INFO_2
+IDS_CERT_ERROR_REVOKED_CERT_TITLE
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_DESCRIPTION
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_DETAILS
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_TITLE
+IDS_CERT_ERROR_UNKNOWN_ERROR_DESCRIPTION
+IDS_CERT_ERROR_UNKNOWN_ERROR_DETAILS
+IDS_CERT_ERROR_UNKNOWN_ERROR_TITLE
+IDS_CERT_ERROR_WEAK_KEY_DESCRIPTION
+IDS_CERT_ERROR_WEAK_KEY_DETAILS
+IDS_CERT_ERROR_WEAK_KEY_EXTRA_INFO_2
+IDS_CERT_ERROR_WEAK_KEY_TITLE
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_DESCRIPTION
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_DETAILS
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_EXTRA_INFO_2
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_TITLE
+IDS_CHROME_TO_DEVICE_PRINT_TO_PHONE
+IDS_CHROME_TO_DEVICE_SNAPSHOTS
+IDS_CLOSE
+IDS_CONTEXTUAL_SEARCH_HEADER
+IDS_CONTEXTUAL_SEARCH_PROMO_DESCRIPTION_1
+IDS_CONTEXTUAL_SEARCH_PROMO_DESCRIPTION_2
+IDS_CONTEXTUAL_SEARCH_PROMO_FEATURE_NAME
+IDS_CONTEXTUAL_SEARCH_PROMO_OPTIN
+IDS_CONTEXTUAL_SEARCH_PROMO_OPTOUT
+IDS_COULDNT_OPEN_PROFILE_ERROR
+IDS_CRASHES_BUG_LINK_LABEL
+IDS_CRASHES_CRASH_COUNT_BANNER_FORMAT
+IDS_CRASHES_CRASH_HEADER_FORMAT
+IDS_CRASHES_CRASH_TIME_FORMAT
+IDS_CRASHES_DISABLED_HEADER
+IDS_CRASHES_DISABLED_MESSAGE
+IDS_CRASHES_NO_CRASHES_MESSAGE
+IDS_CRASHES_TITLE
+IDS_CRASHES_UPLOAD_MESSAGE
+IDS_DATA_REDUCTION_PROXY_BACK_BUTTON
+IDS_DATA_REDUCTION_PROXY_CANNOT_PROXY_HEADING
+IDS_DATA_REDUCTION_PROXY_CANNOT_PROXY_PRIMARY_PARAGRAPH
+IDS_DATA_REDUCTION_PROXY_CANNOT_PROXY_SECONDARY_PARAGRAPH
+IDS_DATA_REDUCTION_PROXY_CONTINUE_BUTTON
+IDS_DATA_REDUCTION_PROXY_TITLE
+IDS_DEFAULT_AVATAR_NAME_10
+IDS_DEFAULT_AVATAR_NAME_11
+IDS_DEFAULT_AVATAR_NAME_12
+IDS_DEFAULT_AVATAR_NAME_13
+IDS_DEFAULT_AVATAR_NAME_14
+IDS_DEFAULT_AVATAR_NAME_15
+IDS_DEFAULT_AVATAR_NAME_16
+IDS_DEFAULT_AVATAR_NAME_17
+IDS_DEFAULT_AVATAR_NAME_18
+IDS_DEFAULT_AVATAR_NAME_19
+IDS_DEFAULT_AVATAR_NAME_20
+IDS_DEFAULT_AVATAR_NAME_21
+IDS_DEFAULT_AVATAR_NAME_22
+IDS_DEFAULT_AVATAR_NAME_23
+IDS_DEFAULT_AVATAR_NAME_24
+IDS_DEFAULT_AVATAR_NAME_25
+IDS_DEFAULT_AVATAR_NAME_26
+IDS_DEFAULT_AVATAR_NAME_8
+IDS_DEFAULT_AVATAR_NAME_9
+IDS_DEFAULT_ENCODING
+IDS_DEFAULT_PROFILE_NAME
+IDS_DEFAULT_TAB_TITLE
+IDS_DELETE
+IDS_DISABLE_TOUCH_ADJUSTMENT_DESCRIPTION
+IDS_DISABLE_TOUCH_ADJUSTMENT_NAME
+IDS_DOM_DISTILLER_JAVASCRIPT_DISABLED_CONTENT
+IDS_DOM_DISTILLER_QUALITY_ANSWER_NO
+IDS_DOM_DISTILLER_QUALITY_ANSWER_YES
+IDS_DOM_DISTILLER_QUALITY_QUESTION
+IDS_DOM_DISTILLER_VIEWER_CLOSE_READER_VIEW
+IDS_DOM_DISTILLER_VIEWER_FAILED_TO_FIND_ARTICLE_CONTENT
+IDS_DOM_DISTILLER_VIEWER_FAILED_TO_FIND_ARTICLE_TITLE
+IDS_DOM_DISTILLER_VIEWER_LOADING_STRING
+IDS_DOM_DISTILLER_VIEWER_LOADING_TITLE
+IDS_DOM_DISTILLER_VIEWER_NO_DATA_CONTENT
+IDS_DOM_DISTILLER_VIEWER_NO_DATA_TITLE
+IDS_DOM_DISTILLER_VIEWER_VIEW_ORIGINAL
+IDS_DOM_DISTILLER_WEBUI_ENTRY_ADD
+IDS_DOM_DISTILLER_WEBUI_ENTRY_ADD_FAILED
+IDS_DOM_DISTILLER_WEBUI_ENTRY_URL
+IDS_DOM_DISTILLER_WEBUI_FETCHING_ENTRIES
+IDS_DOM_DISTILLER_WEBUI_REFRESH
+IDS_DOM_DISTILLER_WEBUI_TITLE
+IDS_DOM_DISTILLER_WEBUI_VIEW_URL
+IDS_DOM_DISTILLER_WEBUI_VIEW_URL_FAILED
+IDS_DONE
+IDS_EASY_UNLOCK_SCREENLOCK_USER_POD_AUTH_VALUE
+IDS_EDIT_FIND_MAC
+IDS_EMPTY_KEYWORD_VALUE
+IDS_ERRORPAGES_BUTTON_LESS
+IDS_ERRORPAGES_BUTTON_MORE
+IDS_ERRORPAGES_BUTTON_RELOAD
+IDS_ERRORPAGES_BUTTON_SHOW_SAVED_COPY
+IDS_ERRORPAGES_BUTTON_SHOW_SAVED_COPY_HELP
+IDS_ERRORPAGES_DETAILS_ADDRESS_UNREACHABLE
+IDS_ERRORPAGES_DETAILS_BAD_GATEWAY
+IDS_ERRORPAGES_DETAILS_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_DETAILS_BLOCKED
+IDS_ERRORPAGES_DETAILS_BLOCKED_BY_ADMINISTRATOR
+IDS_ERRORPAGES_DETAILS_BLOCKED_ENROLLMENT_CHECK_PENDING
+IDS_ERRORPAGES_DETAILS_CACHE_MISS
+IDS_ERRORPAGES_DETAILS_CACHE_READ_FAILURE
+IDS_ERRORPAGES_DETAILS_CONNECTION_CLOSED
+IDS_ERRORPAGES_DETAILS_CONNECTION_FAILED
+IDS_ERRORPAGES_DETAILS_CONNECTION_REFUSED
+IDS_ERRORPAGES_DETAILS_CONNECTION_RESET
+IDS_ERRORPAGES_DETAILS_DNS_PROBE_RUNNING
+IDS_ERRORPAGES_DETAILS_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_DETAILS_EMPTY_RESPONSE
+IDS_ERRORPAGES_DETAILS_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_DETAILS_FILE_NOT_FOUND
+IDS_ERRORPAGES_DETAILS_FORBIDDEN
+IDS_ERRORPAGES_DETAILS_GATEWAY_TIMEOUT
+IDS_ERRORPAGES_DETAILS_GONE
+IDS_ERRORPAGES_DETAILS_HTTP_VERSION_NOT_SUPPORTED
+IDS_ERRORPAGES_DETAILS_ICANN_NAME_COLLISION
+IDS_ERRORPAGES_DETAILS_INTERNAL_SERVER_ERROR
+IDS_ERRORPAGES_DETAILS_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_DETAILS_NAME_NOT_RESOLVED
+IDS_ERRORPAGES_DETAILS_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_DETAILS_NETWORK_CHANGED
+IDS_ERRORPAGES_DETAILS_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_DETAILS_NOT_IMPLEMENTED
+IDS_ERRORPAGES_DETAILS_PINNING_FAILURE
+IDS_ERRORPAGES_DETAILS_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_CONTENT_DISPOSITION
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_CONTENT_LENGTH
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_LOCATION
+IDS_ERRORPAGES_DETAILS_SERVICE_UNAVAILABLE
+IDS_ERRORPAGES_DETAILS_SSL_FALLBACK_BEYOND_MINIMUM_VERSION
+IDS_ERRORPAGES_DETAILS_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_DETAILS_SSL_VERSION_OR_CIPHER_MISMATCH
+IDS_ERRORPAGES_DETAILS_TEMPORARILY_THROTTLED
+IDS_ERRORPAGES_DETAILS_TIMED_OUT
+IDS_ERRORPAGES_DETAILS_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_DETAILS_UNKNOWN
+IDS_ERRORPAGES_ERROR_CODE
+IDS_ERRORPAGES_HEADING_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_HEADING_BLOCKED
+IDS_ERRORPAGES_HEADING_BLOCKED_BY_ADMINISTRATOR
+IDS_ERRORPAGES_HEADING_CACHE_MISS
+IDS_ERRORPAGES_HEADING_CACHE_READ_FAILURE
+IDS_ERRORPAGES_HEADING_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_HEADING_DUPLICATE_HEADERS
+IDS_ERRORPAGES_HEADING_EMPTY_RESPONSE
+IDS_ERRORPAGES_HEADING_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_HTTP_SERVER_ERROR
+IDS_ERRORPAGES_HEADING_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_HEADING_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_HEADING_NOT_AVAILABLE
+IDS_ERRORPAGES_HEADING_NOT_FOUND
+IDS_ERRORPAGES_HEADING_PINNING_FAILURE
+IDS_ERRORPAGES_HEADING_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_HEADING_SSL_FALLBACK_BEYOND_MINIMUM_VERSION
+IDS_ERRORPAGES_HEADING_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_HEADING_SSL_VERSION_OR_CIPHER_MISMATCH
+IDS_ERRORPAGES_HEADING_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_HEADING_WEAK_SERVER_EPHEMERAL_DH_KEY
+IDS_ERRORPAGES_HTTP_POST_WARNING
+IDS_ERRORPAGES_SUGGESTION_CHECK_CONNECTION_BODY
+IDS_ERRORPAGES_SUGGESTION_CHECK_CONNECTION_HEADER
+IDS_ERRORPAGES_SUGGESTION_CONTACT_ADMINISTRATOR
+IDS_ERRORPAGES_SUGGESTION_DNS_CONFIG
+IDS_ERRORPAGES_SUGGESTION_FIREWALL_CONFIG
+IDS_ERRORPAGES_SUGGESTION_GOOGLE_SEARCH
+IDS_ERRORPAGES_SUGGESTION_LEARNMORE_BODY
+IDS_ERRORPAGES_SUGGESTION_NETWORK_PREDICTION
+IDS_ERRORPAGES_SUGGESTION_PROXY_CONFIG
+IDS_ERRORPAGES_SUGGESTION_PROXY_DISABLE_PLATFORM
+IDS_ERRORPAGES_SUGGESTION_RELOAD
+IDS_ERRORPAGES_SUGGESTION_RELOAD_REPOST_BODY
+IDS_ERRORPAGES_SUGGESTION_RELOAD_REPOST_HEADER
+IDS_ERRORPAGES_SUGGESTION_VIEW_POLICIES
+IDS_ERRORPAGES_SUMMARY_ADDRESS_UNREACHABLE
+IDS_ERRORPAGES_SUMMARY_BAD_GATEWAY
+IDS_ERRORPAGES_SUMMARY_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_SUMMARY_BLOCKED
+IDS_ERRORPAGES_SUMMARY_BLOCKED_BY_ADMINISTRATOR
+IDS_ERRORPAGES_SUMMARY_BLOCKED_ENROLLMENT_CHECK_PENDING
+IDS_ERRORPAGES_SUMMARY_CACHE_MISS
+IDS_ERRORPAGES_SUMMARY_CACHE_READ_FAILURE
+IDS_ERRORPAGES_SUMMARY_CONNECTION_REFUSED
+IDS_ERRORPAGES_SUMMARY_CONNECTION_RESET
+IDS_ERRORPAGES_SUMMARY_DNS_PROBE_RUNNING
+IDS_ERRORPAGES_SUMMARY_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_SUMMARY_DUPLICATE_HEADERS
+IDS_ERRORPAGES_SUMMARY_EMPTY_RESPONSE
+IDS_ERRORPAGES_SUMMARY_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_SUMMARY_FORBIDDEN
+IDS_ERRORPAGES_SUMMARY_GATEWAY_TIMEOUT
+IDS_ERRORPAGES_SUMMARY_GONE
+IDS_ERRORPAGES_SUMMARY_ICANN_NAME_COLLISION
+IDS_ERRORPAGES_SUMMARY_INTERNAL_SERVER_ERROR
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED_INSTRUCTIONS_TEMPLATE
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED_PLATFORM
+IDS_ERRORPAGES_SUMMARY_NAME_NOT_RESOLVED
+IDS_ERRORPAGES_SUMMARY_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_SUMMARY_NETWORK_CHANGED
+IDS_ERRORPAGES_SUMMARY_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_SUMMARY_NOT_AVAILABLE
+IDS_ERRORPAGES_SUMMARY_NOT_FOUND
+IDS_ERRORPAGES_SUMMARY_PINNING_FAILURE
+IDS_ERRORPAGES_SUMMARY_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_SUMMARY_SERVICE_UNAVAILABLE
+IDS_ERRORPAGES_SUMMARY_SSL_FALLBACK_BEYOND_MINIMUM_VERSION
+IDS_ERRORPAGES_SUMMARY_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_SUMMARY_SSL_VERSION_OR_CIPHER_MISMATCH
+IDS_ERRORPAGES_SUMMARY_TEMPORARILY_THROTTLED
+IDS_ERRORPAGES_SUMMARY_TIMED_OUT
+IDS_ERRORPAGES_SUMMARY_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_SUMMARY_WEAK_SERVER_EPHEMERAL_DH_KEY
+IDS_ERRORPAGES_SUMMARY_WEBSITE_CANNOT_HANDLE
+IDS_ERRORPAGES_TITLE_ACCESS_DENIED
+IDS_ERRORPAGES_TITLE_BLOCKED
+IDS_ERRORPAGES_TITLE_LOAD_FAILED
+IDS_ERRORPAGES_TITLE_NOT_AVAILABLE
+IDS_ERRORPAGES_TITLE_NOT_FOUND
+IDS_ERRORPAGE_NET_BUTTON_DETAILS
+IDS_ERRORPAGE_NET_BUTTON_HIDE_DETAILS
+IDS_EXTENSION_KEYWORD_COMMAND
+IDS_FEEDBACK_REPORT_PAGE_TITLE
+IDS_FEEDBACK_REPORT_URL_LABEL
+IDS_FEEDBACK_SEND_REPORT
+IDS_FEEDBACK_USER_EMAIL_LABEL
+IDS_FIND_IN_PAGE_CLOSE_TOOLTIP
+IDS_FIND_IN_PAGE_COUNT
+IDS_FIND_IN_PAGE_NEXT_TOOLTIP
+IDS_FIND_IN_PAGE_PREVIOUS_TOOLTIP
+IDS_FLAGS_ACCELERATED_FIXED_ROOT_BACKGROUND_DESCRIPTION
+IDS_FLAGS_ACCELERATED_FIXED_ROOT_BACKGROUND_NAME
+IDS_FLAGS_ALLOW_NACL_SOCKET_API_DESCRIPTION
+IDS_FLAGS_ALLOW_NACL_SOCKET_API_NAME
+IDS_FLAGS_ALLOW_TOUCHPAD_THREE_FINGER_CLICK_DESCRIPTION
+IDS_FLAGS_ALLOW_TOUCHPAD_THREE_FINGER_CLICK_NAME
+IDS_FLAGS_COMPOSITED_LAYER_BORDERS
+IDS_FLAGS_COMPOSITED_LAYER_BORDERS_DESCRIPTION
+IDS_FLAGS_COMPOSITING_FOR_FIXED_POSITION_DESCRIPTION
+IDS_FLAGS_COMPOSITING_FOR_FIXED_POSITION_HIGH_DPI
+IDS_FLAGS_COMPOSITING_FOR_FIXED_POSITION_NAME
+IDS_FLAGS_CONFLICTS_CHECK_DESCRIPTION
+IDS_FLAGS_CONFLICTS_CHECK_NAME
+IDS_FLAGS_DEBUG_PACKED_APP_DESCRIPTION
+IDS_FLAGS_DEBUG_PACKED_APP_NAME
+IDS_FLAGS_DEBUG_SHORTCUTS_DESCRIPTION
+IDS_FLAGS_DEBUG_SHORTCUTS_NAME
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_DESCRIPTION
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_GRANDE
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_NAME
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_SHORT
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_TALL
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_VENTI
+IDS_FLAGS_DEFAULT_TILE_WIDTH_DESCRIPTION
+IDS_FLAGS_DEFAULT_TILE_WIDTH_GRANDE
+IDS_FLAGS_DEFAULT_TILE_WIDTH_NAME
+IDS_FLAGS_DEFAULT_TILE_WIDTH_SHORT
+IDS_FLAGS_DEFAULT_TILE_WIDTH_TALL
+IDS_FLAGS_DEFAULT_TILE_WIDTH_VENTI
+IDS_FLAGS_DISABLE
+IDS_FLAGS_DISABLE_ACCELERATED_2D_CANVAS_DESCRIPTION
+IDS_FLAGS_DISABLE_ACCELERATED_2D_CANVAS_NAME
+IDS_FLAGS_DISABLE_ACCELERATED_VIDEO_DECODE_DESCRIPTION
+IDS_FLAGS_DISABLE_ACCELERATED_VIDEO_DECODE_NAME
+IDS_FLAGS_DISABLE_BOOT_ANIMATION
+IDS_FLAGS_DISABLE_BOOT_ANIMATION_DESCRIPTION
+IDS_FLAGS_DISABLE_GESTURE_REQUIREMENT_FOR_MEDIA_PLAYBACK_DESCRIPTION
+IDS_FLAGS_DISABLE_GESTURE_REQUIREMENT_FOR_MEDIA_PLAYBACK_NAME
+IDS_FLAGS_DISABLE_HYPERLINK_AUDITING_DESCRIPTION
+IDS_FLAGS_DISABLE_HYPERLINK_AUDITING_NAME
+IDS_FLAGS_DISABLE_PNACL_DESCRIPTION
+IDS_FLAGS_DISABLE_PNACL_NAME
+IDS_FLAGS_DISABLE_SOFTWARE_RASTERIZER_DESCRIPTION
+IDS_FLAGS_DISABLE_SOFTWARE_RASTERIZER_NAME
+IDS_FLAGS_DISABLE_WEBGL_DESCRIPTION
+IDS_FLAGS_DISABLE_WEBGL_NAME
+IDS_FLAGS_DISABLE_WEBRTC_DESCRIPTION
+IDS_FLAGS_DISABLE_WEBRTC_NAME
+IDS_FLAGS_ENABLE
+IDS_FLAGS_ENABLE_ACCELERATED_MJPEG_DECODE_DESCRIPTION
+IDS_FLAGS_ENABLE_ACCELERATED_MJPEG_DECODE_NAME
+IDS_FLAGS_ENABLE_APPS_SHOW_ON_FIRST_PAINT_DESCRIPTION
+IDS_FLAGS_ENABLE_APPS_SHOW_ON_FIRST_PAINT_NAME
+IDS_FLAGS_ENABLE_CONTEXTUAL_SEARCH
+IDS_FLAGS_ENABLE_CONTEXTUAL_SEARCH_DESCRIPTION
+IDS_FLAGS_ENABLE_DEVTOOLS_EXPERIMENTS_DESCRIPTION
+IDS_FLAGS_ENABLE_DEVTOOLS_EXPERIMENTS_NAME
+IDS_FLAGS_ENABLE_DOWNLOAD_RESUMPTION_DESCRIPTION
+IDS_FLAGS_ENABLE_DOWNLOAD_RESUMPTION_NAME
+IDS_FLAGS_ENABLE_ENHANCED_BOOKMARKS_DESCRIPTION
+IDS_FLAGS_ENABLE_ENHANCED_BOOKMARKS_NAME
+IDS_FLAGS_ENABLE_EXPERIMENTAL_CANVAS_FEATURES_DESCRIPTION
+IDS_FLAGS_ENABLE_EXPERIMENTAL_CANVAS_FEATURES_NAME
+IDS_FLAGS_ENABLE_GESTURE_TAP_HIGHLIGHTING_DESCRIPTION
+IDS_FLAGS_ENABLE_GESTURE_TAP_HIGHLIGHTING_NAME
+IDS_FLAGS_ENABLE_ICON_NTP_DESCRIPTION
+IDS_FLAGS_ENABLE_ICON_NTP_NAME
+IDS_FLAGS_ENABLE_JAVASCRIPT_HARMONY_DESCRIPTION
+IDS_FLAGS_ENABLE_JAVASCRIPT_HARMONY_NAME
+IDS_FLAGS_ENABLE_NACL_DEBUG_DESCRIPTION
+IDS_FLAGS_ENABLE_NACL_DEBUG_NAME
+IDS_FLAGS_ENABLE_NACL_DESCRIPTION
+IDS_FLAGS_ENABLE_NACL_NAME
+IDS_FLAGS_ENABLE_PANELS_DESCRIPTION
+IDS_FLAGS_ENABLE_PANELS_NAME
+IDS_FLAGS_ENABLE_PASSWORD_GENERATION_DESCRIPTION
+IDS_FLAGS_ENABLE_PASSWORD_GENERATION_NAME
+IDS_FLAGS_ENABLE_PINCH_SCALE_DESCRIPTION
+IDS_FLAGS_ENABLE_PINCH_SCALE_NAME
+IDS_FLAGS_ENABLE_REQUEST_TABLET_SITE_DESCRIPTION
+IDS_FLAGS_ENABLE_REQUEST_TABLET_SITE_NAME
+IDS_FLAGS_ENABLE_SCREEN_CAPTURE_DESCRIPTION
+IDS_FLAGS_ENABLE_SCREEN_CAPTURE_NAME
+IDS_FLAGS_ENABLE_SIMPLE_CACHE_BACKEND_DESCRIPTION
+IDS_FLAGS_ENABLE_SIMPLE_CACHE_BACKEND_NAME
+IDS_FLAGS_ENABLE_SMOOTH_SCROLLING_DESCRIPTION
+IDS_FLAGS_ENABLE_SMOOTH_SCROLLING_NAME
+IDS_FLAGS_ENABLE_STALE_WHILE_REVALIDATE_DESCRIPTION
+IDS_FLAGS_ENABLE_STALE_WHILE_REVALIDATE_NAME
+IDS_FLAGS_ENABLE_SUGGESTIONS_SERVICE_DESCRIPTION
+IDS_FLAGS_ENABLE_SUGGESTIONS_SERVICE_NAME
+IDS_FLAGS_ENABLE_SYNCED_NOTIFICATIONS_DESCRIPTION
+IDS_FLAGS_ENABLE_SYNCED_NOTIFICATIONS_NAME
+IDS_FLAGS_ENABLE_TCP_FAST_OPEN_DESCRIPTION
+IDS_FLAGS_ENABLE_TCP_FAST_OPEN_NAME
+IDS_FLAGS_ENABLE_TOUCH_DRAG_DROP_DESCRIPTION
+IDS_FLAGS_ENABLE_TOUCH_DRAG_DROP_NAME
+IDS_FLAGS_ENABLE_TOUCH_EDITING_DESCRIPTION
+IDS_FLAGS_ENABLE_TOUCH_EDITING_NAME
+IDS_FLAGS_ENABLE_TRANSLATE_NEW_UX_DESCRIPTION
+IDS_FLAGS_ENABLE_TRANSLATE_NEW_UX_NAME
+IDS_FLAGS_EXPERIMENTAL_EXTENSION_APIS_DESCRIPTION
+IDS_FLAGS_EXPERIMENTAL_EXTENSION_APIS_NAME
+IDS_FLAGS_EXPERIMENTAL_WEB_PLATFORM_FEATURES_DESCRIPTION
+IDS_FLAGS_EXPERIMENTAL_WEB_PLATFORM_FEATURES_NAME
+IDS_FLAGS_EXTENSIONS_ON_CHROME_URLS_DESCRIPTION
+IDS_FLAGS_EXTENSIONS_ON_CHROME_URLS_NAME
+IDS_FLAGS_FORCE_ACCELERATED_OVERFLOW_SCROLL_MODE_DESCRIPTION
+IDS_FLAGS_FORCE_ACCELERATED_OVERFLOW_SCROLL_MODE_NAME
+IDS_FLAGS_FORCE_HIGH_DPI_DESCRIPTION
+IDS_FLAGS_FORCE_HIGH_DPI_NAME
+IDS_FLAGS_IGNORE_GPU_BLACKLIST_DESCRIPTION
+IDS_FLAGS_IGNORE_GPU_BLACKLIST_NAME
+IDS_FLAGS_LONG_TITLE
+IDS_FLAGS_NACL_DEBUG_MASK_DESCRIPTION
+IDS_FLAGS_NACL_DEBUG_MASK_NAME
+IDS_FLAGS_NOT_AVAILABLE
+IDS_FLAGS_NO_EXPERIMENTS_AVAILABLE
+IDS_FLAGS_NO_UNSUPPORTED_EXPERIMENTS
+IDS_FLAGS_NTP_OTHER_SESSIONS_MENU_DESCRIPTION
+IDS_FLAGS_NTP_OTHER_SESSIONS_MENU_NAME
+IDS_FLAGS_PERFORMANCE_MONITOR_GATHERING_DESCRIPTION
+IDS_FLAGS_PERFORMANCE_MONITOR_GATHERING_NAME
+IDS_FLAGS_RELAUNCH_BUTTON
+IDS_FLAGS_RELAUNCH_NOTICE
+IDS_FLAGS_RESET_ALL_BUTTON
+IDS_FLAGS_SAVE_PAGE_AS_MHTML_DESCRIPTION
+IDS_FLAGS_SAVE_PAGE_AS_MHTML_NAME
+IDS_FLAGS_SHOW_AUTOFILL_TYPE_PREDICTIONS_DESCRIPTION
+IDS_FLAGS_SHOW_AUTOFILL_TYPE_PREDICTIONS_NAME
+IDS_FLAGS_SHOW_FPS_COUNTER
+IDS_FLAGS_SHOW_FPS_COUNTER_DESCRIPTION
+IDS_FLAGS_SHOW_TOUCH_HUD_DESCRIPTION
+IDS_FLAGS_SHOW_TOUCH_HUD_NAME
+IDS_FLAGS_SILENT_DEBUGGER_EXTENSION_API_DESCRIPTION
+IDS_FLAGS_SILENT_DEBUGGER_EXTENSION_API_NAME
+IDS_FLAGS_SPELLCHECK_AUTOCORRECT
+IDS_FLAGS_SPELLCHECK_AUTOCORRECT_DESCRIPTION
+IDS_FLAGS_STACKED_TAB_STRIP_DESCRIPTION
+IDS_FLAGS_STACKED_TAB_STRIP_NAME
+IDS_FLAGS_TABLE_TITLE
+IDS_FLAGS_THREADED_COMPOSITING_MODE_DESCRIPTION
+IDS_FLAGS_THREADED_COMPOSITING_MODE_NAME
+IDS_FLAGS_TOUCH_SCROLLING_MODE_ABSORB_TOUCHMOVE
+IDS_FLAGS_TOUCH_SCROLLING_MODE_DESCRIPTION
+IDS_FLAGS_TOUCH_SCROLLING_MODE_NAME
+IDS_FLAGS_TOUCH_SCROLLING_MODE_SYNC_TOUCHMOVE
+IDS_FLAGS_TOUCH_SCROLLING_MODE_TOUCHCANCEL
+IDS_FLAGS_UNSUPPORTED_TABLE_TITLE
+IDS_FLAGS_WALLET_SERVICE_USE_SANDBOX_DESCRIPTION
+IDS_FLAGS_WALLET_SERVICE_USE_SANDBOX_NAME
+IDS_FLAGS_WARNING_HEADER
+IDS_FLAGS_WARNING_TEXT
+IDS_FULLSCREEN
+IDS_GENERIC_EXPERIMENT_CHOICE_AUTOMATIC
+IDS_GENERIC_EXPERIMENT_CHOICE_DEFAULT
+IDS_GENERIC_EXPERIMENT_CHOICE_DISABLED
+IDS_GENERIC_EXPERIMENT_CHOICE_ENABLED
+IDS_GROUP_BY_DOMAIN_LABEL
+IDS_GUEST_PROFILE_NAME
+IDS_HARMFUL_V3_EXPLANATION_PARAGRAPH
+IDS_HARMFUL_V3_HEADING
+IDS_HARMFUL_V3_PRIMARY_PARAGRAPH
+IDS_HARMFUL_V3_PROCEED_PARAGRAPH
+IDS_HISTORY_ACTION_MENU_DESCRIPTION
+IDS_HISTORY_BLOCKED_VISIT_TEXT
+IDS_HISTORY_BROWSERESULTS
+IDS_HISTORY_CONTINUED
+IDS_HISTORY_DATE_WITH_RELATIVE_TIME
+IDS_HISTORY_DELETE_PRIOR_VISITS_CONFIRM_BUTTON
+IDS_HISTORY_DELETE_PRIOR_VISITS_WARNING
+IDS_HISTORY_FILTER_ALLOWED
+IDS_HISTORY_FILTER_ALLOW_ITEMS
+IDS_HISTORY_FILTER_BLOCKED
+IDS_HISTORY_FILTER_BLOCK_ITEMS
+IDS_HISTORY_HAS_SYNCED_RESULTS
+IDS_HISTORY_INTERVAL
+IDS_HISTORY_IN_CONTENT_PACK
+IDS_HISTORY_LOADING
+IDS_HISTORY_LOCK_BUTTON
+IDS_HISTORY_MORE_FROM_SITE
+IDS_HISTORY_NEWER
+IDS_HISTORY_NEWEST
+IDS_HISTORY_NO_RESULTS
+IDS_HISTORY_NO_SEARCH_RESULTS
+IDS_HISTORY_NO_SYNCED_RESULTS
+IDS_HISTORY_NUMBER_VISITS
+IDS_HISTORY_OLDER
+IDS_HISTORY_OPEN_CLEAR_BROWSING_DATA_DIALOG
+IDS_HISTORY_OTHER_SESSIONS_COLLAPSE_SESSION
+IDS_HISTORY_OTHER_SESSIONS_EXPAND_SESSION
+IDS_HISTORY_OTHER_SESSIONS_OPEN_ALL
+IDS_HISTORY_RANGE_ALL_TIME
+IDS_HISTORY_RANGE_LABEL
+IDS_HISTORY_RANGE_MONTH
+IDS_HISTORY_RANGE_NEXT
+IDS_HISTORY_RANGE_PREVIOUS
+IDS_HISTORY_RANGE_TODAY
+IDS_HISTORY_RANGE_WEEK
+IDS_HISTORY_REMOVE_BOOKMARK
+IDS_HISTORY_REMOVE_PAGE
+IDS_HISTORY_REMOVE_SELECTED_ITEMS
+IDS_HISTORY_SEARCHRESULTSFOR
+IDS_HISTORY_SEARCH_BUTTON
+IDS_HISTORY_TITLE
+IDS_HISTORY_UNKNOWN_DEVICE
+IDS_HISTORY_UNLOCK_BUTTON
+IDS_HTTP_POST_WARNING
+IDS_HTTP_POST_WARNING_RESEND
+IDS_HTTP_POST_WARNING_TITLE
+IDS_IMPORT_FROM_FIREFOX
+IDS_IMPORT_FROM_ICEWEASEL
+IDS_JAVASCRIPT_ALERT_DEFAULT_TITLE
+IDS_JAVASCRIPT_ALERT_TITLE
+IDS_JAVASCRIPT_MESSAGEBOX_DEFAULT_TITLE
+IDS_JAVASCRIPT_MESSAGEBOX_TITLE
+IDS_KEYWORD_SEARCH
+IDS_LEARN_MORE
+IDS_LEGACY_DEFAULT_PROFILE_NAME
+IDS_LIBADDRESSINPUT_ADDRESS_LINE_1_LABEL
+IDS_LIBADDRESSINPUT_AREA
+IDS_LIBADDRESSINPUT_COUNTRY_OR_REGION_LABEL
+IDS_LIBADDRESSINPUT_COUNTY
+IDS_LIBADDRESSINPUT_DEPARTMENT
+IDS_LIBADDRESSINPUT_DISTRICT
+IDS_LIBADDRESSINPUT_DO_SI
+IDS_LIBADDRESSINPUT_EMIRATE
+IDS_LIBADDRESSINPUT_ISLAND
+IDS_LIBADDRESSINPUT_LOCALITY_LABEL
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_POSTAL_CODE
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_POSTAL_CODE_URL
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_ZIP
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_ZIP_URL
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_FIELD
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_POSTAL_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_POSTAL_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_ZIP_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_ZIP_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_NEIGHBORHOOD
+IDS_LIBADDRESSINPUT_OBLAST
+IDS_LIBADDRESSINPUT_ORGANIZATION_LABEL
+IDS_LIBADDRESSINPUT_PARISH
+IDS_LIBADDRESSINPUT_PIN_CODE_LABEL
+IDS_LIBADDRESSINPUT_POSTAL_CODE_LABEL
+IDS_LIBADDRESSINPUT_POST_TOWN
+IDS_LIBADDRESSINPUT_PO_BOX_FORBIDDEN_VALUE
+IDS_LIBADDRESSINPUT_PREFECTURE
+IDS_LIBADDRESSINPUT_PROVINCE
+IDS_LIBADDRESSINPUT_RECIPIENT_LABEL
+IDS_LIBADDRESSINPUT_STATE
+IDS_LIBADDRESSINPUT_SUBURB
+IDS_LIBADDRESSINPUT_UNKNOWN_VALUE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_POSTAL_CODE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_POSTAL_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_POSTAL_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_ZIP
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_ZIP_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_ZIP_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_VILLAGE_TOWNSHIP
+IDS_LIBADDRESSINPUT_ZIP_CODE_LABEL
+IDS_LINK_FROM_CLIPBOARD
+IDS_LOGIN_DIALOG_OK_BUTTON_LABEL
+IDS_LOGIN_DIALOG_PASSWORD_FIELD
+IDS_LOGIN_DIALOG_TITLE
+IDS_LOGIN_DIALOG_USERNAME_FIELD
+IDS_MALWARE_V3_ADVICE_HEADING
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH_ADVICE
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH_HISTORY
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH_SUBRESOURCE
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH_SUBRESOURCE_ADVICE
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH_SUBRESOURCE_HISTORY
+IDS_MALWARE_V3_HEADING
+IDS_MALWARE_V3_PRIMARY_PARAGRAPH
+IDS_MALWARE_V3_PROCEED_PARAGRAPH
+IDS_MALWARE_V3_PROCEED_PARAGRAPH_NOT_RECOMMEND
+IDS_MALWARE_V3_PROCEED_PARAGRAPH_SOCIAL
+IDS_MANAGED_USER_AVATAR_LABEL
+IDS_MIDI_SYSEX_INFOBAR_QUESTION
+IDS_MIDI_SYSEX_PERMISSION_FRAGMENT
+IDS_MOBILE_WELCOME_URL
+IDS_NACL_DEBUG_MASK_CHOICE_DEBUG_ALL
+IDS_NACL_DEBUG_MASK_CHOICE_EXCLUDE_UTILS_PNACL
+IDS_NACL_DEBUG_MASK_CHOICE_INCLUDE_DEBUG
+IDS_NETWORK_PREDICTION_ENABLED_DESCRIPTION
+IDS_NET_EXPORT_NO_EMAIL_ACCOUNTS_ALERT_MESSAGE
+IDS_NET_EXPORT_NO_EMAIL_ACCOUNTS_ALERT_TITLE
+IDS_NEW_INCOGNITO_WINDOW_MAC
+IDS_NEW_NUMBERED_PROFILE_NAME
+IDS_NEW_TAB_CHROME_WELCOME_PAGE_TITLE
+IDS_NEW_TAB_MOST_VISITED
+IDS_NEW_TAB_RECENTLY_CLOSED
+IDS_NEW_TAB_RESTORE_THUMBNAILS_SHORT_LINK
+IDS_NEW_TAB_THUMBNAIL_REMOVED_NOTIFICATION
+IDS_NEW_TAB_TITLE
+IDS_NEW_TAB_UNDO_THUMBNAIL_REMOVE
+IDS_NUMBERED_PROFILE_NAME
+IDS_OK
+IDS_OMNIBOX_EMPTY_HINT
+IDS_ONE_CLICK_SIGNIN_CONFIRM_EMAIL_DIALOG_CANCEL_BUTTON
+IDS_OPEN_TABS_NOTYETSYNCED
+IDS_OPEN_TABS_PROMOCOMPUTER
+IDS_OPTIONS_ADVANCED_SECTION_TITLE_PRIVACY
+IDS_OPTIONS_DISABLE_WEB_SERVICES
+IDS_OPTIONS_ENABLE_LOGGING
+IDS_OPTIONS_IMPROVE_BROWSING_EXPERIENCE
+IDS_OPTIONS_PROXIES_CONFIGURE_BUTTON
+IDS_OTHER_DEVICES_X_MORE
+IDS_PAGEINFO_ADDRESS
+IDS_PAGEINFO_CERT_INFO_BUTTON
+IDS_PAGEINFO_PARTIAL_ADDRESS
+IDS_PAGE_INFO_HELP_CENTER_LINK
+IDS_PAGE_INFO_INTERNAL_PAGE
+IDS_PAGE_INFO_SECURITY_BUTTON_ACCESSIBILITY_LABEL
+IDS_PAGE_INFO_SECURITY_TAB_DEPRECATED_SIGNATURE_ALGORITHM
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_CONNECTION_TEXT
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_INSECURE_CONTENT_ERROR
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_INSECURE_CONTENT_WARNING
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_SENTENCE_LINK
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTION_DETAILS
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTION_DETAILS_AEAD
+IDS_PAGE_INFO_SECURITY_TAB_FALLBACK_MESSAGE
+IDS_PAGE_INFO_SECURITY_TAB_FIRST_VISITED_TODAY
+IDS_PAGE_INFO_SECURITY_TAB_INSECURE_IDENTITY
+IDS_PAGE_INFO_SECURITY_TAB_NON_UNIQUE_NAME
+IDS_PAGE_INFO_SECURITY_TAB_NOT_ENCRYPTED_CONNECTION_TEXT
+IDS_PAGE_INFO_SECURITY_TAB_NO_REVOCATION_MECHANISM
+IDS_PAGE_INFO_SECURITY_TAB_RENEGOTIATION_MESSAGE
+IDS_PAGE_INFO_SECURITY_TAB_SECURE_IDENTITY_EV_NO_CT
+IDS_PAGE_INFO_SECURITY_TAB_SECURE_IDENTITY_NO_CT
+IDS_PAGE_INFO_SECURITY_TAB_SSL_VERSION
+IDS_PAGE_INFO_SECURITY_TAB_UNABLE_TO_CHECK_REVOCATION
+IDS_PAGE_INFO_SECURITY_TAB_UNKNOWN_PARTY
+IDS_PAGE_INFO_SECURITY_TAB_WEAK_ENCRYPTION_CONNECTION_TEXT
+IDS_PASSWORDS_EXCEPTIONS_TAB_TITLE
+IDS_PASSWORDS_SHOW_PASSWORDS_TAB_TITLE
+IDS_PASSWORD_MANAGER_BLACKLIST_BUTTON
+IDS_PASSWORD_MANAGER_EMPTY_LOGIN
+IDS_PASSWORD_MANAGER_SAVE_BUTTON
+IDS_PASSWORD_MANAGER_SAVE_PASSWORD_PROMPT
+IDS_PAST_TIME_TODAY
+IDS_PAST_TIME_YESTERDAY
+IDS_PDF_INFOBAR_ALWAYS_USE_READER_BUTTON
+IDS_PERMISSION_ALLOW
+IDS_PERMISSION_DENY
+IDS_PHISHING_V3_EXPLANATION_PARAGRAPH
+IDS_PHISHING_V3_HEADING
+IDS_PHISHING_V3_PRIMARY_PARAGRAPH
+IDS_PHISHING_V3_PROCEED_PARAGRAPH
+IDS_PLATFORM_LABEL
+IDS_PLUGIN_CONFIRM_INSTALL_DIALOG_ACCEPT_BUTTON
+IDS_PLUGIN_CONFIRM_INSTALL_DIALOG_TITLE
+IDS_PLUGIN_NOT_SUPPORTED
+IDS_POLICY_ASSOCIATION_STATE_ACTIVE
+IDS_POLICY_ASSOCIATION_STATE_DEPROVISIONED
+IDS_POLICY_ASSOCIATION_STATE_UNMANAGED
+IDS_POLICY_DEFAULT_SEARCH_DISABLED
+IDS_POLICY_DEPRECATED
+IDS_POLICY_DM_STATUS_HTTP_STATUS_ERROR
+IDS_POLICY_DM_STATUS_REQUEST_FAILED
+IDS_POLICY_DM_STATUS_REQUEST_INVALID
+IDS_POLICY_DM_STATUS_RESPONSE_DECODING_ERROR
+IDS_POLICY_DM_STATUS_SERVICE_ACTIVATION_PENDING
+IDS_POLICY_DM_STATUS_SERVICE_DEPROVISIONED
+IDS_POLICY_DM_STATUS_SERVICE_DEVICE_ID_CONFLICT
+IDS_POLICY_DM_STATUS_SERVICE_DEVICE_NOT_FOUND
+IDS_POLICY_DM_STATUS_SERVICE_DOMAIN_MISMATCH
+IDS_POLICY_DM_STATUS_SERVICE_INVALID_SERIAL_NUMBER
+IDS_POLICY_DM_STATUS_SERVICE_MANAGEMENT_NOT_SUPPORTED
+IDS_POLICY_DM_STATUS_SERVICE_MANAGEMENT_TOKEN_INVALID
+IDS_POLICY_DM_STATUS_SERVICE_MISSING_LICENSES
+IDS_POLICY_DM_STATUS_SERVICE_POLICY_NOT_FOUND
+IDS_POLICY_DM_STATUS_SUCCESS
+IDS_POLICY_DM_STATUS_TEMPORARY_UNAVAILABLE
+IDS_POLICY_DM_STATUS_UNKNOWN_ERROR
+IDS_POLICY_FILTER_PLACEHOLDER
+IDS_POLICY_HEADER_LEVEL
+IDS_POLICY_HEADER_NAME
+IDS_POLICY_HEADER_SCOPE
+IDS_POLICY_HEADER_STATUS
+IDS_POLICY_HEADER_VALUE
+IDS_POLICY_HIDE_EXPANDED_VALUE
+IDS_POLICY_INVALID_BOOKMARK
+IDS_POLICY_INVALID_PROXY_MODE_ERROR
+IDS_POLICY_INVALID_SEARCH_URL_ERROR
+IDS_POLICY_LABEL_ASSET_ID
+IDS_POLICY_LABEL_CLIENT_ID
+IDS_POLICY_LABEL_DIRECTORY_API_ID
+IDS_POLICY_LABEL_DOMAIN
+IDS_POLICY_LABEL_LOCATION
+IDS_POLICY_LABEL_REFRESH_INTERVAL
+IDS_POLICY_LABEL_STATUS
+IDS_POLICY_LABEL_TIME_SINCE_LAST_REFRESH
+IDS_POLICY_LABEL_USERNAME
+IDS_POLICY_LEVEL_ERROR
+IDS_POLICY_LEVEL_MANDATORY
+IDS_POLICY_LEVEL_RECOMMENDED
+IDS_POLICY_LIST_ENTRY_ERROR
+IDS_POLICY_NEVER_FETCHED
+IDS_POLICY_NOT_SPECIFIED
+IDS_POLICY_NOT_SPECIFIED_ERROR
+IDS_POLICY_NO_POLICIES_SET
+IDS_POLICY_OK
+IDS_POLICY_OUT_OF_RANGE_ERROR
+IDS_POLICY_OVERRIDDEN
+IDS_POLICY_PROXY_BOTH_SPECIFIED_ERROR
+IDS_POLICY_PROXY_MODE_AUTO_DETECT_ERROR
+IDS_POLICY_PROXY_MODE_DISABLED_ERROR
+IDS_POLICY_PROXY_MODE_FIXED_SERVERS_ERROR
+IDS_POLICY_PROXY_MODE_PAC_URL_ERROR
+IDS_POLICY_PROXY_MODE_SYSTEM_ERROR
+IDS_POLICY_PROXY_NEITHER_SPECIFIED_ERROR
+IDS_POLICY_RELOAD_POLICIES
+IDS_POLICY_SCHEMA_VALIDATION_ERROR
+IDS_POLICY_SCOPE_DEVICE
+IDS_POLICY_SCOPE_USER
+IDS_POLICY_SHOW_EXPANDED_VALUE
+IDS_POLICY_SHOW_UNSET
+IDS_POLICY_STATUS
+IDS_POLICY_STATUS_DEVICE
+IDS_POLICY_STATUS_USER
+IDS_POLICY_STORE_STATUS_BAD_STATE
+IDS_POLICY_STORE_STATUS_LOAD_ERROR
+IDS_POLICY_STORE_STATUS_OK
+IDS_POLICY_STORE_STATUS_PARSE_ERROR
+IDS_POLICY_STORE_STATUS_SERIALIZE_ERROR
+IDS_POLICY_STORE_STATUS_STORE_ERROR
+IDS_POLICY_STORE_STATUS_UNKNOWN_ERROR
+IDS_POLICY_STORE_STATUS_VALIDATION_ERROR
+IDS_POLICY_SUBKEY_ERROR
+IDS_POLICY_TITLE
+IDS_POLICY_TYPE_ERROR
+IDS_POLICY_UNKNOWN
+IDS_POLICY_UNSET
+IDS_POLICY_VALIDATION_BAD_INITIAL_SIGNATURE
+IDS_POLICY_VALIDATION_BAD_KEY_VERIFICATION_SIGNATURE
+IDS_POLICY_VALIDATION_BAD_SIGNATURE
+IDS_POLICY_VALIDATION_BAD_TIMESTAMP
+IDS_POLICY_VALIDATION_BAD_USERNAME
+IDS_POLICY_VALIDATION_ERROR_CODE_PRESENT
+IDS_POLICY_VALIDATION_OK
+IDS_POLICY_VALIDATION_PAYLOAD_PARSE_ERROR
+IDS_POLICY_VALIDATION_POLICY_PARSE_ERROR
+IDS_POLICY_VALIDATION_UNKNOWN_ERROR
+IDS_POLICY_VALIDATION_WRONG_POLICY_TYPE
+IDS_POLICY_VALIDATION_WRONG_SETTINGS_ENTITY_ID
+IDS_POLICY_VALIDATION_WRONG_TOKEN
+IDS_PREFERENCES_CORRUPT_ERROR
+IDS_PREFERENCES_UNREADABLE_ERROR
+IDS_PRINT
+IDS_PRIVACY_POLICY_URL
+IDS_PRODUCT_NAME
+IDS_PROFILES_GUEST_PROFILE_NAME
+IDS_PROFILES_LOCAL_PROFILE_STATE
+IDS_PROFILE_TOO_NEW_ERROR
+IDS_PUSH_MESSAGES_BUBBLE_FRAGMENT
+IDS_PUSH_MESSAGES_BUBBLE_TEXT
+IDS_PUSH_MESSAGES_PERMISSION_QUESTION
+IDS_RECENT_TABS_MENU
+IDS_SAD_TAB_MESSAGE
+IDS_SAD_TAB_RELOAD_LABEL
+IDS_SAD_TAB_TITLE
+IDS_SAFEBROWSING_OVERRIDABLE_SAFETY_BUTTON
+IDS_SAFEBROWSING_V3_CLOSE_DETAILS_BUTTON
+IDS_SAFEBROWSING_V3_OPEN_DETAILS_BUTTON
+IDS_SAFEBROWSING_V3_TITLE
+IDS_SAFE_BROWSING_MALWARE_BACK_BUTTON
+IDS_SAFE_BROWSING_MALWARE_BACK_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_COLLAB_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_DIAGNOSTIC_PAGE
+IDS_SAFE_BROWSING_MALWARE_FEAR_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_LABEL
+IDS_SAFE_BROWSING_MALWARE_QUESTION_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_REPORTING_AGREE
+IDS_SAFE_BROWSING_MALWARE_TITLE
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION1
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION1_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION2
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION2_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION3
+IDS_SAFE_BROWSING_MALWARE_V2_DETAILS
+IDS_SAFE_BROWSING_MALWARE_V2_DETAILS_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_V2_HEADLINE_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_LEARN_MORE
+IDS_SAFE_BROWSING_MALWARE_V2_PROCEED_LINK
+IDS_SAFE_BROWSING_MALWARE_V2_REPORTING_AGREE
+IDS_SAFE_BROWSING_MALWARE_V2_SEE_MORE
+IDS_SAFE_BROWSING_MALWARE_V2_TITLE
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION1
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION2
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION3
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION_AGREE
+IDS_SAFE_BROWSING_MULTI_MALWARE_PROCEED_BUTTON
+IDS_SAFE_BROWSING_MULTI_PHISHING_DESCRIPTION1
+IDS_SAFE_BROWSING_MULTI_THREAT_DESCRIPTION1
+IDS_SAFE_BROWSING_MULTI_THREAT_DESCRIPTION2
+IDS_SAFE_BROWSING_MULTI_THREAT_TITLE
+IDS_SAFE_BROWSING_PHISHING_BACK_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_COLLAB_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_FEAR_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_LABEL
+IDS_SAFE_BROWSING_PHISHING_QUESTION_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_REPORT_ERROR
+IDS_SAFE_BROWSING_PHISHING_TITLE
+IDS_SAFE_BROWSING_PHISHING_V2_DESCRIPTION1
+IDS_SAFE_BROWSING_PHISHING_V2_DESCRIPTION2
+IDS_SAFE_BROWSING_PHISHING_V2_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_V2_REPORT_ERROR
+IDS_SAFE_BROWSING_PHISHING_V2_TITLE
+IDS_SAFE_BROWSING_PRIVACY_POLICY_PAGE
+IDS_SAFE_BROWSING_PRIVACY_POLICY_PAGE_V2
+IDS_SAFE_BROWSING_PRIVACY_POLICY_URL
+IDS_SAVE
+IDS_SEARCH_BOX_EMPTY_HINT
+IDS_SECURE_CONNECTION_EV
+IDS_SESSION_CRASHED_VIEW_MESSAGE
+IDS_SESSION_CRASHED_VIEW_RESTORE_BUTTON
+IDS_SETTINGS_SHOW_ADVANCED_SETTINGS
+IDS_SHORT_PRODUCT_NAME
+IDS_SHOW_HISTORY
+IDS_SIGNED_IN_WITH_SYNC_DISABLED
+IDS_SIGNED_IN_WITH_SYNC_SUPPRESSED
+IDS_SIGNIN_ERROR_BUBBLE_VIEW_TITLE
+IDS_SINGLE_PROFILE_DISPLAY_NAME
+IDS_SSL_CLOCK_ERROR
+IDS_SSL_CLOCK_ERROR_EXPLANATION
+IDS_SSL_NONOVERRIDABLE_HSTS
+IDS_SSL_NONOVERRIDABLE_INVALID
+IDS_SSL_NONOVERRIDABLE_MORE
+IDS_SSL_NONOVERRIDABLE_MORE_INVALID_SP3
+IDS_SSL_NONOVERRIDABLE_PINNED
+IDS_SSL_NONOVERRIDABLE_REVOKED
+IDS_SSL_OVERRIDABLE_PRIMARY_PARAGRAPH
+IDS_SSL_OVERRIDABLE_PROCEED_LINK_TEXT
+IDS_SSL_OVERRIDABLE_PROCEED_PARAGRAPH
+IDS_SSL_OVERRIDABLE_SAFETY_BUTTON
+IDS_SSL_OVERRIDABLE_TITLE
+IDS_SSL_RELOAD
+IDS_SSL_V2_CLOCK_AHEAD_HEADING
+IDS_SSL_V2_CLOCK_BEHIND_HEADING
+IDS_SSL_V2_CLOCK_PRIMARY_PARAGRAPH
+IDS_SSL_V2_CLOCK_TITLE
+IDS_SSL_V2_CLOCK_UPDATE_DATE_AND_TIME
+IDS_SSL_V2_CLOSE_DETAILS_BUTTON
+IDS_SSL_V2_HEADING
+IDS_SSL_V2_OPEN_DETAILS_BUTTON
+IDS_SSL_V2_PRIMARY_PARAGRAPH
+IDS_SSL_V2_TITLE
+IDS_STARS_PROMO_LABEL_IOS
+IDS_SUPERVISED_USER_AVATAR_LABEL
+IDS_SUPERVISED_USER_NEW_AVATAR_LABEL
+IDS_SYNC_ACCOUNT_DETAILS_NOT_ENTERED
+IDS_SYNC_ACCOUNT_SYNCING_TO_USER
+IDS_SYNC_ACCOUNT_SYNCING_TO_USER_WITH_MANAGE_LINK
+IDS_SYNC_AUTHENTICATING_LABEL
+IDS_SYNC_BASIC_ENCRYPTION_DATA
+IDS_SYNC_CLEAR_USER_DATA
+IDS_SYNC_CONFIGURE_ENCRYPTION
+IDS_SYNC_DATATYPE_AUTOFILL
+IDS_SYNC_DATATYPE_BOOKMARKS
+IDS_SYNC_DATATYPE_PASSWORDS
+IDS_SYNC_DATATYPE_PREFERENCES
+IDS_SYNC_DATATYPE_TABS
+IDS_SYNC_DATATYPE_TYPED_URLS
+IDS_SYNC_EMPTY_PASSPHRASE_ERROR
+IDS_SYNC_ENABLE_SYNC_ON_ACCOUNT
+IDS_SYNC_ENCRYPTION_SECTION_TITLE
+IDS_SYNC_ENTER_GOOGLE_PASSPHRASE_BODY
+IDS_SYNC_ENTER_PASSPHRASE_BODY
+IDS_SYNC_ENTER_PASSPHRASE_BODY_WITH_DATE
+IDS_SYNC_ENTER_PASSPHRASE_TITLE
+IDS_SYNC_ERROR_BUBBLE_VIEW_TITLE
+IDS_SYNC_ERROR_SIGNING_IN
+IDS_SYNC_FULL_ENCRYPTION_DATA
+IDS_SYNC_INVALID_USER_CREDENTIALS
+IDS_SYNC_LOGIN_INFO_OUT_OF_DATE
+IDS_SYNC_LOGIN_SETTING_UP
+IDS_SYNC_MENU_PRE_SYNCED_LABEL
+IDS_SYNC_MENU_SYNCED_LABEL
+IDS_SYNC_NTP_PASSWORD_ENABLE
+IDS_SYNC_NTP_PASSWORD_PROMO
+IDS_SYNC_NTP_SETUP_IN_PROGRESS
+IDS_SYNC_OPTIONS_GROUP_NAME
+IDS_SYNC_OTHER_SIGN_IN_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_PASSPHRASE_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_PASSPHRASE_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_PASSPHRASE_ERROR_WRENCH_MENU_ITEM
+IDS_SYNC_PASSPHRASE_LABEL
+IDS_SYNC_PASSPHRASE_MISMATCH_ERROR
+IDS_SYNC_PASSPHRASE_MSG_EXPLICIT_POSTFIX
+IDS_SYNC_PASSPHRASE_MSG_EXPLICIT_PREFIX
+IDS_SYNC_PASSWORD_SYNC_ATTENTION
+IDS_SYNC_PROMO_NTP_BUBBLE_MESSAGE
+IDS_SYNC_PROMO_TAB_TITLE
+IDS_SYNC_RELOGIN_LINK_LABEL
+IDS_SYNC_SERVER_IS_UNREACHABLE
+IDS_SYNC_SERVICE_UNAVAILABLE
+IDS_SYNC_SETUP_ERROR
+IDS_SYNC_SIGN_IN_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_SIGN_IN_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_SIGN_IN_ERROR_WRENCH_MENU_ITEM
+IDS_SYNC_START_SYNC_BUTTON_LABEL
+IDS_SYNC_STATUS_UNRECOVERABLE_ERROR
+IDS_SYNC_STOP_AND_RESTART_SYNC
+IDS_SYNC_TIME_JUST_NOW
+IDS_SYNC_TIME_NEVER
+IDS_SYNC_UNAVAILABLE_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_UNAVAILABLE_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_UNRECOVERABLE_ERROR_HELP_URL
+IDS_SYNC_UPGRADE_CLIENT
+IDS_SYSTEM_FLAGS_OWNER_ONLY
+IDS_TERMS_HTML
+IDS_TIME_DAYS
+IDS_TIME_DAYS_1ST
+IDS_TIME_ELAPSED_DAYS
+IDS_TIME_ELAPSED_HOURS
+IDS_TIME_ELAPSED_MINS
+IDS_TIME_ELAPSED_SECS
+IDS_TIME_HOURS
+IDS_TIME_HOURS_1ST
+IDS_TIME_HOURS_2ND
+IDS_TIME_LONG_MINS
+IDS_TIME_LONG_MINS_1ST
+IDS_TIME_LONG_MINS_2ND
+IDS_TIME_LONG_SECS
+IDS_TIME_LONG_SECS_2ND
+IDS_TIME_MINS
+IDS_TIME_REMAINING_DAYS
+IDS_TIME_REMAINING_HOURS
+IDS_TIME_REMAINING_LONG_MINS
+IDS_TIME_REMAINING_LONG_SECS
+IDS_TIME_REMAINING_MINS
+IDS_TIME_REMAINING_SECS
+IDS_TIME_SECS
+IDS_TOOLTIP_STAR
+IDS_TOUCH_EVENTS_DESCRIPTION
+IDS_TOUCH_EVENTS_NAME
+IDS_TRANSLATE_INFOBAR_ACCEPT
+IDS_TRANSLATE_INFOBAR_AFTER_MESSAGE
+IDS_TRANSLATE_INFOBAR_AFTER_MESSAGE_AUTODETERMINED_SOURCE_LANGUAGE
+IDS_TRANSLATE_INFOBAR_ALWAYS_TRANSLATE
+IDS_TRANSLATE_INFOBAR_BEFORE_MESSAGE
+IDS_TRANSLATE_INFOBAR_BEFORE_MESSAGE_IOS
+IDS_TRANSLATE_INFOBAR_DENY
+IDS_TRANSLATE_INFOBAR_ERROR_CANT_CONNECT
+IDS_TRANSLATE_INFOBAR_ERROR_CANT_TRANSLATE
+IDS_TRANSLATE_INFOBAR_ERROR_SAME_LANGUAGE
+IDS_TRANSLATE_INFOBAR_NEVER_MESSAGE_IOS
+IDS_TRANSLATE_INFOBAR_NEVER_TRANSLATE
+IDS_TRANSLATE_INFOBAR_OPTIONS_ABOUT
+IDS_TRANSLATE_INFOBAR_OPTIONS_ALWAYS
+IDS_TRANSLATE_INFOBAR_OPTIONS_NEVER_TRANSLATE_LANG
+IDS_TRANSLATE_INFOBAR_OPTIONS_NEVER_TRANSLATE_SITE
+IDS_TRANSLATE_INFOBAR_OPTIONS_REPORT_ERROR
+IDS_TRANSLATE_INFOBAR_RETRY
+IDS_TRANSLATE_INFOBAR_REVERT
+IDS_TRANSLATE_INFOBAR_TRANSLATING_TO
+IDS_TRANSLATE_INFOBAR_UNKNOWN_PAGE_LANGUAGE
+IDS_TRANSLATE_INFOBAR_UNSUPPORTED_PAGE_LANGUAGE
+IDS_UPGRADE_AVAILABLE
+IDS_UPGRADE_AVAILABLE_BUTTON
+IDS_WEB_FONT_FAMILY
+IDS_WEB_FONT_SIZE
diff --git a/build/ios/mac_build.gypi b/build/ios/mac_build.gypi
new file mode 100644
index 0000000..4da21eb
--- /dev/null
+++ b/build/ios/mac_build.gypi
@@ -0,0 +1,83 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Xcode throws an error if an iOS target depends on a Mac OS X target. So
+# any place a utility program needs to be built and run, an action is
+# used to run ninja as a script to work around this.
+# Example:
+# {
+#   'target_name': 'foo',
+#   'type': 'none',
+#   'variables': {
+#     # The name of a directory used for ninja. This cannot be shared with
+#     # another mac build.
+#     'ninja_output_dir': 'ninja-foo',
+#     # The full path to the location in which the ninja executable should be
+#     # placed. This cannot be shared with another mac build.
+#    'ninja_product_dir':
+#      '<(DEPTH)/xcodebuild/<(ninja_output_dir)/<(CONFIGURATION_NAME)',
+#     # The list of all the gyp files that contain the targets to run.
+#     're_run_targets': [
+#       'foo.gyp',
+#     ],
+#   },
+#   'includes': ['path_to/mac_build.gypi'],
+#   'actions': [
+#     {
+#       'action_name': 'compile foo',
+#       'inputs': [],
+#       'outputs': [],
+#       'action': [
+#         '<@(ninja_cmd)',
+#         # All the targets to build.
+#         'foo1',
+#         'foo2',
+#       ],
+#     },
+#   ],
+# }
+{
+  'variables': {
+    'variables': {
+     'parent_generator%': '<(GENERATOR)',
+    },
+    'parent_generator%': '<(parent_generator)',
+    # Common ninja command line flags.
+    'ninja_cmd': [
+      # Bounce through clean_env to clean up the environment so things
+      # set by the iOS build don't pollute the Mac build.
+      '<(DEPTH)/build/ios/clean_env.py',
+      # ninja must be found in the PATH.
+      'ADD_TO_PATH=<!(echo $PATH)',
+      'ninja',
+      '-C',
+      '<(ninja_product_dir)',
+    ],
+
+    # Common syntax to rerun gyp to generate the Mac projects.
+    're_run_gyp': [
+      'build/gyp_chromium',
+      '--depth=.',
+      # Don't use anything set for the iOS side of things.
+      '--ignore-environment',
+      # Generate for ninja
+      '--format=ninja',
+      # Generate files into xcodebuild/ninja
+      '-Goutput_dir=xcodebuild/<(ninja_output_dir)',
+      # nacl isn't in the iOS checkout; make sure it's turned off.
+      '-Ddisable_nacl=1',
+      # Pass through the Mac SDK version.
+      '-Dmac_sdk=<(mac_sdk)',
+      '-Dparent_generator=<(parent_generator)'
+    ],
+
+    # Rerun gyp for each of the projects needed. This is what actually
+    # generates the projects on disk.
+    're_run_gyp_execution':
+      '<!(cd <(DEPTH) && <@(re_run_gyp) <@(re_run_targets))',
+  },
+  # Since these are used to generate things needed by other targets, make
+  # them hard dependencies so they are always built first.
+  'hard_dependency': 1,
+}
diff --git a/build/isolate.gypi b/build/isolate.gypi
new file mode 100644
index 0000000..69af5b0
--- /dev/null
+++ b/build/isolate.gypi
@@ -0,0 +1,125 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to "build" .isolate files into a .isolated file.
+#
+# To use this, create a gyp target with the following form:
+# 'conditions': [
+#   ['test_isolation_mode != "noop"', {
+#     'targets': [
+#       {
+#         'target_name': 'foo_test_run',
+#         'type': 'none',
+#         'dependencies': [
+#           'foo_test',
+#         ],
+#         'includes': [
+#           '../build/isolate.gypi',
+#         ],
+#         'sources': [
+#           'foo_test.isolate',
+#         ],
+#       },
+#     ],
+#   }],
+# ],
+#
+# Note: foo_test.isolate is both included and listed as a source file. This
+# is an inherent property of the .isolate format, which permits defining GYP
+# variables but is a stricter format than GYP so isolate.py can read it.
+#
+# The generated .isolated file will be:
+#   <(PRODUCT_DIR)/foo_test.isolated
+#
+# See http://dev.chromium.org/developers/testing/isolated-testing/for-swes
+# for more information.
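+#
+# As an illustration (paths and values are hypothetical), with
+# test_isolation_mode set to 'prepare' the rule below boils down to roughly:
+#
+#   python tools/isolate_driver.py prepare \
+#       --isolated <(PRODUCT_DIR)/foo_test.isolated \
+#       --isolate foo_test.isolate \
+#       --path-variable DEPTH <(DEPTH) \
+#       --config-variable OS=<(OS) ...
+#
+# and, per the conditions at the bottom of this file, writes
+# <(PRODUCT_DIR)/foo_test.isolated.gen.json rather than the .isolated itself.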
+
+{
+  'includes': [
+    '../build/util/version.gypi',
+  ],
+  'rules': [
+    {
+      'rule_name': 'isolate',
+      'extension': 'isolate',
+      'inputs': [
+        # Files that are known to be involved in this step.
+        '<(DEPTH)/tools/isolate_driver.py',
+        '<(DEPTH)/tools/swarming_client/isolate.py',
+        '<(DEPTH)/tools/swarming_client/run_isolated.py',
+      ],
+      'outputs': [],
+      'action': [
+        'python',
+        '<(DEPTH)/tools/isolate_driver.py',
+        '<(test_isolation_mode)',
+        '--isolated', '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated',
+        '--isolate', '<(RULE_INPUT_PATH)',
+
+        # Variables should use the -V FOO=<(FOO) form so frequent values,
+        # like '0' or '1', aren't stripped out by GYP. Run 'isolate.py help' for
+        # more details.
+
+        # Path variables are used to replace file paths when loading a .isolate
+        # file
+        '--path-variable', 'DEPTH', '<(DEPTH)',
+        '--path-variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)',
+
+        # Extra variables are replaced on the 'command' entry and on paths in
+        # the .isolate file but are not considered relative paths.
+        '--extra-variable', 'version_full=<(version_full)',
+
+        # Note: This list must match DefaultConfigVariables()
+        # in build/android/pylib/utils/isolator.py
+        '--config-variable', 'CONFIGURATION_NAME=<(CONFIGURATION_NAME)',
+        '--config-variable', 'OS=<(OS)',
+        '--config-variable', 'asan=<(asan)',
+        '--config-variable', 'branding=<(branding)',
+        '--config-variable', 'chromeos=<(chromeos)',
+        '--config-variable', 'component=<(component)',
+        '--config-variable', 'disable_nacl=<(disable_nacl)',
+        '--config-variable', 'enable_pepper_cdms=<(enable_pepper_cdms)',
+        '--config-variable', 'enable_plugins=<(enable_plugins)',
+        '--config-variable', 'fastbuild=<(fastbuild)',
+        '--config-variable', 'icu_use_data_file_flag=<(icu_use_data_file_flag)',
+        # TODO(kbr): move this to chrome_tests.gypi:gles2_conform_tests_run
+        # once support for user-defined config variables is added.
+        '--config-variable',
+          'internal_gles2_conform_tests=<(internal_gles2_conform_tests)',
+        '--config-variable', 'kasko=<(kasko)',
+        '--config-variable', 'libpeer_target_type=<(libpeer_target_type)',
+        '--config-variable', 'lsan=<(lsan)',
+        '--config-variable', 'msan=<(msan)',
+        '--config-variable', 'target_arch=<(target_arch)',
+        '--config-variable', 'tsan=<(tsan)',
+        '--config-variable', 'use_custom_libcxx=<(use_custom_libcxx)',
+        '--config-variable', 'use_instrumented_libraries=<(use_instrumented_libraries)',
+        '--config-variable',
+        'use_prebuilt_instrumented_libraries=<(use_prebuilt_instrumented_libraries)',
+        '--config-variable', 'use_openssl=<(use_openssl)',
+        '--config-variable', 'use_ozone=<(use_ozone)',
+        '--config-variable', 'use_x11=<(use_x11)',
+        '--config-variable', 'v8_use_external_startup_data=<(v8_use_external_startup_data)',
+      ],
+      'conditions': [
+        # Note: When gyp merges lists, it appends them to the old value.
+        ['OS=="mac"', {
+          'action': [
+            '--extra-variable', 'mac_product_name=<(mac_product_name)',
+          ],
+        }],
+        ["test_isolation_mode == 'prepare'", {
+          'outputs': [
+            '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated.gen.json',
+          ],
+        }, {
+          'outputs': [
+            '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated',
+          ],
+        }],
+      ],
+    },
+  ],
+}
diff --git a/build/jar_file_jni_generator.gypi b/build/jar_file_jni_generator.gypi
new file mode 100644
index 0000000..3d95b28
--- /dev/null
+++ b/build/jar_file_jni_generator.gypi
@@ -0,0 +1,67 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate JNI bindings for system Java files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'android_jar_jni_headers',
+#   'type': 'none',
+#   'variables': {
+#     'jni_gen_package': 'chrome',
+#     'input_java_class': 'java/io/InputStream.class',
+#   },
+#   'includes': [ '../build/jar_file_jni_generator.gypi' ],
+# },
+#
+# Optional variables:
+#  input_jar_file - The input jar file; if omitted, android_sdk_jar will be used.
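+#
+# For the example target above (an illustrative expansion, not a literal
+# command), the action amounts to roughly:
+#
+#   base/android/jni_generator/jni_generator.py \
+#       -j <(android_sdk_jar) \
+#       --input_file java/io/InputStream.class \
+#       --output_dir <(SHARED_INTERMEDIATE_DIR)/chrome/jni ...
+#
+# which generates <(SHARED_INTERMEDIATE_DIR)/chrome/jni/InputStream_jni.h.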
+
+{
+  'variables': {
+    'jni_generator': '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+    # A comma separated string of include files.
+    'jni_generator_includes%': (
+        'base/android/jni_generator/jni_generator_helper.h'
+    ),
+    'native_exports%': '--native_exports_optional',
+  },
+  'actions': [
+    {
+      'action_name': 'generate_jni_headers_from_jar_file',
+      'inputs': [
+        '<(jni_generator)',
+        '<(input_jar_file)',
+        '<(android_sdk_jar)',
+      ],
+      'variables': {
+        'java_class_name': '<!(basename <(input_java_class)|sed "s/\.class//")',
+        'input_jar_file%': '<(android_sdk_jar)'
+      },
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni/<(java_class_name)_jni.h',
+      ],
+      'action': [
+        '<(jni_generator)',
+        '-j',
+        '<(input_jar_file)',
+        '--input_file',
+        '<(input_java_class)',
+        '--output_dir',
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni',
+        '--includes',
+        '<(jni_generator_includes)',
+        '--optimize_generation',
+        '<(optimize_jni_generation)',
+        '<(native_exports)',
+      ],
+      'message': 'Generating JNI bindings from <(input_jar_file)/<(input_java_class)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/java.gypi b/build/java.gypi
new file mode 100644
index 0000000..73c550d
--- /dev/null
+++ b/build/java.gypi
@@ -0,0 +1,368 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my-package_java',
+#   'type': 'none',
+#   'variables': {
+#     'java_in_dir': 'path/to/package/root',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  java_in_dir - The top-level java directory. The src should be in
+#    <java_in_dir>/src.
+# Optional/automatic variables:
+#  add_to_dependents_classpaths - Set to 0 if the resulting jar file should not
+#    be added to its dependents' classpaths.
+#  additional_input_paths - These paths will be included in the 'inputs' list to
+#    ensure that this target is rebuilt when one of these paths changes.
+#  additional_src_dirs - Additional directories with .java files to be compiled
+#    and included in the output of this target.
+#  generated_src_dirs - Same as additional_src_dirs except used for .java files
+#    that are generated at build time. This should be set automatically by a
+#    target's dependencies. The .java files in these directories are not
+#    included in the 'inputs' list (unlike additional_src_dirs).
+#  input_jars_paths - The path to jars to be included in the classpath. This
+#    should be filled automatically by depending on the appropriate targets.
+#  javac_includes - A list of specific files to include. This is by default
+#    empty, which leads to inclusion of all files specified. May include
+#    wildcards, and supports '**/' for recursive path wildcards, e.g.:
+#    '**/MyFileRegardlessOfDirectory.java', '**/IncludedPrefix*.java'.
+#  has_java_resources - Set to 1 if the java target contains an
+#    Android-compatible resources folder named res.  If 1, R_package and
+#    R_package_relpath must also be set.
+#  R_package - The java package in which the R class (which maps resources to
+#    integer IDs) should be generated, e.g. org.chromium.content.
+#  R_package_relpath - Same as R_package, but replace each '.' with '/'.
+#  res_extra_dirs - A list of extra directories containing Android resources.
+#    These directories may be generated at build time.
+#  res_extra_files - A list of the files in res_extra_dirs.
+#  never_lint - Set to 1 to not run lint on this target.
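+#
+# For instance (all names here are hypothetical), a java library target with
+# Android resources might be declared as:
+# {
+#   'target_name': 'my-package_java',
+#   'type': 'none',
+#   'variables': {
+#     'java_in_dir': 'path/to/package/root',
+#     'has_java_resources': 1,
+#     'R_package': 'org.chromium.mypackage',
+#     'R_package_relpath': 'org/chromium/mypackage',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }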
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/setup.gyp:build_output_dirs'
+  ],
+  'variables': {
+    'add_to_dependents_classpaths%': 1,
+    'android_jar': '<(android_sdk)/android.jar',
+    'input_jars_paths': [ '<(android_jar)' ],
+    'additional_src_dirs': [],
+    'javac_includes': [],
+    'jar_name': '<(_target_name).jar',
+    'jar_dir': '<(PRODUCT_DIR)/lib.java',
+    'jar_path': '<(intermediate_dir)/<(jar_name)',
+    'jar_final_path': '<(jar_dir)/<(jar_name)',
+    'jar_excluded_classes': [ '*/R.class', '*/R##*.class' ],
+    'instr_stamp': '<(intermediate_dir)/instr.stamp',
+    'additional_input_paths': [],
+    'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
+    'generated_src_dirs': ['>@(generated_R_dirs)'],
+    'generated_R_dirs': [],
+    'has_java_resources%': 0,
+    'res_extra_dirs': [],
+    'res_extra_files': [],
+    'res_v14_skip%': 0,
+    'resource_input_paths': ['>@(res_extra_files)'],
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+    'compile_stamp': '<(intermediate_dir)/compile.stamp',
+    'lint_stamp': '<(intermediate_dir)/lint.stamp',
+    'lint_result': '<(intermediate_dir)/lint_result.xml',
+    'lint_config': '<(intermediate_dir)/lint_config.xml',
+    'never_lint%': 0,
+    'findbugs_stamp': '<(intermediate_dir)/findbugs.stamp',
+    'run_findbugs%': 0,
+    'java_in_dir_suffix%': '/src',
+    'proguard_config%': '',
+    'proguard_preprocess%': '0',
+    'enable_errorprone%': '0',
+    'errorprone_exe_path': '<(PRODUCT_DIR)/bin.java/chromium_errorprone',
+    'variables': {
+      'variables': {
+        'proguard_preprocess%': 0,
+        'emma_never_instrument%': 0,
+      },
+      'conditions': [
+        ['proguard_preprocess == 1', {
+          'javac_jar_path': '<(intermediate_dir)/<(_target_name).pre.jar'
+        }, {
+          'javac_jar_path': '<(jar_path)'
+        }],
+        ['chromium_code != 0 and emma_coverage != 0 and emma_never_instrument == 0', {
+          'emma_instrument': 1,
+        }, {
+          'emma_instrument': 0,
+        }],
+      ],
+    },
+    'emma_instrument': '<(emma_instrument)',
+    'javac_jar_path': '<(javac_jar_path)',
+  },
+  'conditions': [
+    ['add_to_dependents_classpaths == 1', {
+      # This all_dependent_settings is used for java targets only. This will add the
+      # jar path to the classpath of dependent java targets.
+      'all_dependent_settings': {
+        'variables': {
+          'input_jars_paths': ['<(jar_final_path)'],
+          'library_dexed_jars_paths': ['<(dex_path)'],
+        },
+      },
+    }],
+    ['has_java_resources == 1', {
+      'variables': {
+        'resource_dir': '<(java_in_dir)/res',
+        'res_input_dirs': ['<(resource_dir)', '<@(res_extra_dirs)'],
+        'resource_input_paths': ['<!@(find <(resource_dir) -type f)'],
+
+        'R_dir': '<(intermediate_dir)/java_R',
+        'R_text_file': '<(R_dir)/R.txt',
+
+        'generated_src_dirs': ['<(R_dir)'],
+        'additional_input_paths': ['<(resource_zip_path)', ],
+
+        'dependencies_res_zip_paths': [],
+        'resource_zip_path': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+      },
+      'all_dependent_settings': {
+        'variables': {
+          # Dependent libraries include this target's R.java file via
+          # generated_R_dirs.
+          'generated_R_dirs': ['<(R_dir)'],
+
+          # Dependent libraries and apks include this target's resources via
+          # dependencies_res_zip_paths.
+          'additional_input_paths': ['<(resource_zip_path)'],
+          'dependencies_res_zip_paths': ['<(resource_zip_path)'],
+
+          # additional_res_packages and additional_R_text_files are used to
+          # create this package's R.java files when building the APK.
+          'additional_res_packages': ['<(R_package)'],
+          'additional_R_text_files': ['<(R_text_file)'],
+        },
+      },
+      'actions': [
+        # Generate R.java and crunch image resources.
+        {
+          'action_name': 'process_resources',
+          'message': 'processing resources for <(_target_name)',
+          'variables': {
+            'android_manifest': '<(DEPTH)/build/android/AndroidManifest.xml',
+            # Write the inputs list to a file, so that its mtime is updated when
+            # the list of inputs changes.
+            'inputs_list_file': '>|(java_resources.<(_target_name).gypcmd >@(resource_input_paths))',
+            'process_resources_options': [],
+            'conditions': [
+              ['res_v14_skip == 1', {
+                'process_resources_options': ['--v14-skip']
+              }],
+            ],
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/process_resources.py',
+            '<(DEPTH)/build/android/gyp/generate_v14_compatible_resources.py',
+            '>@(resource_input_paths)',
+            '>@(dependencies_res_zip_paths)',
+            '>(inputs_list_file)',
+          ],
+          'outputs': [
+            '<(resource_zip_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/process_resources.py',
+            '--android-sdk', '<(android_sdk)',
+            '--aapt-path', '<(android_aapt_path)',
+            '--non-constant-id',
+
+            '--android-manifest', '<(android_manifest)',
+            '--custom-package', '<(R_package)',
+
+            '--dependencies-res-zips', '>(dependencies_res_zip_paths)',
+            '--resource-dirs', '<(res_input_dirs)',
+
+            '--R-dir', '<(R_dir)',
+            '--resource-zip-out', '<(resource_zip_path)',
+
+            '<@(process_resources_options)',
+          ],
+        },
+      ],
+    }],
+    ['proguard_preprocess == 1', {
+      'actions': [
+        {
+          'action_name': 'proguard_<(_target_name)',
+          'message': 'Proguard preprocessing <(_target_name) jar',
+          'inputs': [
+            '<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/proguard.py',
+            '<(javac_jar_path)',
+            '<(proguard_config)',
+          ],
+          'outputs': [
+            '<(jar_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/proguard.py',
+            '--proguard-path=<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '--input-path=<(javac_jar_path)',
+            '--output-path=<(jar_path)',
+            '--proguard-config=<(proguard_config)',
+            '--classpath=<(android_sdk_jar) >(input_jars_paths)',
+          ]
+        },
+      ],
+    }],
+    ['run_findbugs == 1', {
+      'actions': [
+        {
+          'action_name': 'findbugs_<(_target_name)',
+          'message': 'Running findbugs on <(_target_name)',
+          'inputs': [
+            '<(DEPTH)/build/android/findbugs_diff.py',
+            '<(DEPTH)/build/android/findbugs_filter/findbugs_exclude.xml',
+            '<(DEPTH)/build/android/pylib/utils/findbugs.py',
+            '>@(input_jars_paths)',
+            '<(jar_final_path)',
+            '<(compile_stamp)',
+          ],
+          'outputs': [
+            '<(findbugs_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/findbugs_diff.py',
+            '--auxclasspath-gyp', '>(input_jars_paths)',
+            '--stamp', '<(findbugs_stamp)',
+            '<(jar_final_path)',
+          ],
+        },
+      ],
+    }],
+    ['enable_errorprone == 1', {
+      'dependencies': [
+        '<(DEPTH)/third_party/errorprone/errorprone.gyp:chromium_errorprone',
+      ],
+    }],
+  ],
+  'actions': [
+    {
+      'action_name': 'javac_<(_target_name)',
+      'message': 'Compiling <(_target_name) java sources',
+      'variables': {
+        'extra_args': [],
+        'extra_inputs': [],
+        'java_sources': ['>!@(find >(java_in_dir)>(java_in_dir_suffix) >(additional_src_dirs) -name "*.java")'],
+        'conditions': [
+          ['enable_errorprone == 1', {
+            'extra_inputs': [
+              '<(errorprone_exe_path)',
+            ],
+            'extra_args': [ '--use-errorprone-path=<(errorprone_exe_path)' ],
+          }],
+        ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/javac.py',
+        '>@(java_sources)',
+        '>@(input_jars_paths)',
+        '>@(additional_input_paths)',
+        '<@(extra_inputs)',
+      ],
+      'outputs': [
+        '<(compile_stamp)',
+        '<(javac_jar_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/javac.py',
+        '--bootclasspath=<(android_sdk_jar)',
+        '--classpath=>(input_jars_paths)',
+        '--src-gendirs=>(generated_src_dirs)',
+        '--javac-includes=<(javac_includes)',
+        '--chromium-code=<(chromium_code)',
+        '--jar-path=<(javac_jar_path)',
+        '--jar-excluded-classes=<(jar_excluded_classes)',
+        '--stamp=<(compile_stamp)',
+        '>@(java_sources)',
+        '<@(extra_args)',
+      ]
+    },
+    {
+      'action_name': 'instr_jar_<(_target_name)',
+      'message': 'Instrumenting <(_target_name) jar',
+      'variables': {
+        'input_path': '<(jar_path)',
+        'output_path': '<(jar_final_path)',
+        'stamp_path': '<(instr_stamp)',
+        'instr_type': 'jar',
+      },
+      'outputs': [
+        '<(jar_final_path)',
+      ],
+      'inputs': [
+        '<(jar_path)',
+      ],
+      'includes': [ 'android/instr_action.gypi' ],
+    },
+    {
+      'variables': {
+        'src_dirs': [
+          '<(java_in_dir)<(java_in_dir_suffix)',
+          '>@(additional_src_dirs)',
+        ],
+        'stamp_path': '<(lint_stamp)',
+        'result_path': '<(lint_result)',
+        'config_path': '<(lint_config)',
+        'lint_jar_path': '<(jar_final_path)',
+      },
+      'inputs': [
+        '<(jar_final_path)',
+        '<(compile_stamp)',
+      ],
+      'outputs': [
+        '<(lint_stamp)',
+      ],
+      'includes': [ 'android/lint_action.gypi' ],
+    },
+    {
+      'action_name': 'jar_toc_<(_target_name)',
+      'message': 'Creating <(_target_name) jar.TOC',
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/util/md5_check.py',
+        '<(DEPTH)/build/android/gyp/jar_toc.py',
+        '<(jar_final_path)',
+      ],
+      'outputs': [
+        '<(jar_final_path).TOC',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/jar_toc.py',
+        '--jar-path=<(jar_final_path)',
+        '--toc-path=<(jar_final_path).TOC',
+      ]
+    },
+    {
+      'action_name': 'dex_<(_target_name)',
+      'variables': {
+        'conditions': [
+          ['emma_instrument != 0', {
+            'dex_no_locals': 1,
+          }],
+        ],
+        'dex_input_paths': [ '<(jar_final_path)' ],
+        'output_path': '<(dex_path)',
+      },
+      'includes': [ 'android/dex_action.gypi' ],
+    },
+  ],
+}
diff --git a/build/java_aidl.gypi b/build/java_aidl.gypi
new file mode 100644
index 0000000..dda2894
--- /dev/null
+++ b/build/java_aidl.gypi
@@ -0,0 +1,79 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java aidl files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'aidl_aidl-file-name',
+#   'type': 'none',
+#   'variables': {
+#     'aidl_interface_file': '<interface-path>/<interface-file>.aidl',
+#     'aidl_import_include': '<(DEPTH)/<path-to-src-dir>',
+#   },
+#   'sources': {
+#     '<input-path1>/<input-file1>.aidl',
+#     '<input-path2>/<input-file2>.aidl',
+#     ...
+#   },
+#   'includes': ['<path-to-this-file>/java_aidl.gypi'],
+# }
+#
+#
+# The generated java files will be:
+#   <(PRODUCT_DIR)/lib.java/<input-file1>.java
+#   <(PRODUCT_DIR)/lib.java/<input-file2>.java
+#   ...
+#
+# Optional variables:
+#  aidl_import_include - This should be an absolute path to your java src folder
+#    that contains the classes that are imported by your aidl files.
+#
+# TODO(cjhopman): dependents need to rebuild when this target's inputs have changed.
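+#
+# Illustratively (file names are hypothetical), for a source file
+# path/to/IFoo.aidl the rule below runs roughly:
+#
+#   <(aidl_path) -p<(android_sdk)/framework.aidl \
+#       -p<(aidl_interface_file) <@(additional_aidl_arguments) \
+#       path/to/IFoo.aidl <(intermediate_dir)/IFoo.java
+#
+# so the generated IFoo.java lands in <(intermediate_dir) and is exposed to
+# dependents via generated_src_dirs.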
+
+{
+  'variables': {
+    'aidl_path%': '<(android_sdk_tools)/aidl',
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)/aidl',
+    'aidl_import_include%': '',
+    'additional_aidl_arguments': [],
+    'additional_aidl_input_paths': [],
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      'generated_src_dirs': ['<(intermediate_dir)/'],
+    },
+  },
+  'conditions': [
+    ['aidl_import_include != ""', {
+      'variables': {
+        'additional_aidl_arguments': [ '-I<(aidl_import_include)' ],
+        'additional_aidl_input_paths': [ '<!@(find <(aidl_import_include) -name "*.java" | sort)' ],
+      }
+    }],
+  ],
+  'rules': [
+    {
+      'rule_name': 'compile_aidl',
+      'extension': 'aidl',
+      'inputs': [
+        '<(android_sdk)/framework.aidl',
+        '<(aidl_interface_file)',
+        '<@(additional_aidl_input_paths)',
+      ],
+      'outputs': [
+        '<(intermediate_dir)/<(RULE_INPUT_ROOT).java',
+      ],
+      'action': [
+        '<(aidl_path)',
+        '-p<(android_sdk)/framework.aidl',
+        '-p<(aidl_interface_file)',
+        '<@(additional_aidl_arguments)',
+        '<(RULE_INPUT_PATH)',
+        '<(intermediate_dir)/<(RULE_INPUT_ROOT).java',
+      ],
+    },
+  ],
+}
diff --git a/build/java_apk.gypi b/build/java_apk.gypi
new file mode 100644
index 0000000..ff837c3
--- /dev/null
+++ b/build/java_apk.gypi
@@ -0,0 +1,1063 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Android APKs in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_package_apk',
+#   'type': 'none',
+#   'variables': {
+#     'apk_name': 'MyPackage',
+#     'java_in_dir': 'path/to/package/root',
+#     'resource_dir': 'path/to/package/root/res',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  apk_name - The final apk will be named <apk_name>.apk
+#  java_in_dir - The top-level java directory. The src should be in
+#    <(java_in_dir)/src.
+# Optional/automatic variables:
+#  additional_input_paths - These paths will be included in the 'inputs' list to
+#    ensure that this target is rebuilt when one of these paths changes.
+#  additional_res_packages - Package names of R.java files generated in addition
+#    to the default package name defined in AndroidManifest.xml.
+#  additional_src_dirs - Additional directories with .java files to be compiled
+#    and included in the output of this target.
+#  additional_bundled_libs - Additional libraries that will be stripped and
+#    bundled in the apk.
+#  asset_location - The directory where assets are located.
+#  create_abi_split - Whether to create abi-based splits. Splits
+#    are supported only for minSdkVersion >= 21.
+#  create_density_splits - Whether to create density-based apk splits.
+#  language_splits - List of languages to create apk splits for.
+#  generated_src_dirs - Same as additional_src_dirs except used for .java files
+#    that are generated at build time. This should be set automatically by a
+#    target's dependencies. The .java files in these directories are not
+#    included in the 'inputs' list (unlike additional_src_dirs).
+#  input_jars_paths - The path to jars to be included in the classpath. This
+#    should be filled automatically by depending on the appropriate targets.
+#  is_test_apk - Set to 1 if building a test apk.  This prevents resources from
+#    dependencies from being re-included.
+#  native_lib_target - The target_name of the target which generates the final
+#    shared library to be included in this apk. A stripped copy of the
+#    library will be included in the apk.
+#  resource_dir - The directory for resources.
+#  shared_resources - Make a resource package that can be loaded by a different
+#    application at runtime to access the package's resources.
+#  R_package - A custom Java package to generate the resource file R.java in.
+#    By default, the package given in AndroidManifest.xml will be used.
+#  include_all_resources - Set to 1 to include all resource IDs in all generated
+#    R.java files.
+#  use_chromium_linker - Enable the content dynamic linker that allows sharing the
+#    RELRO section of the native libraries between the different processes.
+#  load_library_from_zip - When using the dynamic linker, load the library
+#    directly out of the zip file.
+#  use_relocation_packer - Enable relocation packing. Relies on the chromium
+#    linker, so use_chromium_linker must also be enabled.
+#  enable_chromium_linker_tests - Enable the content dynamic linker test support
+#    code. This allows a test APK to inject a Linker.TestRunner instance at
+#    runtime. Should only be used by the chromium_linker_test_apk target!!
+#  never_lint - Set to 1 to not run lint on this target.
+#  java_in_dir_suffix - To override the /src suffix on java_in_dir.
+#  app_manifest_version_name - set the app's 'human readable' version number.
+#  app_manifest_version_code - set the app's version number.
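+#
+# For example (illustrative only; the target and library names below are
+# hypothetical), an apk that bundles a native library might look like:
+# {
+#   'target_name': 'my_package_apk',
+#   'type': 'none',
+#   'dependencies': [ 'libmy_package' ],
+#   'variables': {
+#     'apk_name': 'MyPackage',
+#     'java_in_dir': 'path/to/package/root',
+#     'resource_dir': 'path/to/package/root/res',
+#     'native_lib_target': 'libmy_package',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }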
+{
+  'variables': {
+    'tested_apk_obfuscated_jar_path%': '/',
+    'tested_apk_dex_path%': '/',
+    'additional_input_paths': [],
+    'create_density_splits%': 0,
+    'language_splits': [],
+    'input_jars_paths': [],
+    'library_dexed_jars_paths': [],
+    'additional_src_dirs': [],
+    'generated_src_dirs': [],
+    'app_manifest_version_name%': '<(android_app_version_name)',
+    'app_manifest_version_code%': '<(android_app_version_code)',
+    # aapt generates this proguard.txt.
+    'generated_proguard_file': '<(intermediate_dir)/proguard.txt',
+    'proguard_enabled%': 'false',
+    'proguard_flags_paths': ['<(generated_proguard_file)'],
+    'jar_name': 'chromium_apk_<(_target_name).jar',
+    'resource_dir%':'<(DEPTH)/build/android/ant/empty/res',
+    'R_package%':'',
+    'include_all_resources%': 0,
+    'additional_R_text_files': [],
+    'dependencies_res_zip_paths': [],
+    'additional_res_packages': [],
+    'additional_bundled_libs%': [],
+    'is_test_apk%': 0,
+    # Allow icu data, v8 snapshots, and pak files to be loaded directly from the .apk.
+    # Note: These are actually suffix matches, not necessarily extensions.
+    'extensions_to_not_compress%': '.dat,.bin,.pak',
+    'resource_input_paths': [],
+    'intermediate_dir': '<(PRODUCT_DIR)/<(_target_name)',
+    'asset_location%': '<(intermediate_dir)/assets',
+    'codegen_stamp': '<(intermediate_dir)/codegen.stamp',
+    'package_input_paths': [],
+    'ordered_libraries_file': '<(intermediate_dir)/native_libraries.json',
+    'additional_ordered_libraries_file': '<(intermediate_dir)/additional_native_libraries.json',
+    'native_libraries_template': '<(DEPTH)/base/android/java/templates/NativeLibraries.template',
+    'native_libraries_java_dir': '<(intermediate_dir)/native_libraries_java/',
+    'native_libraries_java_file': '<(native_libraries_java_dir)/NativeLibraries.java',
+    'native_libraries_java_stamp': '<(intermediate_dir)/native_libraries_java.stamp',
+    'native_libraries_template_data_dir': '<(intermediate_dir)/native_libraries/',
+    'native_libraries_template_data_file': '<(native_libraries_template_data_dir)/native_libraries_array.h',
+    'native_libraries_template_version_file': '<(native_libraries_template_data_dir)/native_libraries_version.h',
+    'compile_stamp': '<(intermediate_dir)/compile.stamp',
+    'lint_stamp': '<(intermediate_dir)/lint.stamp',
+    'lint_result': '<(intermediate_dir)/lint_result.xml',
+    'lint_config': '<(intermediate_dir)/lint_config.xml',
+    'never_lint%': 0,
+    'findbugs_stamp': '<(intermediate_dir)/findbugs.stamp',
+    'run_findbugs%': 0,
+    'java_in_dir_suffix%': '/src',
+    'instr_stamp': '<(intermediate_dir)/instr.stamp',
+    'jar_stamp': '<(intermediate_dir)/jar.stamp',
+    'obfuscate_stamp': '<(intermediate_dir)/obfuscate.stamp',
+    'pack_relocations_stamp': '<(intermediate_dir)/pack_relocations.stamp',
+    'strip_stamp': '<(intermediate_dir)/strip.stamp',
+    'stripped_libraries_dir': '<(intermediate_dir)/stripped_libraries',
+    'strip_additional_stamp': '<(intermediate_dir)/strip_additional.stamp',
+    'version_stamp': '<(intermediate_dir)/version.stamp',
+    'javac_includes': [],
+    'jar_excluded_classes': [],
+    'javac_jar_path': '<(intermediate_dir)/<(_target_name).javac.jar',
+    'jar_path': '<(PRODUCT_DIR)/lib.java/<(jar_name)',
+    'obfuscated_jar_path': '<(intermediate_dir)/obfuscated.jar',
+    'test_jar_path': '<(PRODUCT_DIR)/test.lib.java/<(apk_name).jar',
+    'dex_path': '<(intermediate_dir)/classes.dex',
+    'emma_device_jar': '<(android_sdk_root)/tools/lib/emma_device.jar',
+    'android_manifest_path%': '<(java_in_dir)/AndroidManifest.xml',
+    'split_android_manifest_path': '<(intermediate_dir)/split-manifests/<(android_app_abi)/AndroidManifest.xml',
+    'push_stamp': '<(intermediate_dir)/push.stamp',
+    'link_stamp': '<(intermediate_dir)/link.stamp',
+    'resource_zip_path': '<(intermediate_dir)/<(_target_name).resources.zip',
+    'shared_resources%': 0,
+    'final_apk_path%': '<(PRODUCT_DIR)/apks/<(apk_name).apk',
+    'final_apk_path_no_extension%': '<(PRODUCT_DIR)/apks/<(apk_name)',
+    'final_abi_split_apk_path%': '<(PRODUCT_DIR)/apks/<(apk_name)-abi-<(android_app_abi).apk',
+    'incomplete_apk_path': '<(intermediate_dir)/<(apk_name)-incomplete.apk',
+    'apk_install_record': '<(intermediate_dir)/apk_install.record.stamp',
+    'device_intermediate_dir': '/data/data/org.chromium.gyp_managed_install/<(_target_name)/<(CONFIGURATION_NAME)',
+    'symlink_script_host_path': '<(intermediate_dir)/create_symlinks.sh',
+    'symlink_script_device_path': '<(device_intermediate_dir)/create_symlinks.sh',
+    'create_standalone_apk%': 1,
+    'res_v14_skip%': 0,
+    'variables': {
+      'variables': {
+        'native_lib_target%': '',
+        'native_lib_version_name%': '',
+        'use_chromium_linker%' : 0,
+        'use_relocation_packer%' : 0,
+        'enable_chromium_linker_tests%': 0,
+        'is_test_apk%': 0,
+        'unsigned_apk_path': '<(intermediate_dir)/<(apk_name)-unsigned.apk',
+        'unsigned_abi_split_apk_path': '<(intermediate_dir)/<(apk_name)-abi-<(android_app_abi)-unsigned.apk',
+        'create_abi_split%': 0,
+      },
+      'unsigned_apk_path': '<(unsigned_apk_path)',
+      'unsigned_abi_split_apk_path': '<(unsigned_abi_split_apk_path)',
+      'create_abi_split%': '<(create_abi_split)',
+      'conditions': [
+        ['gyp_managed_install == 1 and native_lib_target != ""', {
+          'conditions': [
+            ['create_abi_split == 0', {
+              'unsigned_standalone_apk_path': '<(intermediate_dir)/<(apk_name)-standalone-unsigned.apk',
+            }, {
+              'unsigned_standalone_apk_path': '<(intermediate_dir)/<(apk_name)-abi-<(android_app_abi)-standalone-unsigned.apk',
+            }],
+          ],
+        }, {
+          'unsigned_standalone_apk_path': '<(unsigned_apk_path)',
+        }],
+        ['gyp_managed_install == 1', {
+          'apk_package_native_libs_dir': '<(intermediate_dir)/libs.managed',
+        }, {
+          'apk_package_native_libs_dir': '<(intermediate_dir)/libs',
+        }],
+        ['is_test_apk == 0 and emma_coverage != 0', {
+          'emma_instrument%': 1,
+        },{
+          'emma_instrument%': 0,
+        }],
+        # When using abi splits, the abi split is modified by
+        # gyp_managed_install rather than the main .apk.
+        ['create_abi_split == 1', {
+          'managed_input_apk_path': '<(unsigned_abi_split_apk_path)',
+        }, {
+          'managed_input_apk_path': '<(unsigned_apk_path)',
+        }],
+      ],
+    },
+    'native_lib_target%': '',
+    'native_lib_version_name%': '',
+    'use_chromium_linker%' : 0,
+    'load_library_from_zip%' : 0,
+    'use_relocation_packer%' : 0,
+    'enable_chromium_linker_tests%': 0,
+    'emma_instrument%': '<(emma_instrument)',
+    'apk_package_native_libs_dir': '<(apk_package_native_libs_dir)',
+    'unsigned_standalone_apk_path': '<(unsigned_standalone_apk_path)',
+    'unsigned_apk_path': '<(unsigned_apk_path)',
+    'unsigned_abi_split_apk_path': '<(unsigned_abi_split_apk_path)',
+    'create_abi_split%': '<(create_abi_split)',
+    'managed_input_apk_path': '<(managed_input_apk_path)',
+    'libchromium_android_linker': 'libchromium_android_linker.>(android_product_extension)',
+    'extra_native_libs': [],
+    'native_lib_placeholder_stamp': '<(apk_package_native_libs_dir)/<(android_app_abi)/native_lib_placeholder.stamp',
+    'native_lib_placeholders': [],
+    'main_apk_name': '<(apk_name)',
+    'enable_errorprone%': '0',
+    'errorprone_exe_path': '<(PRODUCT_DIR)/bin.java/chromium_errorprone',
+  },
+  # Pass the jar path to the apk's "fake" jar target.  This would be better as
+  # direct_dependent_settings, but a variable set by direct_dependent_settings
+  # cannot be lifted in a dependent to all_dependent_settings.
+  'all_dependent_settings': {
+    'conditions': [
+      ['proguard_enabled == "true"', {
+        'variables': {
+          'proguard_enabled': 'true',
+        }
+      }],
+    ],
+    'variables': {
+      'apk_output_jar_path': '<(jar_path)',
+      'tested_apk_obfuscated_jar_path': '<(obfuscated_jar_path)',
+      'tested_apk_dex_path': '<(dex_path)',
+    },
+  },
+  'conditions': [
+    ['resource_dir!=""', {
+      'variables': {
+        'resource_input_paths': [ '<!@(find <(resource_dir) -name "*")' ]
+      },
+    }],
+    ['R_package != ""', {
+      'variables': {
+        # We generate R.java in package R_package (in addition to the package
+        # listed in the AndroidManifest.xml, which is unavoidable).
+        'additional_res_packages': ['<(R_package)'],
+        'additional_R_text_files': ['<(intermediate_dir)/R.txt'],
+      },
+    }],
+    ['native_lib_target != "" and component == "shared_library"', {
+      'dependencies': [
+        '<(DEPTH)/build/android/setup.gyp:copy_system_libraries',
+      ],
+    }],
+    ['use_chromium_linker == 1', {
+      'dependencies': [
+        '<(DEPTH)/base/base.gyp:chromium_android_linker',
+      ],
+    }],
+    ['enable_errorprone == 1', {
+      'dependencies': [
+        '<(DEPTH)/third_party/errorprone/errorprone.gyp:chromium_errorprone',
+      ],
+    }],
+    ['native_lib_target != ""', {
+      'variables': {
+        'conditions': [
+          ['use_chromium_linker == 1', {
+            'variables': {
+              'chromium_linker_path': [
+                '<(SHARED_LIB_DIR)/<(libchromium_android_linker)',
+              ],
+            }
+          }, {
+            'variables': {
+              'chromium_linker_path': [],
+            },
+          }],
+        ],
+        'generated_src_dirs': [ '<(native_libraries_java_dir)' ],
+        'native_libs_paths': [
+          '<(SHARED_LIB_DIR)/<(native_lib_target).>(android_product_extension)',
+          '<@(chromium_linker_path)'
+        ],
+        'package_input_paths': [
+          '<(apk_package_native_libs_dir)/<(android_app_abi)/gdbserver',
+        ],
+      },
+      'copies': [
+        {
+          # gdbserver is always copied into the APK's native libs dir. The ant
+          # build scripts (apkbuilder task) will only include it in a debug
+          # build.
+          'destination': '<(apk_package_native_libs_dir)/<(android_app_abi)',
+          'files': [
+            '<(android_gdbserver)',
+          ],
+        },
+      ],
+      'actions': [
+        {
+          'variables': {
+            'input_libraries': [
+              '<@(native_libs_paths)',
+              '<@(extra_native_libs)',
+            ],
+          },
+          'includes': ['../build/android/write_ordered_libraries.gypi'],
+        },
+        {
+          'action_name': 'native_libraries_<(_target_name)',
+          'variables': {
+            'conditions': [
+              ['use_chromium_linker == 1', {
+                'variables': {
+                  'linker_gcc_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_gcc_preprocess_defines': [],
+                },
+              }],
+              ['load_library_from_zip == 1', {
+                'variables': {
+                  'linker_load_from_zip_file_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_load_from_zip_file_preprocess_defines': [],
+                },
+              }],
+              ['enable_chromium_linker_tests == 1', {
+                'variables': {
+                  'linker_tests_gcc_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER_TESTS',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_tests_gcc_preprocess_defines': [],
+                },
+              }],
+            ],
+            'gcc_preprocess_defines': [
+              '<@(linker_load_from_zip_file_preprocess_defines)',
+              '<@(linker_gcc_preprocess_defines)',
+              '<@(linker_tests_gcc_preprocess_defines)',
+            ],
+          },
+          'message': 'Creating NativeLibraries.java for <(_target_name)',
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+            '<(ordered_libraries_file)',
+            '<(native_libraries_template)',
+          ],
+          'outputs': [
+            '<(native_libraries_java_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+            '--include-path=',
+            '--output=<(native_libraries_java_file)',
+            '--template=<(native_libraries_template)',
+            '--stamp=<(native_libraries_java_stamp)',
+            '--defines', 'NATIVE_LIBRARIES_LIST=@FileArg(<(ordered_libraries_file):java_libraries_list)',
+            '--defines', 'NATIVE_LIBRARIES_VERSION_NUMBER="<(native_lib_version_name)"',
+            '<@(gcc_preprocess_defines)',
+          ],
+        },
+        {
+          'action_name': 'strip_native_libraries',
+          'variables': {
+            'ordered_libraries_file%': '<(ordered_libraries_file)',
+            'stripped_libraries_dir%': '<(stripped_libraries_dir)',
+            'input_paths': [
+              '<@(native_libs_paths)',
+              '<@(extra_native_libs)',
+            ],
+            'stamp': '<(strip_stamp)'
+          },
+          'includes': ['../build/android/strip_native_libraries.gypi'],
+        },
+        {
+          'action_name': 'insert_chromium_version',
+          'variables': {
+            'ordered_libraries_file%': '<(ordered_libraries_file)',
+            'stripped_libraries_dir%': '<(stripped_libraries_dir)',
+            'version_string': '<(native_lib_version_name)',
+            'input_paths': [
+              '<(strip_stamp)',
+            ],
+            'stamp': '<(version_stamp)'
+          },
+          'includes': ['../build/android/insert_chromium_version.gypi'],
+        },
+        {
+          'action_name': 'pack_relocations',
+          'variables': {
+            'conditions': [
+              ['use_chromium_linker == 1 and use_relocation_packer == 1 and profiling != 1', {
+                'enable_packing': 1,
+              }, {
+                'enable_packing': 0,
+              }],
+            ],
+            'exclude_packing_list': [
+              '<(libchromium_android_linker)',
+            ],
+            'ordered_libraries_file%': '<(ordered_libraries_file)',
+            'stripped_libraries_dir%': '<(stripped_libraries_dir)',
+            'packed_libraries_dir': '<(libraries_source_dir)',
+            'input_paths': [
+              '<(version_stamp)'
+            ],
+            'stamp': '<(pack_relocations_stamp)',
+          },
+          'includes': ['../build/android/pack_relocations.gypi'],
+        },
+        {
+          'variables': {
+            'input_libraries': [
+              '<@(additional_bundled_libs)',
+            ],
+            'ordered_libraries_file': '<(additional_ordered_libraries_file)',
+            'subtarget': '_additional_libraries',
+          },
+          'includes': ['../build/android/write_ordered_libraries.gypi'],
+        },
+        {
+          'action_name': 'strip_additional_libraries',
+          'variables': {
+            'ordered_libraries_file': '<(additional_ordered_libraries_file)',
+            'stripped_libraries_dir': '<(libraries_source_dir)',
+            'input_paths': [
+              '<@(additional_bundled_libs)',
+              '<(strip_stamp)',
+            ],
+            'stamp': '<(strip_additional_stamp)'
+          },
+          'includes': ['../build/android/strip_native_libraries.gypi'],
+        },
+        {
+          'action_name': 'Create native lib placeholder files for previous releases',
+          'variables': {
+            'placeholders': ['<@(native_lib_placeholders)'],
+            'conditions': [
+              ['gyp_managed_install == 1', {
+                # This "library" just needs to be put in the .apk. It is not loaded
+                # at runtime.
+                'placeholders': ['libfix.crbug.384638.so'],
+              }]
+            ],
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/create_placeholder_files.py',
+          ],
+          'outputs': [
+            '<(native_lib_placeholder_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/create_placeholder_files.py',
+            '--dest-lib-dir=<(apk_package_native_libs_dir)/<(android_app_abi)/',
+            '--stamp=<(native_lib_placeholder_stamp)',
+            '<@(placeholders)',
+          ],
+        },
+      ],
+      'conditions': [
+        ['gyp_managed_install == 1', {
+          'variables': {
+            'libraries_top_dir': '<(intermediate_dir)/lib.stripped',
+            'libraries_source_dir': '<(libraries_top_dir)/lib/<(android_app_abi)',
+            'device_library_dir': '<(device_intermediate_dir)/lib.stripped',
+            'configuration_name': '<(CONFIGURATION_NAME)',
+          },
+          'dependencies': [
+            '<(DEPTH)/build/android/setup.gyp:get_build_device_configurations',
+            '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands',
+          ],
+          'actions': [
+            {
+              'includes': ['../build/android/push_libraries.gypi'],
+            },
+            {
+              'action_name': 'create device library symlinks',
+              'message': 'Creating links on device for <(_target_name)',
+              'inputs': [
+                '<(DEPTH)/build/android/gyp/util/build_utils.py',
+                '<(DEPTH)/build/android/gyp/create_device_library_links.py',
+                '<(apk_install_record)',
+                '<(build_device_config_path)',
+                '<(ordered_libraries_file)',
+              ],
+              'outputs': [
+                '<(link_stamp)'
+              ],
+              'action': [
+                'python', '<(DEPTH)/build/android/gyp/create_device_library_links.py',
+                '--build-device-configuration=<(build_device_config_path)',
+                '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+                '--script-host-path=<(symlink_script_host_path)',
+                '--script-device-path=<(symlink_script_device_path)',
+                '--target-dir=<(device_library_dir)',
+                '--apk=<(incomplete_apk_path)',
+                '--stamp=<(link_stamp)',
+                '--configuration-name=<(CONFIGURATION_NAME)',
+              ],
+            },
+          ],
+          'conditions': [
+            ['create_standalone_apk == 1', {
+              'actions': [
+                {
+                  'action_name': 'create standalone APK',
+                  'variables': {
+                    'inputs': [
+                      '<(ordered_libraries_file)',
+                      '<(strip_additional_stamp)',
+                      '<(pack_relocations_stamp)',
+                    ],
+                    'output_apk_path': '<(unsigned_standalone_apk_path)',
+                    'libraries_top_dir%': '<(libraries_top_dir)',
+                    'input_apk_path': '<(managed_input_apk_path)',
+                  },
+                  'includes': [ 'android/create_standalone_apk_action.gypi' ],
+                },
+              ],
+            }],
+          ],
+        }, {
+          # gyp_managed_install != 1
+          'variables': {
+            'libraries_source_dir': '<(apk_package_native_libs_dir)/<(android_app_abi)',
+            'package_input_paths': [
+              '<(strip_additional_stamp)',
+              '<(pack_relocations_stamp)',
+            ],
+          },
+        }],
+      ],
+    }], # native_lib_target != ''
+    ['gyp_managed_install == 0 or create_standalone_apk == 1 or create_abi_split == 1', {
+      'dependencies': [
+        '<(DEPTH)/build/android/rezip.gyp:rezip_apk_jar',
+      ],
+    }],
+    ['create_abi_split == 1 or gyp_managed_install == 0 or create_standalone_apk == 1', {
+      'actions': [
+        {
+          'action_name': 'finalize_base',
+          'variables': {
+            'output_apk_path': '<(final_apk_path)',
+            'conditions': [
+              ['create_abi_split == 0', {
+                'input_apk_path': '<(unsigned_standalone_apk_path)',
+              }, {
+                'input_apk_path': '<(unsigned_apk_path)',
+                'load_library_from_zip': 0,
+              }]
+            ],
+          },
+          'includes': [ 'android/finalize_apk_action.gypi']
+        },
+      ],
+    }],
+    ['create_abi_split == 1', {
+      'actions': [
+        {
+          'action_name': 'generate_split_manifest_<(_target_name)',
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/generate_split_manifest.py',
+            '<(android_manifest_path)',
+          ],
+          'outputs': [
+            '<(split_android_manifest_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/generate_split_manifest.py',
+            '--main-manifest', '<(android_manifest_path)',
+            '--out-manifest', '<(split_android_manifest_path)',
+            '--split', 'abi_<(android_app_abi)',
+          ],
+        },
+        {
+          'variables': {
+            'apk_name': '<(main_apk_name)-abi-<(android_app_abi)',
+            'asset_location': '',
+            'android_manifest_path': '<(split_android_manifest_path)',
+            'create_density_splits': 0,
+            'language_splits=': [],
+          },
+          'includes': [ 'android/package_resources_action.gypi' ],
+        },
+        {
+          'variables': {
+            'apk_name': '<(main_apk_name)-abi-<(android_app_abi)',
+            'apk_path': '<(unsigned_abi_split_apk_path)',
+            'has_code': 0,
+            'native_libs_dir': '<(apk_package_native_libs_dir)',
+            'extra_inputs': ['<(native_lib_placeholder_stamp)'],
+          },
+          'includes': ['android/apkbuilder_action.gypi'],
+        },
+      ],
+    }],
+    ['create_abi_split == 1 and (gyp_managed_install == 0 or create_standalone_apk == 1)', {
+      'actions': [
+        {
+          'action_name': 'finalize_split',
+          'variables': {
+            'output_apk_path': '<(final_abi_split_apk_path)',
+            'conditions': [
+              ['gyp_managed_install == 1', {
+                'input_apk_path': '<(unsigned_standalone_apk_path)',
+              }, {
+                'input_apk_path': '<(unsigned_abi_split_apk_path)',
+              }],
+            ],
+          },
+          'includes': [ 'android/finalize_apk_action.gypi']
+        },
+      ],
+    }],
+    ['gyp_managed_install == 1', {
+      'actions': [
+        {
+          'action_name': 'finalize incomplete apk',
+          'variables': {
+            'load_library_from_zip': 0,
+            'input_apk_path': '<(managed_input_apk_path)',
+            'output_apk_path': '<(incomplete_apk_path)',
+          },
+          'includes': [ 'android/finalize_apk_action.gypi']
+        },
+        {
+          'action_name': 'apk_install_<(_target_name)',
+          'message': 'Installing <(apk_name).apk',
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/apk_install.py',
+            '<(build_device_config_path)',
+            '<(incomplete_apk_path)',
+          ],
+          'outputs': [
+            '<(apk_install_record)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/apk_install.py',
+            '--build-device-configuration=<(build_device_config_path)',
+            '--install-record=<(apk_install_record)',
+            '--configuration-name=<(CONFIGURATION_NAME)',
+            '--android-sdk-tools', '<(android_sdk_tools)',
+          ],
+          'conditions': [
+            ['create_abi_split == 1', {
+              'inputs': [
+                '<(final_apk_path)',
+              ],
+              'action': [
+                '--apk-path=<(final_apk_path)',
+                '--split-apk-path=<(incomplete_apk_path)',
+              ],
+            }, {
+              'action': [
+                '--apk-path=<(incomplete_apk_path)',
+              ],
+            }],
+            ['create_density_splits == 1', {
+              'inputs': [
+                '<(final_apk_path_no_extension)-density-hdpi.apk',
+                '<(final_apk_path_no_extension)-density-xhdpi.apk',
+                '<(final_apk_path_no_extension)-density-xxhdpi.apk',
+                '<(final_apk_path_no_extension)-density-xxxhdpi.apk',
+                '<(final_apk_path_no_extension)-density-tvdpi.apk',
+              ],
+              'action': [
+                '--split-apk-path=<(final_apk_path_no_extension)-density-hdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-xhdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-xxhdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-xxxhdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-tvdpi.apk',
+              ],
+            }],
+            ['language_splits != []', {
+              'inputs': [
+                "<!@(python <(DEPTH)/build/apply_locales.py '<(final_apk_path_no_extension)-lang-ZZLOCALE.apk' <(language_splits))",
+              ],
+              'action': [
+                "<!@(python <(DEPTH)/build/apply_locales.py -- '--split-apk-path=<(final_apk_path_no_extension)-lang-ZZLOCALE.apk' <(language_splits))",
+              ],
+            }],
+          ],
+        },
+      ],
+    }],
+    ['create_density_splits == 1', {
+      'actions': [
+        {
+          'action_name': 'finalize_density_splits',
+          'variables': {
+            'density_splits': 1,
+          },
+          'includes': [ 'android/finalize_splits_action.gypi']
+        },
+      ],
+    }],
+    ['is_test_apk == 1', {
+      'dependencies': [
+        '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands',
+        '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+      ]
+    }],
+    ['run_findbugs == 1', {
+      'actions': [
+        {
+          'action_name': 'findbugs_<(_target_name)',
+          'message': 'Running findbugs on <(_target_name)',
+          'inputs': [
+            '<(DEPTH)/build/android/findbugs_diff.py',
+            '<(DEPTH)/build/android/findbugs_filter/findbugs_exclude.xml',
+            '<(DEPTH)/build/android/pylib/utils/findbugs.py',
+            '>@(input_jars_paths)',
+            '<(jar_path)',
+            '<(compile_stamp)',
+          ],
+          'outputs': [
+            '<(findbugs_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/findbugs_diff.py',
+            '--auxclasspath-gyp', '>(input_jars_paths)',
+            '--stamp', '<(findbugs_stamp)',
+            '<(jar_path)',
+          ],
+        },
+      ],
+    },
+    ]
+  ],
+  'dependencies': [
+    '<(DEPTH)/tools/android/md5sum/md5sum.gyp:md5sum',
+  ],
+  'actions': [
+    {
+      'action_name': 'process_resources',
+      'message': 'processing resources for <(_target_name)',
+      'variables': {
+        # Write the inputs list to a file, so that its mtime is updated when
+        # the list of inputs changes.
+        'inputs_list_file': '>|(apk_codegen.<(_target_name).gypcmd >@(additional_input_paths) >@(resource_input_paths))',
+        'process_resources_options': [],
+        'conditions': [
+          ['is_test_apk == 1', {
+            'dependencies_res_zip_paths=': [],
+            'additional_res_packages=': [],
+          }],
+          ['res_v14_skip == 1', {
+            'process_resources_options+': ['--v14-skip']
+          }],
+          ['shared_resources == 1', {
+            'process_resources_options+': ['--shared-resources']
+          }],
+          ['R_package != ""', {
+            'process_resources_options+': ['--custom-package', '<(R_package)']
+          }],
+          ['include_all_resources == 1', {
+            'process_resources_options+': ['--include-all-resources']
+          }]
+        ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/process_resources.py',
+        '<(android_manifest_path)',
+        '>@(additional_input_paths)',
+        '>@(resource_input_paths)',
+        '>@(dependencies_res_zip_paths)',
+        '>(inputs_list_file)',
+      ],
+      'outputs': [
+        '<(resource_zip_path)',
+        '<(generated_proguard_file)',
+        '<(codegen_stamp)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/process_resources.py',
+        '--android-sdk', '<(android_sdk)',
+        '--aapt-path', '<(android_aapt_path)',
+
+        '--android-manifest', '<(android_manifest_path)',
+        '--dependencies-res-zips', '>(dependencies_res_zip_paths)',
+
+        '--extra-res-packages', '>(additional_res_packages)',
+        '--extra-r-text-files', '>(additional_R_text_files)',
+
+        '--proguard-file', '<(generated_proguard_file)',
+
+        '--resource-dirs', '<(resource_dir)',
+        '--resource-zip-out', '<(resource_zip_path)',
+
+        '--R-dir', '<(intermediate_dir)/gen',
+
+        '--stamp', '<(codegen_stamp)',
+
+        '<@(process_resources_options)',
+      ],
+    },
+    {
+      'action_name': 'javac_<(_target_name)',
+      'message': 'Compiling java for <(_target_name)',
+      'variables': {
+        'extra_args': [],
+        'extra_inputs': [],
+        'gen_src_dirs': [
+          '<(intermediate_dir)/gen',
+          '>@(generated_src_dirs)',
+        ],
+        # If there is a separate find for additional_src_dirs, it will find the
+        # wrong .java files when additional_src_dirs is empty.
+        # TODO(thakis): Gyp caches >! evaluation by command. Both java.gypi and
+        # java_apk.gypi evaluate the same command, and at the moment two targets
+        # set java_in_dir to "java". Add a dummy comment here to make sure
+        # that the two targets (one uses java.gypi, the other java_apk.gypi)
+        # get distinct source lists. Medium-term, make targets list all their
+        # Java files instead of using find. (As is, this will be broken if two
+        # targets use the same java_in_dir and both use java_apk.gypi or
+        # both use java.gypi.)
+        'java_sources': ['>!@(find >(java_in_dir)>(java_in_dir_suffix) >(additional_src_dirs) -name "*.java"  # apk)'],
+        'conditions': [
+          ['enable_errorprone == 1', {
+            'extra_inputs': [
+              '<(errorprone_exe_path)',
+            ],
+            'extra_args': [ '--use-errorprone-path=<(errorprone_exe_path)' ],
+          }],
+        ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/javac.py',
+        '>@(java_sources)',
+        '>@(input_jars_paths)',
+        '<(codegen_stamp)',
+        '<@(extra_inputs)',
+      ],
+      'conditions': [
+        ['native_lib_target != ""', {
+          'inputs': [ '<(native_libraries_java_stamp)' ],
+        }],
+      ],
+      'outputs': [
+        '<(compile_stamp)',
+        '<(javac_jar_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/javac.py',
+        '--bootclasspath=<(android_sdk_jar)',
+        '--classpath=>(input_jars_paths) <(android_sdk_jar)',
+        '--src-gendirs=>(gen_src_dirs)',
+        '--javac-includes=<(javac_includes)',
+        '--chromium-code=<(chromium_code)',
+        '--jar-path=<(javac_jar_path)',
+        '--jar-excluded-classes=<(jar_excluded_classes)',
+        '--stamp=<(compile_stamp)',
+        '<@(extra_args)',
+        '>@(java_sources)',
+      ],
+    },
+    {
+      'action_name': 'instr_jar_<(_target_name)',
+      'message': 'Instrumenting <(_target_name) jar',
+      'variables': {
+        'input_path': '<(javac_jar_path)',
+        'output_path': '<(jar_path)',
+        'stamp_path': '<(instr_stamp)',
+        'instr_type': 'jar',
+      },
+      'outputs': [
+        '<(instr_stamp)',
+        '<(jar_path)',
+      ],
+      'inputs': [
+        '<(javac_jar_path)',
+      ],
+      'includes': [ 'android/instr_action.gypi' ],
+    },
+    {
+      'variables': {
+        'src_dirs': [
+          '<(java_in_dir)<(java_in_dir_suffix)',
+          '>@(additional_src_dirs)',
+        ],
+        'lint_jar_path': '<(jar_path)',
+        'stamp_path': '<(lint_stamp)',
+        'result_path': '<(lint_result)',
+        'config_path': '<(lint_config)',
+      },
+      'outputs': [
+        '<(lint_stamp)',
+      ],
+      'includes': [ 'android/lint_action.gypi' ],
+    },
+    {
+      'action_name': 'obfuscate_<(_target_name)',
+      'message': 'Obfuscating <(_target_name)',
+      'variables': {
+        'additional_obfuscate_options': [],
+        'additional_obfuscate_input_paths': [],
+        'proguard_out_dir': '<(intermediate_dir)/proguard',
+        'proguard_input_jar_paths': [
+          '>@(input_jars_paths)',
+          '<(jar_path)',
+        ],
+        'target_conditions': [
+          ['is_test_apk == 1', {
+            'additional_obfuscate_options': [
+              '--testapp',
+            ],
+          }],
+          ['is_test_apk == 1 and tested_apk_obfuscated_jar_path != "/"', {
+            'additional_obfuscate_options': [
+              '--tested-apk-obfuscated-jar-path', '>(tested_apk_obfuscated_jar_path)',
+            ],
+            'additional_obfuscate_input_paths': [
+              '>(tested_apk_obfuscated_jar_path).info',
+            ],
+          }],
+          ['proguard_enabled == "true"', {
+            'additional_obfuscate_options': [
+              '--proguard-enabled',
+            ],
+          }],
+        ],
+        'obfuscate_input_jars_paths': [
+          '>@(input_jars_paths)',
+          '<(jar_path)',
+        ],
+      },
+      'conditions': [
+        ['is_test_apk == 1', {
+          'outputs': [
+            '<(test_jar_path)',
+          ],
+        }],
+      ],
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/apk_obfuscate.py',
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '>@(proguard_flags_paths)',
+        '>@(obfuscate_input_jars_paths)',
+        '>@(additional_obfuscate_input_paths)',
+        '<(instr_stamp)',
+      ],
+      'outputs': [
+        '<(obfuscate_stamp)',
+
+        # In non-Release builds, these paths will all be empty files.
+        '<(obfuscated_jar_path)',
+        '<(obfuscated_jar_path).info',
+        '<(obfuscated_jar_path).dump',
+        '<(obfuscated_jar_path).seeds',
+        '<(obfuscated_jar_path).mapping',
+        '<(obfuscated_jar_path).usage',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/apk_obfuscate.py',
+
+        '--configuration-name', '<(CONFIGURATION_NAME)',
+
+        '--android-sdk', '<(android_sdk)',
+        '--android-sdk-tools', '<(android_sdk_tools)',
+        '--android-sdk-jar', '<(android_sdk_jar)',
+
+        '--input-jars-paths=>(proguard_input_jar_paths)',
+        '--proguard-configs=>(proguard_flags_paths)',
+
+        '--test-jar-path', '<(test_jar_path)',
+        '--obfuscated-jar-path', '<(obfuscated_jar_path)',
+
+        '--proguard-jar-path', '<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+
+        '--stamp', '<(obfuscate_stamp)',
+
+        '>@(additional_obfuscate_options)',
+      ],
+    },
+    {
+      'action_name': 'dex_<(_target_name)',
+      'variables': {
+        'dex_input_paths': [
+          '>@(library_dexed_jars_paths)',
+          '<(jar_path)',
+        ],
+        'output_path': '<(dex_path)',
+        'proguard_enabled_input_path': '<(obfuscated_jar_path)',
+      },
+      'target_conditions': [
+        ['emma_instrument != 0', {
+          'variables': {
+            'dex_no_locals': 1,
+            'dex_input_paths': [
+              '<(emma_device_jar)'
+            ],
+          },
+        }],
+        ['is_test_apk == 1 and tested_apk_dex_path != "/"', {
+          'variables': {
+            'dex_additional_options': [
+              '--excluded-paths', '@FileArg(>(tested_apk_dex_path).inputs)'
+            ],
+          },
+          'inputs': [
+            '>(tested_apk_dex_path).inputs',
+          ],
+        }],
+        ['proguard_enabled == "true"', {
+          'inputs': [ '<(obfuscate_stamp)' ]
+        }, {
+          'inputs': [ '<(instr_stamp)' ]
+        }],
+      ],
+      'includes': [ 'android/dex_action.gypi' ],
+    },
+    {
+      'variables': {
+        'extra_inputs': ['<(codegen_stamp)'],
+        'resource_zips': [
+          '<(resource_zip_path)',
+        ],
+        'conditions': [
+          ['is_test_apk == 0', {
+            'resource_zips': [
+              '>@(dependencies_res_zip_paths)',
+            ],
+          }],
+        ],
+      },
+      'includes': [ 'android/package_resources_action.gypi' ],
+    },
+    {
+      'variables': {
+        'apk_path': '<(unsigned_apk_path)',
+        'conditions': [
+          ['native_lib_target != ""', {
+            'extra_inputs': ['<(native_lib_placeholder_stamp)'],
+          }],
+          ['create_abi_split == 0', {
+            'native_libs_dir': '<(apk_package_native_libs_dir)',
+          }, {
+            'native_libs_dir': '<(DEPTH)/build/android/ant/empty/res',
+          }],
+        ],
+      },
+      'includes': ['android/apkbuilder_action.gypi'],
+    },
+  ],
+}
diff --git a/build/java_prebuilt.gypi b/build/java_prebuilt.gypi
new file mode 100644
index 0000000..8efc4ef
--- /dev/null
+++ b/build/java_prebuilt.gypi
@@ -0,0 +1,102 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to package prebuilt Java JARs in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my-package_java',
+#   'type': 'none',
+#   'variables': {
+#     'jar_path': 'path/to/your.jar',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  jar_path - The path to the prebuilt Java JAR file.
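+#
+# Optional variables (a summary of the defaults set below, for reference):
+#  neverlink - If 1, dependents see the jar on their classpath, but no dex
+#    file is produced or exported for it.
+#  proguard_preprocess - If 1, the jar is preprocessed with proguard, using
+#    proguard_config as the configuration, before dexing.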
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/setup.gyp:build_output_dirs'
+  ],
+  'variables': {
+    'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+    'android_jar': '<(android_sdk)/android.jar',
+    'input_jars_paths': [ '<(android_jar)' ],
+    'neverlink%': 0,
+    'proguard_config%': '',
+    'proguard_preprocess%': '0',
+    'variables': {
+      'variables': {
+        'proguard_preprocess%': 0,
+      },
+      'conditions': [
+        ['proguard_preprocess == 1', {
+          'dex_input_jar_path': '<(intermediate_dir)/<(_target_name).pre.jar'
+        }, {
+          'dex_input_jar_path': '<(jar_path)'
+        }],
+      ],
+    },
+    'dex_input_jar_path': '<(dex_input_jar_path)',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['<(dex_input_jar_path)'],
+      'conditions': [
+        ['neverlink == 1', {
+          'library_dexed_jars_paths': [],
+        }, {
+          'library_dexed_jars_paths': ['<(dex_path)'],
+        }],
+      ],
+    },
+  },
+  'conditions' : [
+    ['proguard_preprocess == 1', {
+      'actions': [
+        {
+          'action_name': 'proguard_<(_target_name)',
+          'message': 'Proguard preprocessing <(_target_name) jar',
+          'inputs': [
+            '<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/proguard.py',
+            '<(jar_path)',
+            '<(proguard_config)',
+          ],
+          'outputs': [
+            '<(dex_input_jar_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/proguard.py',
+            '--proguard-path=<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '--input-path=<(jar_path)',
+            '--output-path=<(dex_input_jar_path)',
+            '--proguard-config=<(proguard_config)',
+            '--classpath=>(input_jars_paths)',
+          ]
+        },
+      ],
+    }],
+    ['neverlink == 0', {
+      'actions': [
+        {
+          'action_name': 'dex_<(_target_name)',
+          'message': 'Dexing <(_target_name) jar',
+          'variables': {
+            'dex_input_paths': [
+              '<(dex_input_jar_path)',
+            ],
+            'output_path': '<(dex_path)',
+          },
+          'includes': [ 'android/dex_action.gypi' ],
+        },
+      ],
+    }],
+  ],
+}
diff --git a/build/java_strings_grd.gypi b/build/java_strings_grd.gypi
new file mode 100644
index 0000000..7534be5
--- /dev/null
+++ b/build/java_strings_grd.gypi
@@ -0,0 +1,62 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate localized strings.xml from a grd file.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my-package_strings_grd',
+#   'type': 'none',
+#   'variables': {
+#     'grd_file': 'path/to/grd/file',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  grd_file - The path to the grd file to use.
+{
+  'variables': {
+    'res_grit_dir': '<(INTERMEDIATE_DIR)/<(_target_name)/res_grit',
+    'grit_grd_file': '<(grd_file)',
+    'resource_zip_path': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+    'grit_additional_defines': ['-E', 'ANDROID_JAVA_TAGGED_ONLY=false'],
+    'grit_out_dir': '<(res_grit_dir)',
+    # resource_ids is unneeded since we don't generate .h headers.
+    'grit_resource_ids': '',
+    'grit_outputs': [
+      '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+          '--outputs \'<(grit_out_dir)\' '
+          '<(grit_grd_file) -f "<(grit_resource_ids)")',
+    ],
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'additional_input_paths': ['<(resource_zip_path)'],
+      'dependencies_res_zip_paths': ['<(resource_zip_path)'],
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'generate_localized_strings_xml',
+      'includes': ['../build/grit_action.gypi'],
+    },
+    {
+      'action_name': 'create_resources_zip',
+      'inputs': [
+          '<(DEPTH)/build/android/gyp/zip.py',
+          '<@(grit_outputs)',
+      ],
+      'outputs': [
+          '<(resource_zip_path)',
+      ],
+      'action': [
+          'python', '<(DEPTH)/build/android/gyp/zip.py',
+          '--input-dir', '<(res_grit_dir)',
+          '--output', '<(resource_zip_path)',
+      ],
+    }
+  ],
+}
diff --git a/build/jni_generator.gypi b/build/jni_generator.gypi
new file mode 100644
index 0000000..7a9e333
--- /dev/null
+++ b/build/jni_generator.gypi
@@ -0,0 +1,87 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate jni bindings for Java-files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'base_jni_headers',
+#    'type': 'none',
+#    'sources': [
+#      'android/java/src/org/chromium/base/BuildInfo.java',
+#      ...
+#      ...
+#      'android/java/src/org/chromium/base/SystemMessageHandler.java',
+#    ],
+#    'variables': {
+#      'jni_gen_package': 'base',
+#    },
+#    'includes': [ '../build/jni_generator.gypi' ],
+#  },
+#
+# The generated file name pattern can be seen on the "outputs" section below.
+# (note that RULE_INPUT_ROOT is the basename for the java file).
+#
+# See base/android/jni_generator/jni_generator.py for more info about the
+# format of generating JNI bindings.
+
+{
+  'variables': {
+    'jni_generator': '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+    'jni_generator_jarjar_file%': '',
+    'jni_generator_ptr_type%': 'long',
+    # A comma separated string of include files.
+    'jni_generator_includes%': (
+        'base/android/jni_generator/jni_generator_helper.h'
+    ),
+    'native_exports%': '--native_exports_optional',
+  },
+  'rules': [
+    {
+      'rule_name': 'generate_jni_headers',
+      'extension': 'java',
+      'inputs': [
+        '<(jni_generator)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni/<(RULE_INPUT_ROOT)_jni.h',
+      ],
+      'action': [
+        '<(jni_generator)',
+        '--input_file',
+        '<(RULE_INPUT_PATH)',
+        '--output_dir',
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni',
+        '--includes',
+        '<(jni_generator_includes)',
+        '--optimize_generation',
+        '<(optimize_jni_generation)',
+        '--jarjar',
+        '<(jni_generator_jarjar_file)',
+        '--ptr_type',
+        '<(jni_generator_ptr_type)',
+        '<(native_exports)',
+      ],
+      'message': 'Generating JNI bindings from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+      'conditions': [
+        ['jni_generator_jarjar_file != ""', {
+          'inputs': [
+            '<(jni_generator_jarjar_file)',
+          ],
+        }]
+      ],
+    },
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)',
+    ],
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
+
diff --git a/build/json_schema_api.gni b/build/json_schema_api.gni
new file mode 100644
index 0000000..e1c2d33
--- /dev/null
+++ b/build/json_schema_api.gni
@@ -0,0 +1,242 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines a static library corresponding to the output of schema compiler tools
+# over a set of extensions API schemas (IDL or JSON format). The library target
+# has implicit hard dependencies on all schema files listed by the invoker and
+# is itself a hard dependency.
+#
+# Invocations of this template may use the following variables:
+#
+# sources [required] A list of schema files to be compiled.
+#
+# root_namespace [required]
+#     A Python string substitution pattern used to generate the C++
+#     namespace for each API. Use %(namespace)s to replace with the API
+#     namespace, like "toplevel::%(namespace)s_api".
+#
+# schema_include_rules [optional]
+#     A list of paths to include when searching for referenced objects,
+#     with the namespace separated by a :.
+#     Example:
+#       [ '/foo/bar:Foo::Bar::%(namespace)s' ]
+#
+# schemas [optional, default = false]
+#   Boolean indicating if the schema files should be generated.
+#
+# bundle [optional, default = false]
+#   Boolean indicating if the schema bundle files should be generated.
+#
+# bundle_registration [optional, default = false]
+#   Boolean indicating if the API registration bundle files should be generated.
+#
+# impl_dir [required if bundle_registration = true, otherwise unused]
+#   The path containing C++ implementations of API functions. This path is
+#   used as the root path when looking for {schema}/{schema}_api.h headers
+#   when generating API registration bundles. Such headers, if found, are
+#   automatically included by the generated code.
+#
+# uncompiled_sources [optional, only used when bundle = true or
+#     bundle_registration = true]
+#   A list of schema files which should not be compiled, but which should still
+#   be processed for API bundle generation.
+#
+# deps [optional]
+#   If any deps are specified they will be inherited by the static library
+#   target.
+#
+# generate_static_library [optional, defaults to false]
+#   Produces a static library instead of a source_set.
+#
+# The generated library target also inherits the visibility and output_name
+# of its invoker.
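+#
+# A minimal invocation might look like this (the target and schema names are
+# illustrative, not taken from this repository):
+#
+#   json_schema_api("tabs_api") {
+#     sources = [ "tabs.json" ]
+#     root_namespace = "extensions::api::%(namespace)s"
+#     schemas = true
+#     bundle = true
+#   }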
+
+template("json_schema_api") {
+  assert(defined(invoker.sources),
+         "\"sources\" must be defined for the $target_name template.")
+  assert(defined(invoker.root_namespace),
+         "\"root_namespace\" must be defined for the $target_name template.")
+
+  schemas = defined(invoker.schemas) && invoker.schemas
+  bundle = defined(invoker.bundle) && invoker.bundle
+  bundle_registration =
+      defined(invoker.bundle_registration) && invoker.bundle_registration
+
+  schema_include_rules = ""
+  if (defined(invoker.schema_include_rules)) {
+    schema_include_rules = invoker.schema_include_rules
+  }
+
+  # Keep a copy of the target_name here since it will be trampled
+  # in nested targets.
+  target_visibility = [ ":$target_name" ]
+
+  generated_config_name = target_name + "_generated_config"
+  config(generated_config_name) {
+    include_dirs = [ root_gen_dir ]
+    visibility = target_visibility
+  }
+
+  root_namespace = invoker.root_namespace
+
+  compiler_root = "//tools/json_schema_compiler"
+  compiler_script = "$compiler_root/compiler.py"
+  compiler_sources = [
+    "$compiler_root/cc_generator.py",
+    "$compiler_root/code.py",
+    "$compiler_root/compiler.py",
+    "$compiler_root/cpp_generator.py",
+    "$compiler_root/cpp_type_generator.py",
+    "$compiler_root/cpp_util.py",
+    "$compiler_root/h_generator.py",
+    "$compiler_root/idl_schema.py",
+    "$compiler_root/model.py",
+    "$compiler_root/util_cc_helper.py",
+  ]
+
+  if (schemas) {
+    schema_generator_name = target_name + "_schema_generator"
+    action_foreach(schema_generator_name) {
+      script = compiler_script
+      sources = invoker.sources
+      inputs = compiler_sources
+      outputs = [
+        "$target_gen_dir/{{source_name_part}}.cc",
+        "$target_gen_dir/{{source_name_part}}.h",
+      ]
+      args = [
+        "{{source}}",
+        "--root=" + rebase_path("//", root_build_dir),
+        "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
+        "--namespace=$root_namespace",
+        "--generator=cpp",
+        "--include-rules=$schema_include_rules",
+      ]
+
+      if (defined(invoker.visibility)) {
+        # If visibility is restricted, add our own target to it.
+        visibility = invoker.visibility + target_visibility
+      }
+    }
+  }
+
+  if (bundle) {
+    uncompiled_sources = []
+    if (defined(invoker.uncompiled_sources)) {
+      uncompiled_sources = invoker.uncompiled_sources
+    }
+
+    bundle_generator_schema_name = target_name + "_bundle_generator_schema"
+    action(bundle_generator_schema_name) {
+      script = compiler_script
+      inputs = compiler_sources + invoker.sources + uncompiled_sources
+      outputs = [
+        "$target_gen_dir/generated_schemas.cc",
+        "$target_gen_dir/generated_schemas.h",
+      ]
+      args = [
+               "--root=" + rebase_path("//", root_build_dir),
+               "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
+               "--namespace=$root_namespace",
+               "--generator=cpp-bundle-schema",
+               "--include-rules=$schema_include_rules",
+             ] + rebase_path(invoker.sources, root_build_dir) +
+             rebase_path(uncompiled_sources, root_build_dir)
+    }
+  }
+
+  if (bundle_registration) {
+    uncompiled_sources = []
+    if (defined(invoker.uncompiled_sources)) {
+      uncompiled_sources = invoker.uncompiled_sources
+    }
+
+    assert(defined(invoker.impl_dir),
+           "\"impl_dir\" must be defined for the $target_name template.")
+
+    # Child directory inside the generated file tree.
+    gen_child_dir = rebase_path(invoker.impl_dir, "//")
+
+    bundle_generator_registration_name =
+        target_name + "_bundle_generator_registration"
+    action(bundle_generator_registration_name) {
+      script = compiler_script
+      inputs = compiler_sources + invoker.sources + uncompiled_sources
+      outputs = [
+        "$root_gen_dir/$gen_child_dir/generated_api_registration.cc",
+        "$root_gen_dir/$gen_child_dir/generated_api_registration.h",
+      ]
+      args = [
+               "--root=" + rebase_path("//", root_build_dir),
+               "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
+               "--namespace=$root_namespace",
+               "--generator=cpp-bundle-registration",
+               "--impl-dir=$gen_child_dir",
+               "--include-rules=$schema_include_rules",
+             ] + rebase_path(invoker.sources, root_build_dir) +
+             rebase_path(uncompiled_sources, root_build_dir)
+    }
+  }
+
+  # Compute the contents of the library/source set.
+  lib_sources = invoker.sources
+  lib_deps = []
+  lib_public_deps = []
+  lib_extra_configs = []
+
+  if (schemas) {
+    lib_sources += get_target_outputs(":$schema_generator_name")
+    lib_public_deps += [ ":$schema_generator_name" ]
+    lib_deps += [ "//tools/json_schema_compiler:generated_api_util" ]
+    lib_extra_configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
+  }
+
+  if (bundle) {
+    lib_sources += get_target_outputs(":$bundle_generator_schema_name")
+    lib_deps += [ ":$bundle_generator_schema_name" ]
+  }
+
+  if (bundle_registration) {
+    lib_sources += get_target_outputs(":$bundle_generator_registration_name")
+    lib_deps += [ ":$bundle_generator_registration_name" ]
+  }
+
+  if (defined(invoker.deps)) {
+    lib_deps += invoker.deps
+  }
+
+  # Generate either a static library or a source set.
+  if (defined(invoker.generate_static_library) &&
+      invoker.generate_static_library) {
+    static_library(target_name) {
+      sources = lib_sources
+      deps = lib_deps
+      public_deps = lib_public_deps
+      configs += lib_extra_configs
+      public_configs = [ ":$generated_config_name" ]
+
+      if (defined(invoker.visibility)) {
+        visibility = invoker.visibility
+      }
+      if (defined(invoker.output_name)) {
+        output_name = invoker.output_name
+      }
+    }
+  } else {
+    source_set(target_name) {
+      sources = lib_sources
+      deps = lib_deps
+      public_deps = lib_public_deps
+      configs += lib_extra_configs
+      public_configs = [ ":$generated_config_name" ]
+
+      if (defined(invoker.visibility)) {
+        visibility = invoker.visibility
+      }
+      if (defined(invoker.output_name)) {
+        output_name = invoker.output_name
+      }
+    }
+  }
+}
diff --git a/build/json_schema_bundle_compile.gypi b/build/json_schema_bundle_compile.gypi
new file mode 100644
index 0000000..a302013
--- /dev/null
+++ b/build/json_schema_bundle_compile.gypi
@@ -0,0 +1,83 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_files:
+    #     An array of json or idl files that comprise the api model.
+    #   schema_include_rules (optional):
+    #     An array of paths to include when searching for referenced objects,
+    #     with the namespace separated by a :.
+    #     Example:
+    #       [ '/foo/bar:Foo::Bar::%(namespace)s' ]
+    #   cc_dir:
+    #     The directory to put the generated code in.
+    #   root_namespace:
+    #     A Python string substitution pattern used to generate the C++
+    #     namespace for each API. Use %(namespace)s to replace with the API
+    #     namespace, like "toplevel::%(namespace)s_api".
+    #
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
+    # The default root path of API implementation sources is
+    # chrome/browser/extensions/api and can be overridden by setting "impl_dir".
+    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+    'api_gen': '<(api_gen_dir)/compiler.py',
+    'generator_files': [
+      '<(api_gen_dir)/cc_generator.py',
+      '<(api_gen_dir)/code.py',
+      '<(api_gen_dir)/compiler.py',
+      '<(api_gen_dir)/cpp_bundle_generator.py',
+      '<(api_gen_dir)/cpp_type_generator.py',
+      '<(api_gen_dir)/cpp_util.py',
+      '<(api_gen_dir)/h_generator.py',
+      '<(api_gen_dir)/idl_schema.py',
+      '<(api_gen_dir)/json_schema.py',
+      '<(api_gen_dir)/model.py',
+      '<(api_gen_dir)/util_cc_helper.py',
+    ],
+    'schema_include_rules': [],
+  },
+  'actions': [
+    {
+      'action_name': 'genapi_bundle_schema',
+      'inputs': [
+        '<@(generator_files)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.h',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.cc',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp-bundle-schema',
+        '--include-rules=<(schema_include_rules)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'message': 'Generating C++ API bundle code for schemas',
+      'process_outputs_as_sources': 1,
+      # Avoid running MIDL compiler on IDL input files.
+      'explicit_idl_action': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ]
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/json_schema_bundle_registration_compile.gypi b/build/json_schema_bundle_registration_compile.gypi
new file mode 100644
index 0000000..8c5af4e
--- /dev/null
+++ b/build/json_schema_bundle_registration_compile.gypi
@@ -0,0 +1,78 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_files:
+    #     An array of json or idl files that comprise the api model.
+    #   impl_dir_:
+    #     The root path of API implementations; also used for the
+    #     output location. (N.B. Named as such to prevent gyp from
+    #     expanding it as a relative path.)
+    #   root_namespace:
+    #     A Python string substitution pattern used to generate the C++
+    #     namespace for each API. Use %(namespace)s to replace with the API
+    #     namespace, like "toplevel::%(namespace)s_api".
+    #
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
+    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+    'api_gen': '<(api_gen_dir)/compiler.py',
+    'generator_files': [
+      '<(api_gen_dir)/cc_generator.py',
+      '<(api_gen_dir)/code.py',
+      '<(api_gen_dir)/compiler.py',
+      '<(api_gen_dir)/cpp_bundle_generator.py',
+      '<(api_gen_dir)/cpp_type_generator.py',
+      '<(api_gen_dir)/cpp_util.py',
+      '<(api_gen_dir)/h_generator.py',
+      '<(api_gen_dir)/idl_schema.py',
+      '<(api_gen_dir)/json_schema.py',
+      '<(api_gen_dir)/model.py',
+      '<(api_gen_dir)/util_cc_helper.py',
+    ],
+  },
+  'actions': [
+    {
+      # GN version: json_schema_api.gni
+      'action_name': 'genapi_bundle_registration',
+      'inputs': [
+        '<@(generator_files)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(impl_dir_)/generated_api_registration.h',
+        '<(SHARED_INTERMEDIATE_DIR)/<(impl_dir_)/generated_api_registration.cc',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp-bundle-registration',
+        '--impl-dir=<(impl_dir_)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'message': 'Generating C++ API bundle code for function registration',
+      'process_outputs_as_sources': 1,
+      # Avoid running MIDL compiler on IDL input files.
+      'explicit_idl_action': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ]
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/json_schema_compile.gypi b/build/json_schema_compile.gypi
new file mode 100644
index 0000000..6e5727a
--- /dev/null
+++ b/build/json_schema_compile.gypi
@@ -0,0 +1,123 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_files:
+    #     An array of json or idl files that comprise the api model.
+    #   schema_include_rules (optional):
+    #     An array of paths to include when searching for referenced objects,
+    #     with the namespace separated by a :.
+    #     Example:
+    #       [ '/foo/bar:Foo::Bar::%(namespace)s' ]
+    #   cc_dir:
+    #     The directory to put the generated code in.
+    #   root_namespace:
+    #     A Python string substitution pattern used to generate the C++
+    #     namespace for each API. Use %(namespace)s to replace with the API
+    #     namespace, like "toplevel::%(namespace)s_api".
+    #
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
+    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+    'api_gen': '<(api_gen_dir)/compiler.py',
+    'schema_include_rules': [],
+  },
+  'rules': [
+    {
+      # GN version: json_schema_api.gni
+      'rule_name': 'genapi',
+      'msvs_external_rule': 1,
+      'extension': 'json',
+      'inputs': [
+        '<(api_gen_dir)/cc_generator.py',
+        '<(api_gen_dir)/code.py',
+        '<(api_gen_dir)/compiler.py',
+        '<(api_gen_dir)/cpp_generator.py',
+        '<(api_gen_dir)/cpp_type_generator.py',
+        '<(api_gen_dir)/cpp_util.py',
+        '<(api_gen_dir)/h_generator.py',
+        '<(api_gen_dir)/json_schema.py',
+        '<(api_gen_dir)/model.py',
+        '<(api_gen_dir)/util.cc',
+        '<(api_gen_dir)/util.h',
+        '<(api_gen_dir)/util_cc_helper.py',
+        # TODO(calamity): uncomment this when gyp on windows behaves like other
+        # platforms. List expansions of filepaths in inputs expand to different
+        # things.
+        # '<@(schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).h',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp',
+        '--include-rules=<(schema_include_rules)'
+      ],
+      'message': 'Generating C++ code from <(RULE_INPUT_PATH) json files',
+      'process_outputs_as_sources': 1,
+    },
+    {
+      'rule_name': 'genapi_idl',
+      'msvs_external_rule': 1,
+      'extension': 'idl',
+      'inputs': [
+        '<(api_gen_dir)/cc_generator.py',
+        '<(api_gen_dir)/code.py',
+        '<(api_gen_dir)/compiler.py',
+        '<(api_gen_dir)/cpp_generator.py',
+        '<(api_gen_dir)/cpp_type_generator.py',
+        '<(api_gen_dir)/cpp_util.py',
+        '<(api_gen_dir)/h_generator.py',
+        '<(api_gen_dir)/idl_schema.py',
+        '<(api_gen_dir)/model.py',
+        '<(api_gen_dir)/util.cc',
+        '<(api_gen_dir)/util.h',
+        '<(api_gen_dir)/util_cc_helper.py',
+        # TODO(calamity): uncomment this when gyp on windows behaves like other
+        # platforms. List expansions of filepaths in inputs expand to different
+        # things.
+        # '<@(schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).h',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp',
+        '--include-rules=<(schema_include_rules)'
+      ],
+      'message': 'Generating C++ code from <(RULE_INPUT_PATH) IDL files',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  'dependencies':[
+    '<(DEPTH)/tools/json_schema_compiler/api_gen_util.gyp:api_gen_util',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ]
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/json_to_struct.gypi b/build/json_to_struct.gypi
new file mode 100644
index 0000000..09c8e3e
--- /dev/null
+++ b/build/json_to_struct.gypi
@@ -0,0 +1,53 @@
+# Copyright 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_file: a json file that comprises the structure model.
+    #   namespace: the C++ namespace that all generated files go under
+    #   cc_dir: path to generated files
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
+    'struct_gen_dir': '<(DEPTH)/tools/json_to_struct',
+    'struct_gen%': '<(struct_gen_dir)/json_to_struct.py',
+    'output_filename%': '<(RULE_INPUT_ROOT)',
+  },
+  'rules': [
+    {
+      # GN version: //tools/json_to_struct/json_to_struct.gni
+      'rule_name': 'genstaticinit',
+      'extension': 'json',
+      'inputs': [
+        '<(struct_gen)',
+        '<(struct_gen_dir)/element_generator.py',
+        '<(struct_gen_dir)/json_to_struct.py',
+        '<(struct_gen_dir)/struct_generator.py',
+        '<(schema_file)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(output_filename).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(output_filename).h',
+      ],
+      'action': [
+        'python',
+        '<(struct_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--destbase=<(SHARED_INTERMEDIATE_DIR)',
+        '--destdir=<(cc_dir)',
+        '--namespace=<(namespace)',
+        '--schema=<(schema_file)',
+        '--output=<(output_filename)',
+      ],
+      'message': 'Generating C++ static initializers from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/landmine_utils.py b/build/landmine_utils.py
new file mode 100644
index 0000000..6d18b6d
--- /dev/null
+++ b/build/landmine_utils.py
@@ -0,0 +1,120 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import functools
+import logging
+import os
+import shlex
+import sys
+
+
+def memoize(default=None):
+  """This decorator caches the return value of a parameterless pure function"""
+  def memoizer(func):
+    val = []
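+    # A one-element list is used as the cache so the nested function can
+    # mutate it; Python 2 has no 'nonlocal'.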
+    @functools.wraps(func)
+    def inner():
+      if not val:
+        ret = func()
+        val.append(ret if ret is not None else default)
+        if logging.getLogger().isEnabledFor(logging.INFO):
+          print '%s -> %r' % (func.__name__, val[0])
+      return val[0]
+    return inner
+  return memoizer
+
+
+@memoize()
+def IsWindows():
+  return sys.platform in ['win32', 'cygwin']
+
+
+@memoize()
+def IsLinux():
+  return sys.platform.startswith(('linux', 'freebsd', 'openbsd'))
+
+
+@memoize()
+def IsMac():
+  return sys.platform == 'darwin'
+
+
+@memoize()
+def gyp_defines():
+  """Parses and returns GYP_DEFINES env var as a dictionary."""
+  return dict(arg.split('=', 1)
+      for arg in shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+@memoize()
+def gyp_generator_flags():
+  """Parses and returns GYP_GENERATOR_FLAGS env var as a dictionary."""
+  return dict(arg.split('=', 1)
+      for arg in shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', '')))
+
+@memoize()
+def gyp_msvs_version():
+  return os.environ.get('GYP_MSVS_VERSION', '')
+
+@memoize()
+def distributor():
+  """
+  Returns a string which is the distributed build engine in use (if any).
+  Possible values: 'goma', 'ib', ''
+  """
+  if 'goma' in gyp_defines():
+    return 'goma'
+  elif IsWindows():
+    if 'CHROME_HEADLESS' in os.environ:
+      return 'ib'  # use (win and !goma and headless) as approximation of ib
+  return ''
+
+
+@memoize()
+def platform():
+  """
+  Returns a string representing the platform this build is targeted for.
+  Possible values: 'win', 'mac', 'linux', 'ios', 'android'
+  """
+  if 'OS' in gyp_defines():
+    if 'android' in gyp_defines()['OS']:
+      return 'android'
+    else:
+      return gyp_defines()['OS']
+  elif IsWindows():
+    return 'win'
+  elif IsLinux():
+    return 'linux'
+  else:
+    return 'mac'
+
+
+@memoize()
+def builder():
+  """
+  Returns a string representing the build engine (not compiler) to use.
+  Possible values: 'make', 'ninja', 'xcode', 'msvs', 'scons'
+  """
+  if 'GYP_GENERATORS' in os.environ:
+    # for simplicity, only support the first explicit generator
+    generator = os.environ['GYP_GENERATORS'].split(',')[0]
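+    # e.g. 'make-android' maps to 'make' and 'msvs-ninja' to 'ninja'
+    # (illustrative values).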
+    if generator.endswith('-android'):
+      return generator.split('-')[0]
+    elif generator.endswith('-ninja'):
+      return 'ninja'
+    else:
+      return generator
+  else:
+    if platform() == 'android':
+      # Good enough for now? Do any android bots use make?
+      return 'ninja'
+    elif platform() == 'ios':
+      return 'xcode'
+    elif IsWindows():
+      return 'ninja'
+    elif IsLinux():
+      return 'ninja'
+    elif IsMac():
+      return 'ninja'
+    else:
+      assert False, 'Don\'t know what builder we\'re using!'
diff --git a/build/landmines.py b/build/landmines.py
new file mode 100755
index 0000000..0ea2b64
--- /dev/null
+++ b/build/landmines.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script runs every build as the first hook (See DEPS). If it detects that
+the build should be clobbered, it will delete the contents of the build
+directory.
+
+A landmine is tripped when a builder checks out a different revision, and the
+diff between the new landmines and the old ones is non-null. At this point, the
+build is clobbered.
+"""
+
+import difflib
+import errno
+import logging
+import optparse
+import os
+import sys
+import subprocess
+import time
+
+import clobber
+import landmine_utils
+
+
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+
+def get_build_dir(build_tool, is_iphone=False):
+  """
+  Returns output directory absolute path dependent on build and targets.
+  Examples:
+    r'c:\b\build\slave\win\build\src\out'
+    '/mnt/data/b/build/slave/linux/build/src/out'
+    '/b/build/slave/ios_rel_device/build/src/xcodebuild'
+
+  Keep this function in sync with tools/build/scripts/slave/compile.py
+  """
+  ret = None
+  if build_tool == 'xcode':
+    ret = os.path.join(SRC_DIR, 'xcodebuild')
+  elif build_tool in ['make', 'ninja', 'ninja-ios']:  # TODO: Remove ninja-ios.
+    if 'CHROMIUM_OUT_DIR' in os.environ:
+      output_dir = os.environ.get('CHROMIUM_OUT_DIR').strip()
+      if not output_dir:
+        raise RuntimeError(
+            'CHROMIUM_OUT_DIR environment variable is set but blank!')
+    else:
+      output_dir = landmine_utils.gyp_generator_flags().get('output_dir', 'out')
+    ret = os.path.join(SRC_DIR, output_dir)
+  else:
+    raise NotImplementedError('Unexpected GYP_GENERATORS (%s)' % build_tool)
+  return os.path.abspath(ret)
+
+
+def clobber_if_necessary(new_landmines):
+  """Does the work of setting, planting, and triggering landmines."""
+  out_dir = get_build_dir(landmine_utils.builder())
+  landmines_path = os.path.normpath(os.path.join(out_dir, '..', '.landmines'))
+  try:
+    os.makedirs(out_dir)
+  except OSError as e:
+    # An already-existing directory is fine; surface any other error.
+    if e.errno != errno.EEXIST:
+      raise
+
+  if os.path.exists(landmines_path):
+    with open(landmines_path, 'r') as f:
+      old_landmines = f.readlines()
+    if old_landmines != new_landmines:
+      old_date = time.ctime(os.stat(landmines_path).st_ctime)
+      diff = difflib.unified_diff(old_landmines, new_landmines,
+          fromfile='old_landmines', tofile='new_landmines',
+          fromfiledate=old_date, tofiledate=time.ctime(), n=0)
+      sys.stdout.write('Clobbering due to:\n')
+      sys.stdout.writelines(diff)
+
+      clobber.clobber(out_dir)
+
+  # Save current set of landmines for next time.
+  with open(landmines_path, 'w') as f:
+    f.writelines(new_landmines)
+
+
+def process_options():
+  """Returns a list of landmine emitting scripts."""
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '-s', '--landmine-scripts', action='append',
+      default=[os.path.join(SRC_DIR, 'build', 'get_landmines.py')],
+      help='Path to a script which emits landmines to stdout. An extra '
+           'script can also be specified via the env var '
+           'EXTRA_LANDMINES_SCRIPT.')
+  parser.add_option('-v', '--verbose', action='store_true',
+      default=('LANDMINES_VERBOSE' in os.environ),
+      help=('Emit some extra debugging information (default off). This option '
+          'is also enabled by the presence of a LANDMINES_VERBOSE environment '
+          'variable.'))
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('Unknown arguments %s' % args)
+
+  logging.basicConfig(
+      level=logging.DEBUG if options.verbose else logging.ERROR)
+
+  extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
+  if extra_script:
+    return options.landmine_scripts + [extra_script]
+  else:
+    return options.landmine_scripts
+
+
+def main():
+  landmine_scripts = process_options()
+
+  if landmine_utils.builder() in ('dump_dependency_json', 'eclipse'):
+    return 0
+
+  landmines = []
+  for s in landmine_scripts:
+    proc = subprocess.Popen([sys.executable, s], stdout=subprocess.PIPE)
+    output, _ = proc.communicate()
+    landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
+  clobber_if_necessary(landmines)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/linux/OWNERS b/build/linux/OWNERS
new file mode 100644
index 0000000..4a60b79
--- /dev/null
+++ b/build/linux/OWNERS
@@ -0,0 +1,3 @@
+mmoss@chromium.org
+phajdan.jr@chromium.org
+thestig@chromium.org
diff --git a/build/linux/bin/eu-strip b/build/linux/bin/eu-strip
new file mode 100755
index 0000000..7f93eec
--- /dev/null
+++ b/build/linux/bin/eu-strip
Binary files differ
diff --git a/build/linux/bin/eu-strip.sha1 b/build/linux/bin/eu-strip.sha1
new file mode 100644
index 0000000..43f290a7
--- /dev/null
+++ b/build/linux/bin/eu-strip.sha1
@@ -0,0 +1 @@
+0a9b8f68615ce388b65201e6d22da7a9cf2e729c
\ No newline at end of file
diff --git a/build/linux/chrome_linux.croc b/build/linux/chrome_linux.croc
new file mode 100644
index 0000000..f400306
--- /dev/null
+++ b/build/linux/chrome_linux.croc
@@ -0,0 +1,29 @@
+# -*- python -*-
+# Crocodile config file for Chromium linux
+
+# TODO(jhawkins): We'll need to add a chromeos.croc once we get a coverage bot
+# for that platform.
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include non-Linux platform dirs
+    {
+      'regexp' : '.*/(chromeos|views)/',
+      'include' : 0,
+    },
+    # Don't include chromeos, windows, or mac specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|mac|win|views)(\\.|_)',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_linux\\.',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/linux/dump_app_syms.py b/build/linux/dump_app_syms.py
new file mode 100644
index 0000000..c18bff7
--- /dev/null
+++ b/build/linux/dump_app_syms.py
@@ -0,0 +1,29 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Helper script to run dump_syms on Chrome Linux executables and strip
+# them if needed.
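+#
+# Usage (argument values are illustrative):
+#   python dump_app_syms.py ./dump_syms 1 out/Release/chrome chrome.sym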
+
+import os
+import subprocess
+import sys
+
+if len(sys.argv) != 5:
+  print "dump_app_syms.py <dump_syms_exe> <strip_binary>"
+  print "                 <binary_with_symbols> <symbols_output>"
+  sys.exit(1)
+
+dumpsyms = sys.argv[1]
+strip_binary = sys.argv[2]
+infile = sys.argv[3]
+outfile = sys.argv[4]
+
+# Dump only when the output file is out-of-date.
+if not os.path.isfile(outfile) or \
+   os.stat(outfile).st_mtime > os.stat(infile).st_mtime:
+  with open(outfile, 'w') as outfileobj:
+    subprocess.check_call([dumpsyms, '-r', infile], stdout=outfileobj)
+
+if strip_binary != '0':
+  subprocess.check_call(['strip', infile])
diff --git a/build/linux/install-chromeos-fonts.py b/build/linux/install-chromeos-fonts.py
new file mode 100755
index 0000000..a24adc9
--- /dev/null
+++ b/build/linux/install-chromeos-fonts.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install the Chrome OS fonts on Linux.
+# This script can be run manually (as root), but is also run as part of
+# install-build-deps.sh.
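+#
+# Usage (must be run as root):
+#   sudo ./install-chromeos-fonts.py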
+
+import os
+import shutil
+import subprocess
+import sys
+
+# Taken from the media-fonts/notofonts ebuild in chromiumos-overlay.
+VERSION = '20140815'
+URL = ('https://commondatastorage.googleapis.com/chromeos-localmirror/'
+       'distfiles/notofonts-%s.tar.bz2') % (VERSION)
+FONTS_DIR = '/usr/local/share/fonts'
+
+def main(args):
+  if not sys.platform.startswith('linux'):
+    print "Error: %s must be run on Linux." % __file__
+    return 1
+
+  if os.getuid() != 0:
+    print "Error: %s must be run as root." % __file__
+    return 1
+
+  if not os.path.isdir(FONTS_DIR):
+    print "Error: Destination directory does not exist: %s" % FONTS_DIR
+    return 1
+
+  dest_dir = os.path.join(FONTS_DIR, 'chromeos')
+
+  stamp = os.path.join(dest_dir, ".stamp02")
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == URL:
+        print "Chrome OS fonts already up-to-date in %s." % dest_dir
+        return 0
+
+  if os.path.isdir(dest_dir):
+    shutil.rmtree(dest_dir)
+  os.mkdir(dest_dir)
+  os.chmod(dest_dir, 0755)
+
+  print "Installing Chrome OS fonts to %s." % dest_dir
+  tarball = os.path.join(dest_dir, os.path.basename(URL))
+  subprocess.check_call(['curl', '-L', URL, '-o', tarball])
+  subprocess.check_call(['tar', '--no-same-owner', '--no-same-permissions',
+                         '-xf', tarball, '-C', dest_dir])
+  os.remove(tarball)
+
+  readme = os.path.join(dest_dir, "README")
+  with open(readme, 'w') as s:
+    s.write("This directory and its contents are auto-generated.\n")
+    s.write("It may be deleted and recreated. Do not modify.\n")
+    s.write("Script: %s\n" % __file__)
+
+  with open(stamp, 'w') as s:
+    s.write(URL)
+
+  for base, dirs, files in os.walk(dest_dir):
+    for dir in dirs:
+      os.chmod(os.path.join(base, dir), 0755)
+    for file in files:
+      os.chmod(os.path.join(base, file), 0644)
+
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/linux/pkg-config-wrapper b/build/linux/pkg-config-wrapper
new file mode 100755
index 0000000..b759564
--- /dev/null
+++ b/build/linux/pkg-config-wrapper
@@ -0,0 +1,59 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This program wraps around pkg-config to generate the correct include and
+# library paths when cross-compiling using a sysroot.
+# The assumption is that the sysroot contains the .pc files in usr/lib/pkgconfig
+# and usr/share/pkgconfig (relative to the sysroot) and that they output paths
+# relative to some parent path of the sysroot.
+# This assumption is valid for a range of sysroots, in particular: a
+# LSB-compliant root filesystem mounted at the sysroot, and a board build
+# directory of a Chromium OS chroot.
+# Additional directories containing .pc files may be specified by setting
+# the PKG_CONFIG_PATH environment variable; these will be prepended to the
+# generated paths.
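+#
+# Example invocation (paths and package name are illustrative):
+#   pkg-config-wrapper /path/to/sysroot x64 lib64 --cflags --libs gtk+-2.0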
+
+root="$1"
+shift
+target_arch="$1"
+shift
+libpath="$1"
+shift
+
+if [ -z "$root" -o -z "$target_arch" ]
+then
+  echo "usage: $0 /path/to/sysroot target_arch libdir [pkg-config-arguments] package" >&2
+  exit 1
+fi
+
+if [ "$target_arch" = "x64" ]
+then
+  : ${libpath:="lib64"}
+else
+  : ${libpath:="lib"}
+fi
+
+rewrite=`dirname $0`/rewrite_dirs.py
+package=${!#}
+
+config_path=$root/usr/$libpath/pkgconfig:$root/usr/share/pkgconfig
+
+# prepend any paths specified by the environment
+if [ -n "$PKG_CONFIG_PATH" ]
+then
+  config_path="$PKG_CONFIG_PATH:$config_path"
+fi
+
+set -e
+# Some sysroots, like the Chromium OS ones, may generate paths that are not
+# relative to the sysroot. For example,
+# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all paths
+# relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) instead of
+# relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
+# To support this correctly, it's necessary to extract the prefix to strip from
+# pkg-config's |prefix| variable.
+prefix=`PKG_CONFIG_PATH=$config_path pkg-config --variable=prefix "$package" | sed -e 's|/usr$||'`
+result=`PKG_CONFIG_PATH=$config_path pkg-config "$@"`
+echo "$result"| $rewrite --sysroot "$root" --strip-prefix "$prefix"
diff --git a/build/linux/rewrite_dirs.py b/build/linux/rewrite_dirs.py
new file mode 100755
index 0000000..30f22f0
--- /dev/null
+++ b/build/linux/rewrite_dirs.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rewrites paths in -I, -L and other option to be relative to a sysroot."""
+
+import sys
+import os
+import optparse
+
+REWRITE_PREFIX = ['-I',
+                  '-idirafter',
+                  '-imacros',
+                  '-imultilib',
+                  '-include',
+                  '-iprefix',
+                  '-iquote',
+                  '-isystem',
+                  '-L']
+
+def RewritePath(path, opts):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  sysroot = opts.sysroot
+  prefix = opts.strip_prefix
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(prefix):
+      path = path[len(prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+def RewriteLine(line, opts):
+  """Rewrites all the paths in recognized options."""
+  args = line.split()
+  count = len(args)
+  i = 0
+  while i < count:
+    for prefix in REWRITE_PREFIX:
+      # The option can be either in the form "-I /path/to/dir" or
+      # "-I/path/to/dir" so handle both.
+      if args[i] == prefix:
+        i += 1
+        try:
+          args[i] = RewritePath(args[i], opts)
+        except IndexError:
+          sys.stderr.write('Missing argument following %s\n' % prefix)
+          break
+      elif args[i].startswith(prefix):
+        args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
+    i += 1
+
+  return ' '.join(args)
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
+  parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
+  opts, args = parser.parse_args(argv[1:])
+
+  for line in sys.stdin.readlines():
+    line = RewriteLine(line.strip(), opts)
+    print line
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/linux/sysroot_ld_path.sh b/build/linux/sysroot_ld_path.sh
new file mode 100755
index 0000000..4b8bf73
--- /dev/null
+++ b/build/linux/sysroot_ld_path.sh
@@ -0,0 +1,100 @@
+#!/bin/sh
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Reads etc/ld.so.conf and/or etc/ld.so.conf.d/*.conf and returns the
+# appropriate linker flags.
+#
+#  sysroot_ld_path.sh /abspath/to/sysroot
+#
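+# Each entry found is emitted as a pair of flags (path illustrative):
+#   -L/abspath/to/sysroot/usr/lib
+#   -Wl,-rpath-link=/abspath/to/sysroot/usr/lib
+#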
+
+log_error_and_exit() {
+  echo $0: $@
+  exit 1
+}
+
+process_entry() {
+  if [ -z "$1" ] || [ -z "$2" ]; then
+    log_error_and_exit "bad arguments to process_entry()"
+  fi
+  local root="$1"
+  local localpath="$2"
+
+  echo $localpath | grep -qs '^/'
+  if [ $? -ne 0 ]; then
+    log_error_and_exit $localpath does not start with /
+  fi
+  local entry="$root$localpath"
+  echo -L$entry
+  echo -Wl,-rpath-link=$entry
+}
+
+process_ld_so_conf() {
+  if [ -z "$1" ] || [ -z "$2" ]; then
+    log_error_and_exit "bad arguments to process_ld_so_conf()"
+  fi
+  local root="$1"
+  local ld_so_conf="$2"
+
+  # ld.so.conf may include relative include paths. pushd is a bashism.
+  local saved_pwd=$(pwd)
+  cd $(dirname "$ld_so_conf")
+
+  cat "$ld_so_conf" | \
+    while read ENTRY; do
+      echo "$ENTRY" | grep -qs ^include
+      if [ $? -eq 0 ]; then
+        local included_files=$(echo "$ENTRY" | sed 's/^include //')
+        echo "$included_files" | grep -qs ^/
+        if [ $? -eq 0 ]; then
+          if ls $root$included_files >/dev/null 2>&1 ; then
+            for inc_file in $root$included_files; do
+              process_ld_so_conf "$root" "$inc_file"
+            done
+          fi
+        else
+          if ls $(pwd)/$included_files >/dev/null 2>&1 ; then
+            for inc_file in $(pwd)/$included_files; do
+              process_ld_so_conf "$root" "$inc_file"
+            done
+          fi
+        fi
+        continue
+      fi
+
+      echo "$ENTRY" | grep -qs ^/
+      if [ $? -eq 0 ]; then
+        process_entry "$root" "$ENTRY"
+      fi
+    done
+
+  # popd is a bashism
+  cd "$saved_pwd"
+}
+
+# Main
+
+if [ $# -ne 1 ]; then
+  echo "Usage: $0 /abspath/to/sysroot"
+  exit 1
+fi
+
+echo $1 | grep -qs ' '
+if [ $? -eq 0 ]; then
+  log_error_and_exit $1 contains whitespace.
+fi
+
+LD_SO_CONF="$1/etc/ld.so.conf"
+LD_SO_CONF_D="$1/etc/ld.so.conf.d"
+
+if [ -e "$LD_SO_CONF" ]; then
+  process_ld_so_conf "$1" "$LD_SO_CONF" | xargs echo
+elif [ -e "$LD_SO_CONF_D" ]; then
+  find "$LD_SO_CONF_D" -maxdepth 1 -name '*.conf' -print -quit > /dev/null
+  if [ $? -eq 0 ]; then
+    for entry in $LD_SO_CONF_D/*.conf; do
+      process_ld_so_conf "$1" "$entry"
+    done | xargs echo
+  fi
+fi
diff --git a/build/linux/sysroot_scripts/install-sysroot.py b/build/linux/sysroot_scripts/install-sysroot.py
new file mode 100755
index 0000000..99fc2d6
--- /dev/null
+++ b/build/linux/sysroot_scripts/install-sysroot.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install a Debian Wheezy sysroot for making official Google Chrome
+# Linux builds.
+# The sysroot is needed to make Chrome work for Debian Wheezy.
+# This script can be run manually but is more often run as part of gclient
+# hooks. When run from hooks this script should be a no-op on non-Linux
+# platforms.
+
+# The sysroot image could be constructed from scratch based on the current
+# state of Debian Wheezy but for consistency we currently use a pre-built root
+# image. The image will normally need to be rebuilt every time Chrome's build
+# dependencies are changed.
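+#
+# Example of a manual run (illustrative):
+#   build/linux/sysroot_scripts/install-sysroot.py --arch=amd64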
+
+import hashlib
+import platform
+import optparse
+import os
+import re
+import shutil
+import subprocess
+import sys
+
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+URL_PREFIX = 'http://storage.googleapis.com'
+URL_PATH = 'chrome-linux-sysroot/toolchain'
+REVISION_AMD64 = 'a2d45701cb21244b9514e420950ba6ba687fb655'
+REVISION_ARM = 'a2d45701cb21244b9514e420950ba6ba687fb655'
+REVISION_I386 = 'a2d45701cb21244b9514e420950ba6ba687fb655'
+REVISION_MIPS = '7749d2957387abf225b6d45154c3ddad142148dc'
+TARBALL_AMD64 = 'debian_wheezy_amd64_sysroot.tgz'
+TARBALL_ARM = 'debian_wheezy_arm_sysroot.tgz'
+TARBALL_I386 = 'debian_wheezy_i386_sysroot.tgz'
+TARBALL_MIPS = 'debian_wheezy_mips_sysroot.tgz'
+TARBALL_AMD64_SHA1SUM = '601216c0f980e798e7131635f3dd8171b3dcbcde'
+TARBALL_ARM_SHA1SUM = '6289593b36616526562a4d85ae9c92b694b8ce7e'
+TARBALL_I386_SHA1SUM = '0090e5a4b56ab9ffb5d557da6a520195ab59b446'
+TARBALL_MIPS_SHA1SUM = '3b4d782a237db4aac185a638572a7747c1a21825'
+SYSROOT_DIR_AMD64 = 'debian_wheezy_amd64-sysroot'
+SYSROOT_DIR_ARM = 'debian_wheezy_arm-sysroot'
+SYSROOT_DIR_I386 = 'debian_wheezy_i386-sysroot'
+SYSROOT_DIR_MIPS = 'debian_wheezy_mips-sysroot'
+
+valid_archs = ('arm', 'i386', 'amd64', 'mips')
+
+
+def GetSha1(filename):
+  sha1 = hashlib.sha1()
+  with open(filename, 'rb') as f:
+    while True:
+      # Read in 1 MB chunks, so it doesn't all have to be loaded into memory.
+      chunk = f.read(1024*1024)
+      if not chunk:
+        break
+      sha1.update(chunk)
+  return sha1.hexdigest()
+
+
+def DetectArch(gyp_defines):
+  # Check for optional target_arch and only install for that architecture.
+  # If target_arch is not specified, then only install for the host
+  # architecture.
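+  # For example (illustrative), 'target_arch=x64' in GYP_DEFINES selects
+  # 'amd64'.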
+  if 'target_arch=x64' in gyp_defines:
+    return 'amd64'
+  elif 'target_arch=ia32' in gyp_defines:
+    return 'i386'
+  elif 'target_arch=arm' in gyp_defines:
+    return 'arm'
+  elif 'target_arch=mipsel' in gyp_defines:
+    return 'mips'
+
+  # Figure out host arch using build/detect_host_arch.py and
+  # set target_arch to host arch
+  build_dir = os.path.dirname(os.path.dirname(SCRIPT_DIR))
+  sys.path.append(build_dir)
+  import detect_host_arch
+
+  detected_host_arch = detect_host_arch.HostArch()
+  if detected_host_arch == 'x64':
+    return 'amd64'
+  elif detected_host_arch == 'ia32':
+    return 'i386'
+  elif detected_host_arch == 'arm':
+    return 'arm'
+  elif detected_host_arch == 'mips':
+    return 'mips'
+  else:
+    print "Unknown host arch: %s" % detected_host_arch
+
+  return None
+
+
+def main():
+  if options.running_as_hook and not sys.platform.startswith('linux'):
+    return 0
+
+  gyp_defines = os.environ.get('GYP_DEFINES', '')
+
+  if options.arch:
+    target_arch = options.arch
+  else:
+    target_arch = DetectArch(gyp_defines)
+    if not target_arch:
+      print 'Unable to detect host architecture'
+      return 1
+
+  if options.running_as_hook and target_arch != 'arm' and target_arch != 'mips':
+    # When run from runhooks, only install the sysroot for an Official Chrome
+    # Linux build, except on ARM where we always use a sysroot.
+    skip_if_defined = ['branding=Chrome', 'buildtype=Official']
+    skip_if_undefined = ['chromeos=1']
+    for option in skip_if_defined:
+      if option not in gyp_defines:
+        return 0
+    for option in skip_if_undefined:
+      if option in gyp_defines:
+        return 0
+
+  # The sysroot directory should match the one specified in build/common.gypi.
+  # TODO(thestig) Consider putting this elsewhere to avoid having to recreate
+  # it on every build.
+  linux_dir = os.path.dirname(SCRIPT_DIR)
+  if target_arch == 'amd64':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_AMD64)
+    tarball_filename = TARBALL_AMD64
+    tarball_sha1sum = TARBALL_AMD64_SHA1SUM
+    revision = REVISION_AMD64
+  elif target_arch == 'arm':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_ARM)
+    tarball_filename = TARBALL_ARM
+    tarball_sha1sum = TARBALL_ARM_SHA1SUM
+    revision = REVISION_ARM
+  elif target_arch == 'i386':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_I386)
+    tarball_filename = TARBALL_I386
+    tarball_sha1sum = TARBALL_I386_SHA1SUM
+    revision = REVISION_I386
+  elif target_arch == 'mips':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_MIPS)
+    tarball_filename = TARBALL_MIPS
+    tarball_sha1sum = TARBALL_MIPS_SHA1SUM
+    revision = REVISION_MIPS
+  else:
+    print 'Unknown architecture: %s' % target_arch
+    assert(False)
+
+  url = '%s/%s/%s/%s' % (URL_PREFIX, URL_PATH, revision, tarball_filename)
+
+  stamp = os.path.join(sysroot, '.stamp')
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == url:
+        print 'Debian Wheezy %s root image already up-to-date: %s' % \
+            (target_arch, sysroot)
+        return 0
+
+  print 'Installing Debian Wheezy %s root image: %s' % (target_arch, sysroot)
+  if os.path.isdir(sysroot):
+    shutil.rmtree(sysroot)
+  os.mkdir(sysroot)
+  tarball = os.path.join(sysroot, tarball_filename)
+  print 'Downloading %s' % url
+  sys.stdout.flush()
+  sys.stderr.flush()
+  subprocess.check_call(['curl', '--fail', '-L', url, '-o', tarball])
+  sha1sum = GetSha1(tarball)
+  if sha1sum != tarball_sha1sum:
+    print 'Tarball sha1sum is wrong.'
+    print 'Expected %s, actual: %s' % (tarball_sha1sum, sha1sum)
+    return 1
+  subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
+  os.remove(tarball)
+
+  with open(stamp, 'w') as s:
+    s.write(url)
+  return 0
+
+
+if __name__ == '__main__':
+  parser = optparse.OptionParser('usage: %prog [OPTIONS]')
+  parser.add_option('--running-as-hook', action='store_true',
+                    default=False, help='Used when running from gclient hooks.'
+                                        ' In this mode the sysroot will only '
+                                        'be installed for official Linux '
+                                        'builds or ARM Linux builds')
+  parser.add_option('--arch', type='choice', choices=valid_archs,
+                    help='Sysroot architecture: %s' % ', '.join(valid_archs))
+  options, _ = parser.parse_args()
+  sys.exit(main())
diff --git a/build/linux/sysroot_scripts/packagelist.trusty.arm b/build/linux/sysroot_scripts/packagelist.trusty.arm
new file mode 100644
index 0000000..cd4b671
--- /dev/null
+++ b/build/linux/sysroot_scripts/packagelist.trusty.arm
@@ -0,0 +1,158 @@
+main/a/alsa-lib/libasound2_1.0.27.2-3ubuntu7_armhf.deb
+main/a/alsa-lib/libasound2-dev_1.0.27.2-3ubuntu7_armhf.deb
+main/a/atk1.0/libatk1.0-0_2.10.0-2ubuntu2_armhf.deb
+main/a/atk1.0/libatk1.0-dev_2.10.0-2ubuntu2_armhf.deb
+main/a/avahi/libavahi-client3_0.6.31-4ubuntu1_armhf.deb
+main/a/avahi/libavahi-common3_0.6.31-4ubuntu1_armhf.deb
+main/c/cairo/libcairo2_1.13.0~20140204-0ubuntu1_armhf.deb
+main/c/cairo/libcairo2-dev_1.13.0~20140204-0ubuntu1_armhf.deb
+main/c/cairo/libcairo-gobject2_1.13.0~20140204-0ubuntu1_armhf.deb
+main/c/cairo/libcairo-script-interpreter2_1.13.0~20140204-0ubuntu1_armhf.deb
+main/c/cups/libcups2_1.7.2-0ubuntu1_armhf.deb
+main/c/cups/libcups2-dev_1.7.2-0ubuntu1_armhf.deb
+main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_armhf.deb
+main/d/dbus/libdbus-1-3_1.6.18-0ubuntu4_armhf.deb
+main/d/dbus/libdbus-1-dev_1.6.18-0ubuntu4_armhf.deb
+main/e/e2fsprogs/comerr-dev_2.1-1.42.9-3ubuntu1_armhf.deb
+main/e/e2fsprogs/libcomerr2_1.42.9-3ubuntu1_armhf.deb
+main/e/eglibc/libc6_2.19-0ubuntu6_armhf.deb
+main/e/eglibc/libc6-dev_2.19-0ubuntu6_armhf.deb
+main/e/elfutils/libelf1_0.158-0ubuntu5_armhf.deb
+main/e/elfutils/libelf-dev_0.158-0ubuntu5_armhf.deb
+main/e/expat/libexpat1_2.1.0-4ubuntu1_armhf.deb
+main/e/expat/libexpat1-dev_2.1.0-4ubuntu1_armhf.deb
+main/f/fontconfig/libfontconfig1_2.11.0-0ubuntu4_armhf.deb
+main/f/fontconfig/libfontconfig1-dev_2.11.0-0ubuntu4_armhf.deb
+main/f/freetype/libfreetype6_2.5.2-1ubuntu2_armhf.deb
+main/f/freetype/libfreetype6-dev_2.5.2-1ubuntu2_armhf.deb
+main/g/gcc-4.8/gcc-4.8_4.8.2-19ubuntu1_armhf.deb
+main/g/gcc-4.8/libgomp1_4.8.2-19ubuntu1_armhf.deb
+main/g/gcc-4.8/libstdc++-4.8-dev_4.8.2-19ubuntu1_armhf.deb
+main/g/gcc-4.8/libstdc++6_4.8.2-19ubuntu1_armhf.deb
+main/g/gccgo-4.9/libgcc1_4.9-20140406-0ubuntu1_armhf.deb
+main/g/gconf/libgconf2-4_3.2.6-0ubuntu2_armhf.deb
+main/g/gconf/libgconf-2-4_3.2.6-0ubuntu2_armhf.deb
+main/g/gconf/libgconf2-dev_3.2.6-0ubuntu2_armhf.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.30.7-0ubuntu1_armhf.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.30.7-0ubuntu1_armhf.deb
+main/g/glib2.0/libglib2.0-0_2.40.0-2_armhf.deb
+main/g/glib2.0/libglib2.0-dev_2.40.0-2_armhf.deb
+main/g/gnutls26/libgnutls26_2.12.23-12ubuntu2_armhf.deb
+main/g/gnutls26/libgnutls-dev_2.12.23-12ubuntu2_armhf.deb
+main/g/gnutls26/libgnutls-openssl27_2.12.23-12ubuntu2_armhf.deb
+main/g/gnutls26/libgnutlsxx27_2.12.23-12ubuntu2_armhf.deb
+main/g/gtk+2.0/libgtk2.0-0_2.24.23-0ubuntu1_armhf.deb
+main/g/gtk+2.0/libgtk2.0-dev_2.24.23-0ubuntu1_armhf.deb
+main/k/keyutils/libkeyutils1_1.5.6-1_armhf.deb
+main/k/krb5/krb5-multidev_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libgssapi-krb5-2_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libgssrpc4_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libk5crypto3_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkadm5clnt-mit9_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkadm5srv-mit9_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkdb5-7_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkrb5-3_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkrb5-dev_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkrb5support0_1.12+dfsg-2ubuntu4_armhf.deb
+main/libc/libcap2/libcap2_2.24-0ubuntu2_armhf.deb
+main/libc/libcap2/libcap-dev_2.24-0ubuntu2_armhf.deb
+main/libd/libdrm/libdrm2_2.4.52-1_armhf.deb
+main/libe/libexif/libexif12_0.6.21-1ubuntu1_armhf.deb
+main/libe/libexif/libexif-dev_0.6.21-1ubuntu1_armhf.deb
+main/libf/libffi/libffi6_3.1~rc1+r3.0.13-12_armhf.deb
+main/libg/libgcrypt11/libgcrypt11_1.5.3-2ubuntu4_armhf.deb
+main/libg/libgcrypt11/libgcrypt11-dev_1.5.3-2ubuntu4_armhf.deb
+main/libg/libgnome-keyring/libgnome-keyring0_3.8.0-2_armhf.deb
+main/libg/libgnome-keyring/libgnome-keyring-dev_3.8.0-2_armhf.deb
+main/libg/libgpg-error/libgpg-error0_1.12-0.2ubuntu1_armhf.deb
+main/libg/libgpg-error/libgpg-error-dev_1.12-0.2ubuntu1_armhf.deb
+main/libn/libnss-db/libnss-db_2.2.3pre1-5build3_armhf.deb
+main/libp/libp11/libp11-2_0.2.8-3ubuntu1_armhf.deb
+main/libp/libpng/libpng12-0_1.2.50-1ubuntu2_armhf.deb
+main/libp/libpng/libpng12-dev_1.2.50-1ubuntu2_armhf.deb
+main/libs/libselinux/libselinux1_2.2.2-1_armhf.deb
+main/libt/libtasn1-6/libtasn1-6_3.4-3_armhf.deb
+main/libx/libx11/libx11-6_1.6.2-1ubuntu2_armhf.deb
+main/libx/libx11/libx11-dev_1.6.2-1ubuntu2_armhf.deb
+main/libx/libx11/libx11-xcb1_1.6.2-1ubuntu2_armhf.deb
+main/libx/libxau/libxau6_1.0.8-1_armhf.deb
+main/libx/libxau/libxau-dev_1.0.8-1_armhf.deb
+main/libx/libxcb/libxcb1_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb1-dev_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb-glx0_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb-render0_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb-render0-dev_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb-shm0_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb-shm0-dev_1.10-2ubuntu1_armhf.deb
+main/libx/libxcomposite/libxcomposite1_0.4.4-1_armhf.deb
+main/libx/libxcomposite/libxcomposite-dev_0.4.4-1_armhf.deb
+main/libx/libxcursor/libxcursor1_1.1.14-1_armhf.deb
+main/libx/libxcursor/libxcursor-dev_1.1.14-1_armhf.deb
+main/libx/libxdamage/libxdamage1_1.1.4-1ubuntu1_armhf.deb
+main/libx/libxdamage/libxdamage-dev_1.1.4-1ubuntu1_armhf.deb
+main/libx/libxdmcp/libxdmcp6_1.1.1-1_armhf.deb
+main/libx/libxext/libxext6_1.3.2-1_armhf.deb
+main/libx/libxext/libxext-dev_1.3.2-1_armhf.deb
+main/libx/libxfixes/libxfixes3_5.0.1-1ubuntu1_armhf.deb
+main/libx/libxfixes/libxfixes-dev_5.0.1-1ubuntu1_armhf.deb
+main/libx/libxi/libxi6_1.7.1.901-1ubuntu1_armhf.deb
+main/libx/libxi/libxi-dev_1.7.1.901-1ubuntu1_armhf.deb
+main/libx/libxinerama/libxinerama1_1.1.3-1_armhf.deb
+main/libx/libxinerama/libxinerama-dev_1.1.3-1_armhf.deb
+main/libx/libxrandr/libxrandr2_1.4.2-1_armhf.deb
+main/libx/libxrandr/libxrandr-dev_1.4.2-1_armhf.deb
+main/libx/libxrender/libxrender1_0.9.8-1_armhf.deb
+main/libx/libxrender/libxrender-dev_0.9.8-1_armhf.deb
+main/libx/libxss/libxss1_1.2.2-1_armhf.deb
+main/libx/libxss/libxss-dev_1.2.2-1_armhf.deb
+main/libx/libxt/libxt6_1.1.4-1_armhf.deb
+main/libx/libxt/libxt-dev_1.1.4-1_armhf.deb
+main/libx/libxtst/libxtst6_1.2.2-1_armhf.deb
+main/libx/libxtst/libxtst-dev_1.2.2-1_armhf.deb
+main/libx/libxxf86vm/libxxf86vm1_1.1.3-1_armhf.deb
+main/l/linux/linux-libc-dev_3.13.0-24.46_armhf.deb
+main/m/mesa/libgl1-mesa-dev_10.1.0-4ubuntu5_armhf.deb
+main/m/mesa/libgl1-mesa-glx_10.1.0-4ubuntu5_armhf.deb
+main/m/mesa/libglapi-mesa_10.1.0-4ubuntu5_armhf.deb
+main/m/mesa/mesa-common-dev_10.1.0-4ubuntu5_armhf.deb
+main/n/nspr/libnspr4_4.10.2-1ubuntu1_armhf.deb
+main/n/nspr/libnspr4-dev_4.10.2-1ubuntu1_armhf.deb
+main/n/nss/libnss3_3.15.4-1ubuntu7_armhf.deb
+main/n/nss/libnss3-dev_3.15.4-1ubuntu7_armhf.deb
+main/o/openssl/libssl1.0.0_1.0.1f-1ubuntu2_armhf.deb
+main/o/openssl/libssl-dev_1.0.1f-1ubuntu2_armhf.deb
+main/o/orbit2/liborbit2_2.14.19-0.3_armhf.deb
+main/p/p11-kit/libp11-kit0_0.20.2-2ubuntu2_armhf.deb
+main/p/pam/libpam0g_1.1.8-1ubuntu2_armhf.deb
+main/p/pam/libpam0g-dev_1.1.8-1ubuntu2_armhf.deb
+main/p/pango1.0/libpango-1.0-0_1.36.3-1ubuntu1_armhf.deb
+main/p/pango1.0/libpango1.0-dev_1.36.3-1ubuntu1_armhf.deb
+main/p/pango1.0/libpangocairo-1.0-0_1.36.3-1ubuntu1_armhf.deb
+main/p/pango1.0/libpangoft2-1.0-0_1.36.3-1ubuntu1_armhf.deb
+main/p/pango1.0/libpangoxft-1.0-0_1.36.3-1ubuntu1_armhf.deb
+main/p/pciutils/libpci3_3.2.1-1ubuntu5_armhf.deb
+main/p/pciutils/libpci-dev_3.2.1-1ubuntu5_armhf.deb
+main/p/pcre3/libpcre3_8.31-2ubuntu2_armhf.deb
+main/p/pcre3/libpcre3-dev_8.31-2ubuntu2_armhf.deb
+main/p/pcre3/libpcrecpp0_8.31-2ubuntu2_armhf.deb
+main/p/pixman/libpixman-1-0_0.30.2-2ubuntu1_armhf.deb
+main/p/pixman/libpixman-1-dev_0.30.2-2ubuntu1_armhf.deb
+main/p/pulseaudio/libpulse0_4.0-0ubuntu11_armhf.deb
+main/p/pulseaudio/libpulse-dev_4.0-0ubuntu11_armhf.deb
+main/p/pulseaudio/libpulse-mainloop-glib0_4.0-0ubuntu11_armhf.deb
+main/s/speech-dispatcher/libspeechd2_0.8-5ubuntu1_armhf.deb
+main/s/speech-dispatcher/libspeechd-dev_0.8-5ubuntu1_armhf.deb
+main/s/speech-dispatcher/speech-dispatcher_0.8-5ubuntu1_armhf.deb
+main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+main/x/x11proto-core/x11proto-core-dev_7.0.24-1_all.deb
+main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2ubuntu2_all.deb
+main/x/x11proto-input/x11proto-input-dev_2.3-1_all.deb
+main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
+main/x/x11proto-randr/x11proto-randr-dev_1.4.0+git20120101.is.really.1.4.0-0ubuntu1_all.deb
+main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
+main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
+main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
+main/x/x11proto-xext/x11proto-xext-dev_7.3.0-1_all.deb
+main/z/zlib/zlib1g_1.2.8.dfsg-1ubuntu1_armhf.deb
+main/z/zlib/zlib1g-dev_1.2.8.dfsg-1ubuntu1_armhf.deb
diff --git a/build/linux/sysroot_scripts/packagelist.wheezy.amd64 b/build/linux/sysroot_scripts/packagelist.wheezy.amd64
new file mode 100644
index 0000000..ced7dfd
--- /dev/null
+++ b/build/linux/sysroot_scripts/packagelist.wheezy.amd64
@@ -0,0 +1,157 @@
+main/a/alsa-lib/libasound2_1.0.25-4_amd64.deb
+main/a/alsa-lib/libasound2-dev_1.0.25-4_amd64.deb
+main/a/atk1.0/libatk1.0-0_2.4.0-2_amd64.deb
+main/a/atk1.0/libatk1.0-dev_2.4.0-2_amd64.deb
+main/a/attr/libattr1_2.4.46-8_amd64.deb
+main/a/avahi/libavahi-client3_0.6.31-2_amd64.deb
+main/a/avahi/libavahi-common3_0.6.31-2_amd64.deb
+main/c/cairo/libcairo2_1.12.2-3_amd64.deb
+main/c/cairo/libcairo2-dev_1.12.2-3_amd64.deb
+main/c/cairo/libcairo-gobject2_1.12.2-3_amd64.deb
+main/c/cairo/libcairo-script-interpreter2_1.12.2-3_amd64.deb
+main/c/cups/libcups2_1.5.3-5+deb7u4_amd64.deb
+main/c/cups/libcups2-dev_1.5.3-5+deb7u4_amd64.deb
+main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_amd64.deb
+main/d/dbus/libdbus-1-3_1.6.8-1+deb7u5_amd64.deb
+main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u5_amd64.deb
+main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1_amd64.deb
+main/e/e2fsprogs/libcomerr2_1.42.5-1.1_amd64.deb
+main/e/eglibc/libc6_2.13-38+deb7u6_amd64.deb
+main/e/eglibc/libc6-dev_2.13-38+deb7u6_amd64.deb
+main/e/elfutils/libelf1_0.152-1+wheezy1_amd64.deb
+main/e/elfutils/libelf-dev_0.152-1+wheezy1_amd64.deb
+main/e/expat/libexpat1_2.1.0-1+deb7u1_amd64.deb
+main/e/expat/libexpat1-dev_2.1.0-1+deb7u1_amd64.deb
+main/f/fontconfig/libfontconfig1_2.9.0-7.1_amd64.deb
+main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_amd64.deb
+main/f/freetype/libfreetype6_2.4.9-1.1_amd64.deb
+main/f/freetype/libfreetype6-dev_2.4.9-1.1_amd64.deb
+main/g/gcc-4.6/gcc-4.6_4.6.3-14_amd64.deb
+main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_amd64.deb
+main/g/gcc-4.7/libgcc1_4.7.2-5_amd64.deb
+main/g/gcc-4.7/libgomp1_4.7.2-5_amd64.deb
+main/g/gcc-4.7/libquadmath0_4.7.2-5_amd64.deb
+main/g/gcc-4.7/libstdc++6_4.7.2-5_amd64.deb
+main/g/gconf/libgconf-2-4_3.2.5-1+build1_amd64.deb
+main/g/gconf/libgconf2-4_3.2.5-1+build1_amd64.deb
+main/g/gconf/libgconf2-dev_3.2.5-1+build1_amd64.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1_amd64.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1_amd64.deb
+main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_amd64.deb
+main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_amd64.deb
+main/g/gnutls26/libgnutls26_2.12.20-8+deb7u2_amd64.deb
+main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u2_amd64.deb
+main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u2_amd64.deb
+main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u2_amd64.deb
+main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_amd64.deb
+main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_amd64.deb
+main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_amd64.deb
+main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/libc/libcap2/libcap2_2.22-1.2_amd64.deb
+main/libc/libcap2/libcap-dev_2.22-1.2_amd64.deb
+main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_amd64.deb
+main/libe/libexif/libexif12_0.6.20-3_amd64.deb
+main/libe/libexif/libexif-dev_0.6.20-3_amd64.deb
+main/libf/libffi/libffi5_3.0.10-3_amd64.deb
+main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u2_amd64.deb
+main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u2_amd64.deb
+main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_amd64.deb
+main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_amd64.deb
+main/libg/libgpg-error/libgpg-error0_1.10-3.1_amd64.deb
+main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_amd64.deb
+main/libn/libnss-db/libnss-db_2.2.3pre1-4_amd64.deb
+main/libp/libp11/libp11-2_0.2.8-2_amd64.deb
+main/libp/libpng/libpng12-0_1.2.49-1_amd64.deb
+main/libp/libpng/libpng12-dev_1.2.49-1_amd64.deb
+main/libs/libselinux/libselinux1_2.1.9-5_amd64.deb
+main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u1_amd64.deb
+main/libx/libx11/libx11-6_1.5.0-1+deb7u1_amd64.deb
+main/libx/libx11/libx11-dev_1.5.0-1+deb7u1_amd64.deb
+main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u1_amd64.deb
+main/libx/libxau/libxau6_1.0.7-1_amd64.deb
+main/libx/libxau/libxau-dev_1.0.7-1_amd64.deb
+main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcomposite/libxcomposite1_0.4.3-2_amd64.deb
+main/libx/libxcomposite/libxcomposite-dev_0.4.3-2_amd64.deb
+main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_amd64.deb
+main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_amd64.deb
+main/libx/libxdamage/libxdamage1_1.1.3-2_amd64.deb
+main/libx/libxdamage/libxdamage-dev_1.1.3-2_amd64.deb
+main/libx/libxdmcp/libxdmcp6_1.1.1-1_amd64.deb
+main/libx/libxext/libxext6_1.3.1-2+deb7u1_amd64.deb
+main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_amd64.deb
+main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_amd64.deb
+main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_amd64.deb
+main/libx/libxi/libxi6_1.6.1-1+deb7u1_amd64.deb
+main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_amd64.deb
+main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_amd64.deb
+main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_amd64.deb
+main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_amd64.deb
+main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_amd64.deb
+main/libx/libxrender/libxrender1_0.9.7-1+deb7u1_amd64.deb
+main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u1_amd64.deb
+main/libx/libxss/libxss1_1.2.2-1_amd64.deb
+main/libx/libxss/libxss-dev_1.2.2-1_amd64.deb
+main/libx/libxt/libxt6_1.1.3-1+deb7u1_amd64.deb
+main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_amd64.deb
+main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_amd64.deb
+main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_amd64.deb
+main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_amd64.deb
+main/l/linux/linux-libc-dev_3.2.65-1_amd64.deb
+main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_amd64.deb
+main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_amd64.deb
+main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_amd64.deb
+main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_amd64.deb
+main/n/nspr/libnspr4_4.9.2-1+deb7u2_amd64.deb
+main/n/nspr/libnspr4-dev_4.9.2-1+deb7u2_amd64.deb
+main/n/nss/libnss3_3.14.5-1+deb7u3_amd64.deb
+main/n/nss/libnss3-dev_3.14.5-1+deb7u3_amd64.deb
+main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u13_amd64.deb
+main/o/openssl/libssl-dev_1.0.1e-2+deb7u13_amd64.deb
+main/o/orbit2/liborbit2_2.14.19-0.1_amd64.deb
+main/p/p11-kit/libp11-kit0_0.12-3_amd64.deb
+main/p/pam/libpam0g_1.1.3-7.1_amd64.deb
+main/p/pam/libpam0g-dev_1.1.3-7.1_amd64.deb
+main/p/pango1.0/libpango1.0-0_1.30.0-1_amd64.deb
+main/p/pango1.0/libpango1.0-dev_1.30.0-1_amd64.deb
+main/p/pciutils/libpci3_3.1.9-6_amd64.deb
+main/p/pciutils/libpci-dev_3.1.9-6_amd64.deb
+main/p/pcre3/libpcre3_8.30-5_amd64.deb
+main/p/pcre3/libpcre3-dev_8.30-5_amd64.deb
+main/p/pcre3/libpcrecpp0_8.30-5_amd64.deb
+main/p/pixman/libpixman-1-0_0.26.0-4+deb7u1_amd64.deb
+main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u1_amd64.deb
+main/p/pulseaudio/libpulse0_2.0-6.1_amd64.deb
+main/p/pulseaudio/libpulse-dev_2.0-6.1_amd64.deb
+main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_amd64.deb
+main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_amd64.deb
+main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_amd64.deb
+main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_amd64.deb
+main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb
+main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb
+main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
+main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb
+main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
+main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
+main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
+main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb
+main/z/zlib/zlib1g_1.2.7.dfsg-13_amd64.deb
+main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_amd64.deb
diff --git a/build/linux/sysroot_scripts/packagelist.wheezy.arm b/build/linux/sysroot_scripts/packagelist.wheezy.arm
new file mode 100644
index 0000000..3d79cb3
--- /dev/null
+++ b/build/linux/sysroot_scripts/packagelist.wheezy.arm
@@ -0,0 +1,156 @@
+main/a/alsa-lib/libasound2_1.0.25-4_armhf.deb
+main/a/alsa-lib/libasound2-dev_1.0.25-4_armhf.deb
+main/a/atk1.0/libatk1.0-0_2.4.0-2_armhf.deb
+main/a/atk1.0/libatk1.0-dev_2.4.0-2_armhf.deb
+main/a/attr/libattr1_2.4.46-8_armhf.deb
+main/a/avahi/libavahi-client3_0.6.31-2_armhf.deb
+main/a/avahi/libavahi-common3_0.6.31-2_armhf.deb
+main/c/cairo/libcairo2_1.12.2-3_armhf.deb
+main/c/cairo/libcairo2-dev_1.12.2-3_armhf.deb
+main/c/cairo/libcairo-gobject2_1.12.2-3_armhf.deb
+main/c/cairo/libcairo-script-interpreter2_1.12.2-3_armhf.deb
+main/c/cups/libcups2_1.5.3-5+deb7u4_armhf.deb
+main/c/cups/libcups2-dev_1.5.3-5+deb7u4_armhf.deb
+main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_armhf.deb
+main/d/dbus/libdbus-1-3_1.6.8-1+deb7u5_armhf.deb
+main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u5_armhf.deb
+main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1_armhf.deb
+main/e/e2fsprogs/libcomerr2_1.42.5-1.1_armhf.deb
+main/e/eglibc/libc6_2.13-38+deb7u6_armhf.deb
+main/e/eglibc/libc6-dev_2.13-38+deb7u6_armhf.deb
+main/e/elfutils/libelf1_0.152-1+wheezy1_armhf.deb
+main/e/elfutils/libelf-dev_0.152-1+wheezy1_armhf.deb
+main/e/expat/libexpat1_2.1.0-1+deb7u1_armhf.deb
+main/e/expat/libexpat1-dev_2.1.0-1+deb7u1_armhf.deb
+main/f/fontconfig/libfontconfig1_2.9.0-7.1_armhf.deb
+main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_armhf.deb
+main/f/freetype/libfreetype6_2.4.9-1.1_armhf.deb
+main/f/freetype/libfreetype6-dev_2.4.9-1.1_armhf.deb
+main/g/gcc-4.6/gcc-4.6_4.6.3-14_armhf.deb
+main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_armhf.deb
+main/g/gcc-4.7/libgcc1_4.7.2-5_armhf.deb
+main/g/gcc-4.7/libgomp1_4.7.2-5_armhf.deb
+main/g/gcc-4.7/libstdc++6_4.7.2-5_armhf.deb
+main/g/gconf/libgconf2-4_3.2.5-1+build1_armhf.deb
+main/g/gconf/libgconf-2-4_3.2.5-1+build1_armhf.deb
+main/g/gconf/libgconf2-dev_3.2.5-1+build1_armhf.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1_armhf.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1_armhf.deb
+main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_armhf.deb
+main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_armhf.deb
+main/g/gnutls26/libgnutls26_2.12.20-8+deb7u2_armhf.deb
+main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u2_armhf.deb
+main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u2_armhf.deb
+main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u2_armhf.deb
+main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_armhf.deb
+main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_armhf.deb
+main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_armhf.deb
+main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/libc/libcap2/libcap2_2.22-1.2_armhf.deb
+main/libc/libcap2/libcap-dev_2.22-1.2_armhf.deb
+main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_armhf.deb
+main/libe/libexif/libexif12_0.6.20-3_armhf.deb
+main/libe/libexif/libexif-dev_0.6.20-3_armhf.deb
+main/libf/libffi/libffi5_3.0.10-3+b1_armhf.deb
+main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u2_armhf.deb
+main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u2_armhf.deb
+main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_armhf.deb
+main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_armhf.deb
+main/libg/libgpg-error/libgpg-error0_1.10-3.1_armhf.deb
+main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_armhf.deb
+main/libn/libnss-db/libnss-db_2.2.3pre1-4_armhf.deb
+main/libp/libp11/libp11-2_0.2.8-2_armhf.deb
+main/libp/libpng/libpng12-0_1.2.49-1_armhf.deb
+main/libp/libpng/libpng12-dev_1.2.49-1_armhf.deb
+main/libs/libselinux/libselinux1_2.1.9-5_armhf.deb
+main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u1_armhf.deb
+main/libx/libx11/libx11-6_1.5.0-1+deb7u1_armhf.deb
+main/libx/libx11/libx11-dev_1.5.0-1+deb7u1_armhf.deb
+main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u1_armhf.deb
+main/libx/libxau/libxau6_1.0.7-1_armhf.deb
+main/libx/libxau/libxau-dev_1.0.7-1_armhf.deb
+main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcomposite/libxcomposite1_0.4.3-2+b1_armhf.deb
+main/libx/libxcomposite/libxcomposite-dev_0.4.3-2+b1_armhf.deb
+main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_armhf.deb
+main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_armhf.deb
+main/libx/libxdamage/libxdamage1_1.1.3-2+b1_armhf.deb
+main/libx/libxdamage/libxdamage-dev_1.1.3-2+b1_armhf.deb
+main/libx/libxdmcp/libxdmcp6_1.1.1-1_armhf.deb
+main/libx/libxext/libxext6_1.3.1-2+deb7u1_armhf.deb
+main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_armhf.deb
+main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_armhf.deb
+main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_armhf.deb
+main/libx/libxi/libxi6_1.6.1-1+deb7u1_armhf.deb
+main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_armhf.deb
+main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_armhf.deb
+main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_armhf.deb
+main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_armhf.deb
+main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_armhf.deb
+main/libx/libxrender/libxrender1_0.9.7-1+deb7u1_armhf.deb
+main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u1_armhf.deb
+main/libx/libxss/libxss1_1.2.2-1_armhf.deb
+main/libx/libxss/libxss-dev_1.2.2-1_armhf.deb
+main/libx/libxt/libxt6_1.1.3-1+deb7u1_armhf.deb
+main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_armhf.deb
+main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_armhf.deb
+main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_armhf.deb
+main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_armhf.deb
+main/l/linux/linux-libc-dev_3.2.65-1_armhf.deb
+main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_armhf.deb
+main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_armhf.deb
+main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_armhf.deb
+main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_armhf.deb
+main/n/nspr/libnspr4_4.9.2-1+deb7u2_armhf.deb
+main/n/nspr/libnspr4-dev_4.9.2-1+deb7u2_armhf.deb
+main/n/nss/libnss3_3.14.5-1+deb7u3_armhf.deb
+main/n/nss/libnss3-dev_3.14.5-1+deb7u3_armhf.deb
+main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u13_armhf.deb
+main/o/openssl/libssl-dev_1.0.1e-2+deb7u13_armhf.deb
+main/o/orbit2/liborbit2_2.14.19-0.1_armhf.deb
+main/p/p11-kit/libp11-kit0_0.12-3_armhf.deb
+main/p/pam/libpam0g_1.1.3-7.1_armhf.deb
+main/p/pam/libpam0g-dev_1.1.3-7.1_armhf.deb
+main/p/pango1.0/libpango1.0-0_1.30.0-1_armhf.deb
+main/p/pango1.0/libpango1.0-dev_1.30.0-1_armhf.deb
+main/p/pciutils/libpci3_3.1.9-6_armhf.deb
+main/p/pciutils/libpci-dev_3.1.9-6_armhf.deb
+main/p/pcre3/libpcre3_8.30-5_armhf.deb
+main/p/pcre3/libpcre3-dev_8.30-5_armhf.deb
+main/p/pcre3/libpcrecpp0_8.30-5_armhf.deb
+main/p/pixman/libpixman-1-0_0.26.0-4+deb7u1_armhf.deb
+main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u1_armhf.deb
+main/p/pulseaudio/libpulse0_2.0-6.1_armhf.deb
+main/p/pulseaudio/libpulse-dev_2.0-6.1_armhf.deb
+main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_armhf.deb
+main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_armhf.deb
+main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_armhf.deb
+main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_armhf.deb
+main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb
+main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb
+main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
+main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb
+main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
+main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
+main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
+main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb
+main/z/zlib/zlib1g_1.2.7.dfsg-13_armhf.deb
+main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_armhf.deb
diff --git a/build/linux/sysroot_scripts/packagelist.wheezy.i386 b/build/linux/sysroot_scripts/packagelist.wheezy.i386
new file mode 100644
index 0000000..1379fee
--- /dev/null
+++ b/build/linux/sysroot_scripts/packagelist.wheezy.i386
@@ -0,0 +1,157 @@
+main/a/alsa-lib/libasound2_1.0.25-4_i386.deb
+main/a/alsa-lib/libasound2-dev_1.0.25-4_i386.deb
+main/a/atk1.0/libatk1.0-0_2.4.0-2_i386.deb
+main/a/atk1.0/libatk1.0-dev_2.4.0-2_i386.deb
+main/a/attr/libattr1_2.4.46-8_i386.deb
+main/a/avahi/libavahi-client3_0.6.31-2_i386.deb
+main/a/avahi/libavahi-common3_0.6.31-2_i386.deb
+main/c/cairo/libcairo2_1.12.2-3_i386.deb
+main/c/cairo/libcairo2-dev_1.12.2-3_i386.deb
+main/c/cairo/libcairo-gobject2_1.12.2-3_i386.deb
+main/c/cairo/libcairo-script-interpreter2_1.12.2-3_i386.deb
+main/c/cups/libcups2_1.5.3-5+deb7u4_i386.deb
+main/c/cups/libcups2-dev_1.5.3-5+deb7u4_i386.deb
+main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_i386.deb
+main/d/dbus/libdbus-1-3_1.6.8-1+deb7u5_i386.deb
+main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u5_i386.deb
+main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1_i386.deb
+main/e/e2fsprogs/libcomerr2_1.42.5-1.1_i386.deb
+main/e/eglibc/libc6_2.13-38+deb7u6_i386.deb
+main/e/eglibc/libc6-dev_2.13-38+deb7u6_i386.deb
+main/e/elfutils/libelf1_0.152-1+wheezy1_i386.deb
+main/e/elfutils/libelf-dev_0.152-1+wheezy1_i386.deb
+main/e/expat/libexpat1_2.1.0-1+deb7u1_i386.deb
+main/e/expat/libexpat1-dev_2.1.0-1+deb7u1_i386.deb
+main/f/fontconfig/libfontconfig1_2.9.0-7.1_i386.deb
+main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_i386.deb
+main/f/freetype/libfreetype6_2.4.9-1.1_i386.deb
+main/f/freetype/libfreetype6-dev_2.4.9-1.1_i386.deb
+main/g/gcc-4.6/gcc-4.6_4.6.3-14_i386.deb
+main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_i386.deb
+main/g/gcc-4.7/libgcc1_4.7.2-5_i386.deb
+main/g/gcc-4.7/libgomp1_4.7.2-5_i386.deb
+main/g/gcc-4.7/libquadmath0_4.7.2-5_i386.deb
+main/g/gcc-4.7/libstdc++6_4.7.2-5_i386.deb
+main/g/gconf/libgconf-2-4_3.2.5-1+build1_i386.deb
+main/g/gconf/libgconf2-4_3.2.5-1+build1_i386.deb
+main/g/gconf/libgconf2-dev_3.2.5-1+build1_i386.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1_i386.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1_i386.deb
+main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_i386.deb
+main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_i386.deb
+main/g/gnutls26/libgnutls26_2.12.20-8+deb7u2_i386.deb
+main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u2_i386.deb
+main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u2_i386.deb
+main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u2_i386.deb
+main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_i386.deb
+main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_i386.deb
+main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_i386.deb
+main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u2_i386.deb
+main/libc/libcap2/libcap2_2.22-1.2_i386.deb
+main/libc/libcap2/libcap-dev_2.22-1.2_i386.deb
+main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_i386.deb
+main/libe/libexif/libexif12_0.6.20-3_i386.deb
+main/libe/libexif/libexif-dev_0.6.20-3_i386.deb
+main/libf/libffi/libffi5_3.0.10-3_i386.deb
+main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u2_i386.deb
+main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u2_i386.deb
+main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_i386.deb
+main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_i386.deb
+main/libg/libgpg-error/libgpg-error0_1.10-3.1_i386.deb
+main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_i386.deb
+main/libn/libnss-db/libnss-db_2.2.3pre1-4_i386.deb
+main/libp/libp11/libp11-2_0.2.8-2_i386.deb
+main/libp/libpng/libpng12-0_1.2.49-1_i386.deb
+main/libp/libpng/libpng12-dev_1.2.49-1_i386.deb
+main/libs/libselinux/libselinux1_2.1.9-5_i386.deb
+main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u1_i386.deb
+main/libx/libx11/libx11-6_1.5.0-1+deb7u1_i386.deb
+main/libx/libx11/libx11-dev_1.5.0-1+deb7u1_i386.deb
+main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u1_i386.deb
+main/libx/libxau/libxau6_1.0.7-1_i386.deb
+main/libx/libxau/libxau-dev_1.0.7-1_i386.deb
+main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcomposite/libxcomposite1_0.4.3-2_i386.deb
+main/libx/libxcomposite/libxcomposite-dev_0.4.3-2_i386.deb
+main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_i386.deb
+main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_i386.deb
+main/libx/libxdamage/libxdamage1_1.1.3-2_i386.deb
+main/libx/libxdamage/libxdamage-dev_1.1.3-2_i386.deb
+main/libx/libxdmcp/libxdmcp6_1.1.1-1_i386.deb
+main/libx/libxext/libxext6_1.3.1-2+deb7u1_i386.deb
+main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_i386.deb
+main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_i386.deb
+main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_i386.deb
+main/libx/libxi/libxi6_1.6.1-1+deb7u1_i386.deb
+main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_i386.deb
+main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_i386.deb
+main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_i386.deb
+main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_i386.deb
+main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_i386.deb
+main/libx/libxrender/libxrender1_0.9.7-1+deb7u1_i386.deb
+main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u1_i386.deb
+main/libx/libxss/libxss1_1.2.2-1_i386.deb
+main/libx/libxss/libxss-dev_1.2.2-1_i386.deb
+main/libx/libxt/libxt6_1.1.3-1+deb7u1_i386.deb
+main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_i386.deb
+main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_i386.deb
+main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_i386.deb
+main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_i386.deb
+main/l/linux/linux-libc-dev_3.2.65-1_i386.deb
+main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_i386.deb
+main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_i386.deb
+main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_i386.deb
+main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_i386.deb
+main/n/nspr/libnspr4_4.9.2-1+deb7u2_i386.deb
+main/n/nspr/libnspr4-dev_4.9.2-1+deb7u2_i386.deb
+main/n/nss/libnss3_3.14.5-1+deb7u3_i386.deb
+main/n/nss/libnss3-dev_3.14.5-1+deb7u3_i386.deb
+main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u13_i386.deb
+main/o/openssl/libssl-dev_1.0.1e-2+deb7u13_i386.deb
+main/o/orbit2/liborbit2_2.14.19-0.1_i386.deb
+main/p/p11-kit/libp11-kit0_0.12-3_i386.deb
+main/p/pam/libpam0g_1.1.3-7.1_i386.deb
+main/p/pam/libpam0g-dev_1.1.3-7.1_i386.deb
+main/p/pango1.0/libpango1.0-0_1.30.0-1_i386.deb
+main/p/pango1.0/libpango1.0-dev_1.30.0-1_i386.deb
+main/p/pciutils/libpci3_3.1.9-6_i386.deb
+main/p/pciutils/libpci-dev_3.1.9-6_i386.deb
+main/p/pcre3/libpcre3_8.30-5_i386.deb
+main/p/pcre3/libpcre3-dev_8.30-5_i386.deb
+main/p/pcre3/libpcrecpp0_8.30-5_i386.deb
+main/p/pixman/libpixman-1-0_0.26.0-4+deb7u1_i386.deb
+main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u1_i386.deb
+main/p/pulseaudio/libpulse0_2.0-6.1_i386.deb
+main/p/pulseaudio/libpulse-dev_2.0-6.1_i386.deb
+main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_i386.deb
+main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_i386.deb
+main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_i386.deb
+main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_i386.deb
+main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb
+main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb
+main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
+main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb
+main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
+main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
+main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
+main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb
+main/z/zlib/zlib1g_1.2.7.dfsg-13_i386.deb
+main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_i386.deb
diff --git a/build/linux/sysroot_scripts/sysroot-creator-test.sh b/build/linux/sysroot_scripts/sysroot-creator-test.sh
new file mode 100755
index 0000000..b346bb7
--- /dev/null
+++ b/build/linux/sysroot_scripts/sysroot-creator-test.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Rudimentary test suite for sysroot-creator.
+
+SCRIPT_DIR=$(dirname $0)
+
+set -o errexit
+
+TestUpdateAllLists() {
+  echo "[ RUN      ] TestUpdateAllLists"
+  "$SCRIPT_DIR/sysroot-creator-trusty.sh" UpdatePackageListsAmd64
+  "$SCRIPT_DIR/sysroot-creator-trusty.sh" UpdatePackageListsI386
+  "$SCRIPT_DIR/sysroot-creator-trusty.sh" UpdatePackageListsARM
+  "$SCRIPT_DIR/sysroot-creator-wheezy.sh" UpdatePackageListsAmd64
+  "$SCRIPT_DIR/sysroot-creator-wheezy.sh" UpdatePackageListsI386
+  "$SCRIPT_DIR/sysroot-creator-wheezy.sh" UpdatePackageListsARM
+  echo "[      OK  ]"
+}
+
+TestUpdateAllLists
diff --git a/build/linux/sysroot_scripts/sysroot-creator-trusty.sh b/build/linux/sysroot_scripts/sysroot-creator-trusty.sh
new file mode 100755
index 0000000..c0d82ec
--- /dev/null
+++ b/build/linux/sysroot_scripts/sysroot-creator-trusty.sh
@@ -0,0 +1,182 @@
+#!/bin/sh
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+SCRIPT_DIR=$(dirname $0)
+
+DISTRO=ubuntu
+DIST=trusty
+
+# This is where we get all the Debian packages from.
+APT_REPO=http://archive.ubuntu.com/ubuntu
+APT_REPO_ARM=http://ports.ubuntu.com
+REPO_BASEDIR="${APT_REPO}/dists/${DIST}"
+KEYRING_FILE=/usr/share/keyrings/ubuntu-archive-keyring.gpg
+
+# Sysroot packages: these are the packages needed to build Chrome.
+# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated
+# by running this script in GeneratePackageList mode.
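+# (See sysroot-creator-test.sh for example list-update invocations, e.g.
+# "sysroot-creator-trusty.sh UpdatePackageListsAmd64".)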
+DEBIAN_PACKAGES="\
+  comerr-dev \
+  gcc-4.8 \
+  krb5-multidev \
+  libasound2 \
+  libasound2-dev \
+  libatk1.0-0 \
+  libatk1.0-dev \
+  libavahi-client3 \
+  libavahi-common3 \
+  libc6 \
+  libc6-dev \
+  libcairo2 \
+  libcairo2-dev \
+  libcairo-gobject2 \
+  libcairo-script-interpreter2 \
+  libcap-dev \
+  libcap2 \
+  libcomerr2 \
+  libcups2 \
+  libcups2-dev \
+  libdbus-1-3 \
+  libdbus-1-dev \
+  libdbus-glib-1-2 \
+  libdrm2 \
+  libelf1 \
+  libelf-dev \
+  libexif12 \
+  libexif-dev \
+  libexpat1 \
+  libexpat1-dev \
+  libffi6 \
+  libfontconfig1 \
+  libfontconfig1-dev \
+  libfreetype6 \
+  libfreetype6-dev \
+  libgcc1 \
+  libgconf-2-4 \
+  libgconf2-4 \
+  libgconf2-dev \
+  libgcrypt11 \
+  libgcrypt11-dev \
+  libgdk-pixbuf2.0-0 \
+  libgdk-pixbuf2.0-dev \
+  libgl1-mesa-dev \
+  libgl1-mesa-glx \
+  libglapi-mesa \
+  libglib2.0-0 \
+  libglib2.0-dev \
+  libgnome-keyring0 \
+  libgnome-keyring-dev \
+  libgnutls26 \
+  libgnutls-dev \
+  libgnutls-openssl27 \
+  libgnutlsxx27 \
+  libgomp1 \
+  libgpg-error0 \
+  libgpg-error-dev \
+  libgssapi-krb5-2 \
+  libgssrpc4 \
+  libgtk2.0-0 \
+  libgtk2.0-dev \
+  libk5crypto3 \
+  libkadm5clnt-mit9 \
+  libkadm5srv-mit9 \
+  libkdb5-7 \
+  libkeyutils1 \
+  libkrb5-3 \
+  libkrb5-dev \
+  libkrb5support0 \
+  libnspr4 \
+  libnspr4-dev \
+  libnss3 \
+  libnss3-dev \
+  libnss-db \
+  liborbit2 \
+  libp11-2 \
+  libp11-kit0 \
+  libpam0g \
+  libpam0g-dev \
+  libpango-1.0-0 \
+  libpango1.0-dev \
+  libpangocairo-1.0-0 \
+  libpangoft2-1.0-0 \
+  libpangoxft-1.0-0 \
+  libpci3 \
+  libpci-dev \
+  libpcre3 \
+  libpcre3-dev \
+  libpcrecpp0 \
+  libpixman-1-0 \
+  libpixman-1-dev \
+  libpng12-0 \
+  libpng12-dev \
+  libpulse0 \
+  libpulse-dev \
+  libpulse-mainloop-glib0 \
+  libselinux1 \
+  libspeechd2 \
+  libspeechd-dev \
+  libssl1.0.0 \
+  libssl-dev \
+  libstdc++6 \
+  libstdc++-4.8-dev \
+  libtasn1-6 \
+  libx11-6 \
+  libx11-dev \
+  libx11-xcb1 \
+  libxau6 \
+  libxau-dev \
+  libxcb1 \
+  libxcb1-dev \
+  libxcb-glx0 \
+  libxcb-render0 \
+  libxcb-render0-dev \
+  libxcb-shm0 \
+  libxcb-shm0-dev \
+  libxcomposite1 \
+  libxcomposite-dev \
+  libxcursor1 \
+  libxcursor-dev \
+  libxdamage1 \
+  libxdamage-dev \
+  libxdmcp6 \
+  libxext6 \
+  libxext-dev \
+  libxfixes3 \
+  libxfixes-dev \
+  libxi6 \
+  libxi-dev \
+  libxinerama1 \
+  libxinerama-dev \
+  libxrandr2 \
+  libxrandr-dev \
+  libxrender1 \
+  libxrender-dev \
+  libxss1 \
+  libxss-dev \
+  libxt6 \
+  libxt-dev \
+  libxtst6 \
+  libxtst-dev \
+  libxxf86vm1 \
+  linux-libc-dev \
+  mesa-common-dev \
+  speech-dispatcher \
+  x11proto-composite-dev \
+  x11proto-core-dev \
+  x11proto-damage-dev \
+  x11proto-fixes-dev \
+  x11proto-input-dev \
+  x11proto-kb-dev \
+  x11proto-randr-dev \
+  x11proto-record-dev \
+  x11proto-render-dev \
+  x11proto-scrnsaver-dev \
+  x11proto-xext-dev \
+  zlib1g \
+  zlib1g-dev"
+
+DEBIAN_PACKAGES_X86="libquadmath0"
+
+. ${SCRIPT_DIR}/sysroot-creator.sh
diff --git a/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh b/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh
new file mode 100755
index 0000000..9a4d1bf
--- /dev/null
+++ b/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh
@@ -0,0 +1,177 @@
+#!/bin/sh
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+SCRIPT_DIR=$(dirname $0)
+
+DISTRO=debian
+DIST=wheezy
+APT_REPO=http://http.us.debian.org/debian
+REPO_BASEDIR="${APT_REPO}/dists/${DIST}"
+KEYRING_FILE=/usr/share/keyrings/debian-archive-keyring.gpg
+
+# Sysroot packages: these are the packages needed to build Chrome.
+# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated
+# by running this script in GeneratePackageList mode.
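+# (See sysroot-creator-test.sh for example list-update invocations, e.g.
+# "sysroot-creator-wheezy.sh UpdatePackageListsAmd64".)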
+DEBIAN_PACKAGES="\
+  comerr-dev \
+  gcc-4.6 \
+  krb5-multidev \
+  libasound2 \
+  libasound2-dev \
+  libatk1.0-0 \
+  libatk1.0-dev \
+  libattr1 \
+  libavahi-client3 \
+  libavahi-common3 \
+  libc6 \
+  libc6-dev \
+  libcairo2 \
+  libcairo2-dev \
+  libcairo-gobject2 \
+  libcairo-script-interpreter2 \
+  libcap-dev \
+  libcap2 \
+  libcomerr2 \
+  libcups2 \
+  libcups2-dev \
+  libdbus-1-3 \
+  libdbus-1-dev \
+  libdbus-glib-1-2 \
+  libdrm2 \
+  libelf1 \
+  libelf-dev \
+  libexif12 \
+  libexif-dev \
+  libexpat1 \
+  libexpat1-dev \
+  libffi5 \
+  libfontconfig1 \
+  libfontconfig1-dev \
+  libfreetype6 \
+  libfreetype6-dev \
+  libgcc1 \
+  libgconf-2-4 \
+  libgconf2-4 \
+  libgconf2-dev \
+  libgcrypt11 \
+  libgcrypt11-dev \
+  libgdk-pixbuf2.0-0 \
+  libgdk-pixbuf2.0-dev \
+  libgl1-mesa-dev \
+  libgl1-mesa-glx \
+  libglapi-mesa \
+  libglib2.0-0 \
+  libglib2.0-dev \
+  libgnome-keyring0 \
+  libgnome-keyring-dev \
+  libgnutls26 \
+  libgnutls-dev \
+  libgnutls-openssl27 \
+  libgnutlsxx27 \
+  libgomp1 \
+  libgpg-error0 \
+  libgpg-error-dev \
+  libgssapi-krb5-2 \
+  libgssrpc4 \
+  libgtk2.0-0 \
+  libgtk2.0-dev \
+  libk5crypto3 \
+  libkadm5clnt-mit8 \
+  libkadm5srv-mit8 \
+  libkdb5-6 \
+  libkeyutils1 \
+  libkrb5-3 \
+  libkrb5-dev \
+  libkrb5support0 \
+  libnspr4 \
+  libnspr4-dev \
+  libnss3 \
+  libnss3-dev \
+  libnss-db \
+  liborbit2 \
+  libp11-2 \
+  libp11-kit0 \
+  libpam0g \
+  libpam0g-dev \
+  libpango1.0-0 \
+  libpango1.0-dev \
+  libpci3 \
+  libpci-dev \
+  libpcre3 \
+  libpcre3-dev \
+  libpcrecpp0 \
+  libpixman-1-0 \
+  libpixman-1-dev \
+  libpng12-0 \
+  libpng12-dev \
+  libpulse0 \
+  libpulse-dev \
+  libpulse-mainloop-glib0 \
+  libselinux1 \
+  libspeechd2 \
+  libspeechd-dev \
+  libssl1.0.0 \
+  libssl-dev \
+  libstdc++6 \
+  libstdc++6-4.6-dev \
+  libtasn1-3 \
+  libx11-6 \
+  libx11-dev \
+  libx11-xcb1 \
+  libxau6 \
+  libxau-dev \
+  libxcb1 \
+  libxcb1-dev \
+  libxcb-glx0 \
+  libxcb-render0 \
+  libxcb-render0-dev \
+  libxcb-shm0 \
+  libxcb-shm0-dev \
+  libxcomposite1 \
+  libxcomposite-dev \
+  libxcursor1 \
+  libxcursor-dev \
+  libxdamage1 \
+  libxdamage-dev \
+  libxdmcp6 \
+  libxext6 \
+  libxext-dev \
+  libxfixes3 \
+  libxfixes-dev \
+  libxi6 \
+  libxi-dev \
+  libxinerama1 \
+  libxinerama-dev \
+  libxrandr2 \
+  libxrandr-dev \
+  libxrender1 \
+  libxrender-dev \
+  libxss1 \
+  libxss-dev \
+  libxt6 \
+  libxt-dev \
+  libxtst6 \
+  libxtst-dev \
+  libxxf86vm1 \
+  linux-libc-dev \
+  mesa-common-dev \
+  speech-dispatcher \
+  x11proto-composite-dev \
+  x11proto-core-dev \
+  x11proto-damage-dev \
+  x11proto-fixes-dev \
+  x11proto-input-dev \
+  x11proto-kb-dev \
+  x11proto-randr-dev \
+  x11proto-record-dev \
+  x11proto-render-dev \
+  x11proto-scrnsaver-dev \
+  x11proto-xext-dev \
+  zlib1g \
+  zlib1g-dev"
+
+DEBIAN_PACKAGES_X86="libquadmath0"
+
+. ${SCRIPT_DIR}/sysroot-creator.sh
diff --git a/build/linux/sysroot_scripts/sysroot-creator.sh b/build/linux/sysroot_scripts/sysroot-creator.sh
new file mode 100644
index 0000000..822a5e8
--- /dev/null
+++ b/build/linux/sysroot_scripts/sysroot-creator.sh
@@ -0,0 +1,700 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This script should not be run directly but sourced by the other
+# scripts (e.g. sysroot-creator-trusty.sh).  It's up to the parent scripts
+# to define certain environment variables: e.g.
+#  DISTRO=ubuntu
+#  DIST=trusty
+#  APT_REPO=http://archive.ubuntu.com/ubuntu
+#  KEYRING_FILE=/usr/share/keyrings/ubuntu-archive-keyring.gpg
+#  DEBIAN_PACKAGES="gcc libz libssl"
+
+#@ This script builds a Debian sysroot image for building Google Chrome.
+#@
+#@  Generally this script is invoked as:
+#@  sysroot-creator-<flavour>.sh <mode> <args>*
+#@  Available modes are shown below.
+#@
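+#@  For example (see sysroot-creator-test.sh):
+#@    sysroot-creator-trusty.sh UpdatePackageListsAmd64
+#@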
+#@ List of modes:
+
+######################################################################
+# Config
+######################################################################
+
+set -o nounset
+set -o errexit
+
+SCRIPT_DIR=$(cd $(dirname $0) && pwd)
+
+if [ -z "${DIST:-}" ]; then
+  echo "error: DIST not defined"
+  exit 1
+fi
+
+if [ -z "${APT_REPO:-}" ]; then
+  echo "error: APT_REPO not defined"
+  exit 1
+fi
+
+if [ -z "${KEYRING_FILE:-}" ]; then
+  echo "error: KEYRING_FILE not defined"
+  exit 1
+fi
+
+if [ -z "${DEBIAN_PACKAGES:-}" ]; then
+  echo "error: DEBIAN_PACKAGES not defined"
+  exit 1
+fi
+
+readonly REPO_BASEDIR="${APT_REPO}/dists/${DIST}"
+
+readonly REQUIRED_TOOLS="wget"
+
+######################################################################
+# Package Config
+######################################################################
+
+readonly RELEASE_FILE="Release"
+readonly RELEASE_FILE_GPG="Release.gpg"
+readonly RELEASE_LIST="${REPO_BASEDIR}/${RELEASE_FILE}"
+readonly RELEASE_LIST_GPG="${REPO_BASEDIR}/${RELEASE_FILE_GPG}"
+readonly PACKAGE_FILE_AMD64="main/binary-amd64/Packages.bz2"
+readonly PACKAGE_FILE_I386="main/binary-i386/Packages.bz2"
+readonly PACKAGE_FILE_ARM="main/binary-armhf/Packages.bz2"
+readonly PACKAGE_FILE_MIPS="main/binary-mipsel/Packages.bz2"
+readonly PACKAGE_LIST_AMD64="${REPO_BASEDIR}/${PACKAGE_FILE_AMD64}"
+readonly PACKAGE_LIST_I386="${REPO_BASEDIR}/${PACKAGE_FILE_I386}"
+readonly PACKAGE_LIST_ARM="${REPO_BASEDIR}/${PACKAGE_FILE_ARM}"
+readonly PACKAGE_LIST_MIPS="${REPO_BASEDIR}/${PACKAGE_FILE_MIPS}"
+
+readonly DEBIAN_DEP_LIST_AMD64="packagelist.${DIST}.amd64"
+readonly DEBIAN_DEP_LIST_I386="packagelist.${DIST}.i386"
+readonly DEBIAN_DEP_LIST_ARM="packagelist.${DIST}.arm"
+readonly DEBIAN_DEP_LIST_MIPS="packagelist.${DIST}.mipsel"
+
+######################################################################
+# Helper
+######################################################################
+
+Banner() {
+  echo "######################################################################"
+  echo $*
+  echo "######################################################################"
+}
+
+
+SubBanner() {
+  echo "----------------------------------------------------------------------"
+  echo $*
+  echo "----------------------------------------------------------------------"
+}
+
+
+Usage() {
+  egrep "^#@" "${BASH_SOURCE[0]}" | cut --bytes=3-
+}
+
+
+DownloadOrCopy() {
+  if [ -f "$2" ] ; then
+    echo "$2 already in place"
+    return
+  fi
+
+  HTTP=0
+  echo "$1" | grep -qs ^http:// && HTTP=1
+  if [ "$HTTP" = "1" ]; then
+    SubBanner "downloading from $1 -> $2"
+    wget "$1" -O "${2}.partial"
+    mv "${2}.partial" $2
+  else
+    SubBanner "copying from $1"
+    cp "$1" "$2"
+  fi
+}
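+# Usage sketch: DownloadOrCopy "${RELEASE_LIST}" "${BUILD_DIR}/Release"
+# fetches the Release index over HTTP; a non-http first argument is
+# treated as a local path and copied instead.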
+
+
+SetEnvironmentVariables() {
+  ARCH=""
+  echo $1 | grep -qs Amd64$ && ARCH=AMD64
+  if [ -z "$ARCH" ]; then
+    echo $1 | grep -qs I386$ && ARCH=I386
+  fi
+  if [ -z "$ARCH" ]; then
+    echo $1 | grep -qs Mips$ && ARCH=MIPS
+  fi
+  if [ -z "$ARCH" ]; then
+    echo $1 | grep -qs ARM$ && ARCH=ARM
+  fi
+  if [ -z "${ARCH}" ]; then
+    echo "ERROR: Unable to determine architecture based on: $1"
+    exit 1
+  fi
+  ARCH_LOWER=$(echo $ARCH | tr '[:upper:]' '[:lower:]')
+}
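+# Example: "SetEnvironmentVariables BuildSysrootAmd64" yields ARCH=AMD64
+# and ARCH_LOWER=amd64; mode names therefore must end in one of Amd64,
+# I386, Mips or ARM.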
+
+
+# some sanity checks to make sure this script is run from the right place
+# with the right tools
+SanityCheck() {
+  Banner "Sanity Checks"
+
+  local chrome_dir=$(cd "${SCRIPT_DIR}/../../../.." && pwd)
+  BUILD_DIR="${chrome_dir}/out/sysroot-build/${DIST}"
+  mkdir -p ${BUILD_DIR}
+  echo "Using build directory: ${BUILD_DIR}"
+
+  for tool in ${REQUIRED_TOOLS} ; do
+    if ! which ${tool} > /dev/null ; then
+      echo "Required binary $tool not found."
+      echo "Exiting."
+      exit 1
+    fi
+  done
+
+  # This is where the staging sysroot is.
+  INSTALL_ROOT="${BUILD_DIR}/${DIST}_${ARCH_LOWER}_staging"
+  TARBALL="${BUILD_DIR}/${DISTRO}_${DIST}_${ARCH_LOWER}_sysroot.tgz"
+
+  if ! mkdir -p "${INSTALL_ROOT}" ; then
+    echo "ERROR: ${INSTALL_ROOT} can't be created."
+    exit 1
+  fi
+}
+
+
+ChangeDirectory() {
+  # Change directory to where this script is.
+  cd ${SCRIPT_DIR}
+}
+
+
+ClearInstallDir() {
+  Banner "Clearing dirs in ${INSTALL_ROOT}"
+  rm -rf ${INSTALL_ROOT}/*
+}
+
+
+CreateTarBall() {
+  Banner "Creating tarball ${TARBALL}"
+  tar zcf ${TARBALL} -C ${INSTALL_ROOT} .
+}
+
+ExtractPackageBz2() {
+  bzcat "$1" | egrep '^(Package:|Filename:|SHA256:) ' > "$2"
+}
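+# The filtered output keeps only stanza lines of these three forms
+# (values illustrative):
+#   Package: libssl-dev
+#   Filename: pool/main/o/openssl/libssl-dev_1.0.1f-1ubuntu2_amd64.deb
+#   SHA256: <64 hex digits>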
+
+GeneratePackageListAmd64() {
+  local output_file="$1"
+  local package_list="${BUILD_DIR}/Packages.${DIST}_amd64.bz2"
+  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_amd64"
+  DownloadOrCopy "${PACKAGE_LIST_AMD64}" "${package_list}"
+  VerifyPackageListing "${PACKAGE_FILE_AMD64}" "${package_list}"
+  ExtractPackageBz2 "$package_list" "$tmp_package_list"
+  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_X86}"
+}
+
+GeneratePackageListI386() {
+  local output_file="$1"
+  local package_list="${BUILD_DIR}/Packages.${DIST}_i386.bz2"
+  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_i386"
+  DownloadOrCopy "${PACKAGE_LIST_I386}" "${package_list}"
+  VerifyPackageListing "${PACKAGE_FILE_I386}" "${package_list}"
+  ExtractPackageBz2 "$package_list" "$tmp_package_list"
+  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_X86}"
+}
+
+GeneratePackageListARM() {
+  local output_file="$1"
+  local package_list="${BUILD_DIR}/Packages.${DIST}_arm.bz2"
+  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_arm"
+  DownloadOrCopy "${PACKAGE_LIST_ARM}" "${package_list}"
+  VerifyPackageListing "${PACKAGE_FILE_ARM}" "${package_list}"
+  ExtractPackageBz2 "$package_list" "$tmp_package_list"
+  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}"
+}
+
+GeneratePackageListMips() {
+  local output_file="$1"
+  local package_list="${BUILD_DIR}/Packages.${DIST}_mips.bz2"
+  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_mips"
+  DownloadOrCopy "${PACKAGE_LIST_MIPS}" "${package_list}"
+  VerifyPackageListing "${PACKAGE_FILE_MIPS}" "${package_list}"
+  ExtractPackageBz2 "$package_list" "$tmp_package_list"
+  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}"
+}
+
+StripChecksumsFromPackageList() {
+  local package_file="$1"
+  sed -i 's/ [a-f0-9]\{64\}$//' "$package_file"
+}
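+# Example: a list line such as (illustrative)
+#   main/o/openssl/libssl-dev_1.0.1f-1ubuntu2_amd64.deb <sha256>
+# is reduced in place to just the pool-relative .deb path; the sed above
+# drops the trailing 64-hex-digit checksum.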
+
+VerifyPackageFilesMatch() {
+  local downloaded_package_file="$1"
+  local stored_package_file="$2"
+  # With errexit in effect, test diff's status directly so a mismatch
+  # reaches the error message below instead of aborting the script.
+  if ! diff -u "$downloaded_package_file" "$stored_package_file" ; then
+    echo "ERROR: downloaded package file does not match $2."
+    echo "You may need to run UpdatePackageLists."
+    exit 1
+  fi
+}
+
+######################################################################
+#
+######################################################################
+
+HacksAndPatchesAmd64() {
+  Banner "Misc Hacks & Patches"
+  # these are linker scripts with absolute pathnames in them
+  # which we rewrite here
+  lscripts="${INSTALL_ROOT}/usr/lib/x86_64-linux-gnu/libpthread.so \
+            ${INSTALL_ROOT}/usr/lib/x86_64-linux-gnu/libc.so"
+
+  # Rewrite linker scripts
+  sed -i -e 's|/usr/lib/x86_64-linux-gnu/||g'  ${lscripts}
+  sed -i -e 's|/lib/x86_64-linux-gnu/||g' ${lscripts}
+
+  # This is for chrome's ./build/linux/pkg-config-wrapper
+  # which overwrites PKG_CONFIG_PATH internally
+  SubBanner "Package Configs Symlink"
+  mkdir -p ${INSTALL_ROOT}/usr/share
+  ln -s ../lib/x86_64-linux-gnu/pkgconfig ${INSTALL_ROOT}/usr/share/pkgconfig
+
+  SubBanner "Adding an additional ld.conf include"
+  LD_SO_HACK_CONF="${INSTALL_ROOT}/etc/ld.so.conf.d/zz_hack.conf"
+  echo /usr/lib/gcc/x86_64-linux-gnu/4.6 > "$LD_SO_HACK_CONF"
+  echo /usr/lib >> "$LD_SO_HACK_CONF"
+}
+
+
+HacksAndPatchesI386() {
+  Banner "Misc Hacks & Patches"
+  # these are linker scripts with absolute pathnames in them
+  # which we rewrite here
+  lscripts="${INSTALL_ROOT}/usr/lib/i386-linux-gnu/libpthread.so \
+            ${INSTALL_ROOT}/usr/lib/i386-linux-gnu/libc.so"
+
+  # Rewrite linker scripts
+  sed -i -e 's|/usr/lib/i386-linux-gnu/||g'  ${lscripts}
+  sed -i -e 's|/lib/i386-linux-gnu/||g' ${lscripts}
+
+  # This is for chrome's ./build/linux/pkg-config-wrapper
+  # which overwrites PKG_CONFIG_PATH internally
+  SubBanner "Package Configs Symlink"
+  mkdir -p ${INSTALL_ROOT}/usr/share
+  ln -s ../lib/i386-linux-gnu/pkgconfig ${INSTALL_ROOT}/usr/share/pkgconfig
+
+  SubBanner "Adding an additional ld.conf include"
+  LD_SO_HACK_CONF="${INSTALL_ROOT}/etc/ld.so.conf.d/zz_hack.conf"
+  echo /usr/lib/gcc/i486-linux-gnu/4.6 > "$LD_SO_HACK_CONF"
+  echo /usr/lib >> "$LD_SO_HACK_CONF"
+}
+
+
+HacksAndPatchesARM() {
+  Banner "Misc Hacks & Patches"
+  # these are linker scripts with absolute pathnames in them
+  # which we rewrite here
+  lscripts="${INSTALL_ROOT}/usr/lib/arm-linux-gnueabihf/libpthread.so \
+            ${INSTALL_ROOT}/usr/lib/arm-linux-gnueabihf/libc.so"
+
+  # Rewrite linker scripts
+  sed -i -e 's|/usr/lib/arm-linux-gnueabihf/||g' ${lscripts}
+  sed -i -e 's|/lib/arm-linux-gnueabihf/||g' ${lscripts}
+
+  # This is for chrome's ./build/linux/pkg-config-wrapper
+  # which overwrites PKG_CONFIG_PATH internally
+  SubBanner "Package Configs Symlink"
+  mkdir -p ${INSTALL_ROOT}/usr/share
+  ln -s ../lib/arm-linux-gnueabihf/pkgconfig ${INSTALL_ROOT}/usr/share/pkgconfig
+}
+
+
+HacksAndPatchesMips() {
+  Banner "Misc Hacks & Patches"
+  # these are linker scripts with absolute pathnames in them
+  # which we rewrite here
+  lscripts="${INSTALL_ROOT}/usr/lib/mipsel-linux-gnu/libpthread.so \
+            ${INSTALL_ROOT}/usr/lib/mipsel-linux-gnu/libc.so"
+
+  # Rewrite linker scripts
+  sed -i -e 's|/usr/lib/mipsel-linux-gnu/||g' ${lscripts}
+  sed -i -e 's|/lib/mipsel-linux-gnu/||g' ${lscripts}
+
+  # This is for chrome's ./build/linux/pkg-config-wrapper
+  # which overwrites PKG_CONFIG_PATH internally
+  SubBanner "Package Configs Symlink"
+  mkdir -p ${INSTALL_ROOT}/usr/share
+  ln -s ../lib/mipsel-linux-gnu/pkgconfig ${INSTALL_ROOT}/usr/share/pkgconfig
+}
+
+
+InstallIntoSysroot() {
+  Banner "Install Libs And Headers Into Jail"
+
+  mkdir -p ${BUILD_DIR}/debian-packages
+  mkdir -p ${INSTALL_ROOT}
+  while (( "$#" )); do
+    local file="$1"
+    local package="${BUILD_DIR}/debian-packages/${file##*/}"
+    shift
+    local sha256sum="$1"
+    shift
+    if [ "${#sha256sum}" -ne "64" ]; then
+      echo "Bad sha256sum from package list"
+      exit 1
+    fi
+
+    Banner "Installing ${file}"
+    DownloadOrCopy ${APT_REPO}/pool/${file} ${package}
+    if [ ! -s "${package}" ] ; then
+      echo
+      echo "ERROR: bad package ${package}"
+      exit 1
+    fi
+    echo "${sha256sum}  ${package}" | sha256sum --quiet -c
+
+    SubBanner "Extracting to ${INSTALL_ROOT}"
+    dpkg --fsys-tarfile ${package}\
+      | tar -xf - --exclude=./usr/share -C ${INSTALL_ROOT}
+  done
+}
+
+
+CleanupJailSymlinks() {
+  Banner "Jail symlink cleanup"
+
+  SAVEDPWD=$(pwd)
+  cd ${INSTALL_ROOT}
+  local libdirs="lib usr/lib"
+  if [ "${ARCH}" != "MIPS" ]; then
+    libdirs+=" lib64"
+  fi
+  find $libdirs -type l -printf '%p %l\n' | while read link target; do
+    # skip links with non-absolute paths
+    echo "${target}" | grep -qs ^/ || continue
+    echo "${link}: ${target}"
+    case "${link}" in
+      usr/lib/gcc/x86_64-linux-gnu/4.*/* | usr/lib/gcc/i486-linux-gnu/4.*/* | \
+      usr/lib/gcc/arm-linux-gnueabihf/4.*/* | \
+      usr/lib/gcc/mipsel-linux-gnu/4.*/*)
+        # Relativize the symlink.
+        ln -snfv "../../../../..${target}" "${link}"
+        ;;
+      usr/lib/x86_64-linux-gnu/* | usr/lib/i386-linux-gnu/* | \
+      usr/lib/arm-linux-gnueabihf/* | usr/lib/mipsel-linux-gnu/* )
+        # Relativize the symlink.
+        ln -snfv "../../..${target}" "${link}"
+        ;;
+      usr/lib/*)
+        # Relativize the symlink.
+        ln -snfv "../..${target}" "${link}"
+        ;;
+      lib64/* | lib/*)
+        # Relativize the symlink.
+        ln -snfv "..${target}" "${link}"
+        ;;
+    esac
+  done
+
+  find $libdirs -type l -printf '%p %l\n' | while read link target; do
+    # Make sure we catch new bad links.
+    if [ ! -r "${link}" ]; then
+      echo "ERROR: FOUND BAD LINK ${link}"
+      ls -l ${link}
+      exit 1
+    fi
+  done
+  cd "$SAVEDPWD"
+}
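+# Illustrative effect of the cleanup above: a packaged link such as
+#   usr/lib/x86_64-linux-gnu/libz.so -> /lib/x86_64-linux-gnu/libz.so.1
+# is rewritten to
+#   usr/lib/x86_64-linux-gnu/libz.so -> ../../../lib/x86_64-linux-gnu/libz.so.1
+# so the staged sysroot resolves its own files wherever it is unpacked
+# (libz here is an assumed example, not a special case).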
+
+#@
+#@ BuildSysrootAmd64
+#@
+#@    Build everything and package it
+BuildSysrootAmd64() {
+  ClearInstallDir
+  local package_file="$BUILD_DIR/package_with_sha256sum_amd64"
+  GeneratePackageListAmd64 "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_AMD64"
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesAmd64
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootI386
+#@
+#@    Build everything and package it
+BuildSysrootI386() {
+  ClearInstallDir
+  local package_file="$BUILD_DIR/package_with_sha256sum_i386"
+  GeneratePackageListI386 "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_I386"
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesI386
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootARM
+#@
+#@    Build everything and package it
+BuildSysrootARM() {
+  ClearInstallDir
+  local package_file="$BUILD_DIR/package_with_sha256sum_arm"
+  GeneratePackageListARM "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_ARM"
+  APT_REPO=${APT_REPO_ARM:=$APT_REPO}
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesARM
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootMips
+#@
+#@    Build everything and package it
+BuildSysrootMips() {
+  ClearInstallDir
+  local package_file="$BUILD_DIR/package_with_sha256sum_mips"
+  GeneratePackageListMips "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_MIPS"
+  APT_REPO=${APT_REPO_MIPS:=$APT_REPO}
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesMips
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootAll
+#@
+#@    Build sysroot images for all architectures
+BuildSysrootAll() {
+  RunCommand BuildSysrootAmd64
+  RunCommand BuildSysrootI386
+  RunCommand BuildSysrootARM
+  RunCommand BuildSysrootMips
+}
+
+UploadSysroot() {
+  local rev=$1
+  if [ -z "${rev}" ]; then
+    echo "Please specify a revision to upload at."
+    exit 1
+  fi
+  set -x
+  gsutil cp -a public-read "${TARBALL}" \
+      "gs://chrome-linux-sysroot/toolchain/$rev/"
+  set +x
+}
+
+#@
+#@ UploadSysrootAmd64 <revision>
+#@
+UploadSysrootAmd64() {
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootI386 <revision>
+#@
+UploadSysrootI386() {
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootARM <revision>
+#@
+UploadSysrootARM() {
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootMips <revision>
+#@
+UploadSysrootMips() {
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootAll <revision>
+#@
+#@    Upload sysroot image for all architectures
+UploadSysrootAll() {
+  RunCommand UploadSysrootAmd64 "$@"
+  RunCommand UploadSysrootI386 "$@"
+  RunCommand UploadSysrootARM "$@"
+  RunCommand UploadSysrootMips "$@"
+}
+
+#
+# CheckForDebianGPGKeyring
+#
+#     Make sure the Debian GPG keys exist. Otherwise print a helpful message.
+#
+CheckForDebianGPGKeyring() {
+  if [ ! -e "$KEYRING_FILE" ]; then
+    echo "Debian GPG keys missing. Install the debian-archive-keyring package."
+    exit 1
+  fi
+}
+
+#
+# VerifyPackageListing
+#
+#     Verifies that the downloaded Packages.bz2 file has the right SHA256
+#     checksum.
+#
+VerifyPackageListing() {
+  local file_path=$1
+  local output_file=$2
+  local release_file="${BUILD_DIR}/${RELEASE_FILE}"
+  local release_file_gpg="${BUILD_DIR}/${RELEASE_FILE_GPG}"
+  local tmp_keyring_file="${BUILD_DIR}/keyring.gpg"
+
+  CheckForDebianGPGKeyring
+
+  DownloadOrCopy ${RELEASE_LIST} ${release_file}
+  DownloadOrCopy ${RELEASE_LIST_GPG} ${release_file_gpg}
+  echo "Verifying: ${release_file} with ${release_file_gpg}"
+  cp "${KEYRING_FILE}" "${tmp_keyring_file}"
+  gpg --primary-keyring "${tmp_keyring_file}" --recv-keys 2B90D010
+  gpgv --keyring "${tmp_keyring_file}" "${release_file_gpg}" "${release_file}"
+
+  echo "Verifying: ${output_file}"
+  local checksums=$(grep ${file_path} ${release_file} | cut -d " " -f 2)
+  local sha256sum=$(echo ${checksums} | cut -d " " -f 3)
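+  # The Release file lists each index once per hash section as
+  # "<checksum> <size> <path>", so the grep above typically matches one
+  # line each under MD5Sum, SHA1 and SHA256, and the cuts pick out the
+  # third (SHA256) value.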
+
+  if [ "${#sha256sum}" -ne "64" ]; then
+    echo "Bad sha256sum from ${RELEASE_LIST}"
+    exit 1
+  fi
+
+  echo "${sha256sum}  ${output_file}" | sha256sum --quiet -c
+}
+
+#
+# GeneratePackageList
+#
+#     Looks up package names in ${BUILD_DIR}/Packages and writes a list of
+#     pool-relative paths and checksums to the output file.
+#
+GeneratePackageList() {
+  local input_file="$1"
+  local output_file="$2"
+  echo "Updating: ${output_file} from ${input_file}"
+  /bin/rm -f "${output_file}"
+  shift
+  shift
+  for pkg in $@ ; do
+    local pkg_full=$(grep -A 1 " ${pkg}\$" "$input_file" | \
+      egrep -o "pool/.*")
+    if [ -z "${pkg_full}" ]; then
+        echo "ERROR: missing package: $pkg"
+        exit 1
+    fi
+    local pkg_nopool=$(echo "$pkg_full" | sed "s/^pool\///")
+    local sha256sum=$(grep -A 4 " ${pkg}\$" "$input_file" | \
+      grep ^SHA256: | sed 's/^SHA256: //')
+    if [ "${#sha256sum}" -ne "64" ]; then
+      echo "Bad sha256sum from Packages"
+      exit 1
+    fi
+    echo $pkg_nopool $sha256sum >> "$output_file"
+  done
+  # sort -o does an in-place sort of this file
+  sort "$output_file" -o "$output_file"
+}
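+# Each line written above has the form (values illustrative):
+#   main/o/openssl/libssl-dev_1.0.1f-1ubuntu2_amd64.deb <sha256>
+# which is exactly what InstallIntoSysroot consumes as file/checksum pairs.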
+
+#@
+#@ UpdatePackageListsAmd64
+#@
+#@     Regenerate the package lists such that they contain an up-to-date
+#@     list of URLs within the Debian archive. (For amd64)
+UpdatePackageListsAmd64() {
+  GeneratePackageListAmd64 "$DEBIAN_DEP_LIST_AMD64"
+  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_AMD64"
+}
+
+#@
+#@ UpdatePackageListsI386
+#@
+#@     Regenerate the package lists such that they contain an up-to-date
+#@     list of URLs within the Debian archive. (For i386)
+UpdatePackageListsI386() {
+  GeneratePackageListI386 "$DEBIAN_DEP_LIST_I386"
+  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_I386"
+}
+
+#@
+#@ UpdatePackageListsARM
+#@
+#@     Regenerate the package lists such that they contain an up-to-date
+#@     list of URLs within the Debian archive. (For arm)
+UpdatePackageListsARM() {
+  GeneratePackageListARM "$DEBIAN_DEP_LIST_ARM"
+  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_ARM"
+}
+
+#@
+#@ UpdatePackageListsMips
+#@
+#@     Regenerate the package lists such that they contain an up-to-date
+#@     list of URLs within the Debian archive. (For mipsel)
+UpdatePackageListsMips() {
+  GeneratePackageListMips "$DEBIAN_DEP_LIST_MIPS"
+  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_MIPS"
+}
+
+#@
+#@ UpdatePackageListsAll
+#@
+#@    Regenerate the package lists for all architectures.
+UpdatePackageListsAll() {
+  RunCommand UpdatePackageListsAmd64
+  RunCommand UpdatePackageListsI386
+  RunCommand UpdatePackageListsARM
+  RunCommand UpdatePackageListsMips
+}
+
+RunCommand() {
+  SetEnvironmentVariables "$1"
+  SanityCheck
+  "$@"
+}
+
+if [ $# -eq 0 ] ; then
+  echo "ERROR: you must specify a mode on the commandline"
+  echo
+  Usage
+  exit 1
+elif [ "$(type -t $1)" != "function" ]; then
+  echo "ERROR: unknown function '$1'." >&2
+  echo "For help, try:"
+  echo "    $0 help"
+  exit 1
+else
+  ChangeDirectory
+  if echo $1 | grep -qs "All$"; then
+    "$@"
+  else
+    RunCommand "$@"
+  fi
+fi
diff --git a/build/linux/system.gyp b/build/linux/system.gyp
new file mode 100644
index 0000000..ab20951
--- /dev/null
+++ b/build/linux/system.gyp
@@ -0,0 +1,1222 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # If any of the linux_link_FOO below are set to 1, then the corresponding
+    # target will be linked against the FOO library (either dynamically or
+    # statically, depending on the pkg-config files), as opposed to loading the
+    # FOO library dynamically with dlopen.
+    'linux_link_libgps%': 0,
+    'linux_link_libpci%': 0,
+    'linux_link_libspeechd%': 0,
+    'linux_link_libbrlapi%': 0,
+
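+    # For example, a packager wanting to link libpci directly could set
+    # (hypothetical invocation):
+    #   GYP_DEFINES="linux_link_libpci=1"
+    # before generating build files.
+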
+    # Used below for the various libraries. In this scope for sharing with GN.
+    'libbrlapi_functions': [
+      'brlapi_getHandleSize',
+      'brlapi_error_location',
+      'brlapi_strerror',
+      'brlapi__acceptKeys',
+      'brlapi__openConnection',
+      'brlapi__closeConnection',
+      'brlapi__getDisplaySize',
+      'brlapi__enterTtyModeWithPath',
+      'brlapi__leaveTtyMode',
+      'brlapi__writeDots',
+      'brlapi__readKey',
+    ],
+    'libgio_functions': [
+      'g_settings_new',
+      'g_settings_get_child',
+      'g_settings_get_string',
+      'g_settings_get_boolean',
+      'g_settings_get_int',
+      'g_settings_get_strv',
+      'g_settings_list_schemas',
+    ],
+    'libpci_functions': [
+      'pci_alloc',
+      'pci_init',
+      'pci_cleanup',
+      'pci_scan_bus',
+      'pci_fill_info',
+      'pci_lookup_name',
+    ],
+    'libudev_functions': [
+      'udev_device_get_action',
+      'udev_device_get_devnode',
+      'udev_device_get_parent',
+      'udev_device_get_parent_with_subsystem_devtype',
+      'udev_device_get_property_value',
+      'udev_device_get_subsystem',
+      'udev_device_get_sysattr_value',
+      'udev_device_get_sysname',
+      'udev_device_get_syspath',
+      'udev_device_new_from_devnum',
+      'udev_device_new_from_subsystem_sysname',
+      'udev_device_new_from_syspath',
+      'udev_device_unref',
+      'udev_enumerate_add_match_subsystem',
+      'udev_enumerate_get_list_entry',
+      'udev_enumerate_new',
+      'udev_enumerate_scan_devices',
+      'udev_enumerate_unref',
+      'udev_list_entry_get_next',
+      'udev_list_entry_get_name',
+      'udev_monitor_enable_receiving',
+      'udev_monitor_filter_add_match_subsystem_devtype',
+      'udev_monitor_get_fd',
+      'udev_monitor_new_from_netlink',
+      'udev_monitor_receive_device',
+      'udev_monitor_unref',
+      'udev_new',
+      'udev_set_log_fn',
+      'udev_set_log_priority',
+      'udev_unref',
+    ],
+  },
+  'conditions': [
+    [ 'chromeos==0 and use_ozone==0', {
+      # Hide GTK and related dependencies on Chrome OS and Ozone so they
+      # don't get pulled back in; GTK is not used on those platforms.
+      'targets': [
+        {
+          'target_name': 'atk',
+          'type': 'none',
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags atk)',
+                ],
+                'defines': [
+                  'ATK_LIB_DIR="<!@(<(pkg-config) --variable=libdir atk)"',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other atk)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l atk)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'gdk',
+          'type': 'none',
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags gdk-2.0)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gdk-2.0)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gdk-2.0)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'gtk',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'variables': {
+            # gtk requires gmodule, but it does not list it as a dependency
+            # on some misconfigured systems.
+            'gtk_packages': 'gmodule-2.0 gtk+-2.0 gthread-2.0',
+          },
+          'conditions': [
+            ['_toolset=="target"', {
+              'all_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags <(gtk_packages))',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other <(gtk_packages))',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l <(gtk_packages))',
+                ],
+              },
+            }, {
+              'all_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags <(gtk_packages))',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other <(gtk_packages))',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l <(gtk_packages))',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'gtkprint',
+          'type': 'none',
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags gtk+-unix-print-2.0)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gtk+-unix-print-2.0)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gtk+-unix-print-2.0)',
+                ],
+              },
+            }],
+          ],
+        },
+      ],  # targets
+    }],
+    [ 'use_x11==1 or ozone_platform_ozonex==1', {
+      # Hide X11 and related dependencies when use_x11=0
+      'targets': [
+        {
+          'target_name': 'x11',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags x11)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other x11 xi)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l x11 xi)',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags x11)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other x11 xi)',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l x11 xi)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'xcursor',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xcursor)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xcursor)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xcursor)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xcomposite',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xcomposite)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xcomposite)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xcomposite)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xdamage',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xdamage)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xdamage)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xdamage)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xext',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xext)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xext)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xext)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xfixes',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xfixes)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xfixes)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xfixes)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xi',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xi)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xi)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xi)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xrandr',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags xrandr)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other xrandr)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l xrandr)',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags xrandr)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other xrandr)',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l xrandr)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'xrender',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xrender)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xrender)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xrender)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xtst',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags xtst)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other xtst)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l xtst)',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags xtst)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other xtst)',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l xtst)',
+                ],
+              },
+            }]
+          ]
+        }
+      ],  # targets
+    }],
+    ['use_x11==1 and chromeos==0', {
+      'targets': [
+        {
+          'target_name': 'xscrnsaver',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xscrnsaver)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xscrnsaver)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xscrnsaver)',
+            ],
+          },
+        },
+      ],  # targets
+    }],
+    ['use_evdev_gestures==1', {
+      'targets': [
+        {
+          'target_name': 'libevdev-cros',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags libevdev-cros)'
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other libevdev-cros)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l libevdev-cros)',
+            ],
+          },
+        },
+        {
+          'target_name': 'libgestures',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags libgestures)'
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other libgestures)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l libgestures)',
+            ],
+          },
+        },
+      ],
+    }],
+    ['use_xkbcommon==1', {
+      'targets': [
+        {
+          'target_name': 'xkbcommon',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xkbcommon)'
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xkbcommon)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xkbcommon)',
+            ],
+          },
+        },
+      ],
+    }],
+    ['ozone_platform_gbm==1', {
+      'targets': [
+        {
+          'target_name': 'gbm',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gbm)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gbm)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gbm)',
+            ],
+          },
+        },
+      ],
+    }],
+    ['ozone_platform_drm==1 or ozone_platform_gbm==1', {
+      'targets': [
+        {
+          'target_name': 'libdrm',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags libdrm)',
+            ],
+          },
+          'link_settings': {
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l libdrm)',
+            ],
+          },
+        },
+      ],
+    }],
+    ['use_udev==1', {
+      'targets': [
+        {
+          'target_name': 'udev',
+          'type': 'static_library',
+          'conditions': [
+            ['_toolset=="target"', {
+              'include_dirs': [
+                '../..',
+              ],
+              'hard_dependency': 1,
+              'actions': [
+                {
+                  'variables': {
+                    'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libudev0.h',
+                    'output_cc': '<(INTERMEDIATE_DIR)/libudev0_loader.cc',
+                    'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+                  },
+                  'action_name': 'generate_libudev0_loader',
+                  'inputs': [
+                    '<(generator)',
+                  ],
+                  'outputs': [
+                    '<(output_h)',
+                    '<(output_cc)',
+                  ],
+                  'action': ['python',
+                             '<(generator)',
+                             '--name', 'LibUdev0Loader',
+                             '--output-h', '<(output_h)',
+                             '--output-cc', '<(output_cc)',
+                             '--header', '"third_party/libudev/libudev0.h"',
+                             '--link-directly=0',
+                             '<@(libudev_functions)',
+                  ],
+                  'message': 'Generating libudev0 library loader',
+                  'process_outputs_as_sources': 1,
+                },
+                {
+                  'variables': {
+                    'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libudev1.h',
+                    'output_cc': '<(INTERMEDIATE_DIR)/libudev1_loader.cc',
+                    'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+                  },
+                  'action_name': 'generate_libudev1_loader',
+                  'inputs': [
+                    '<(generator)',
+                  ],
+                  'outputs': [
+                    '<(output_h)',
+                    '<(output_cc)',
+                  ],
+                  'action': ['python',
+                             '<(generator)',
+                             '--name', 'LibUdev1Loader',
+                             '--output-h', '<(output_h)',
+                             '--output-cc', '<(output_cc)',
+                             '--header', '"third_party/libudev/libudev1.h"',
+                             '--link-directly=0',
+                             '<@(libudev_functions)',
+                  ],
+                  'message': 'Generating libudev1 library loader',
+                  'process_outputs_as_sources': 1,
+                },
+              ],
+            }],
+          ],
+        },
+      ],
+    }],
+    ['use_libpci==1', {
+      'targets': [
+        {
+          'target_name': 'libpci',
+          'type': 'static_library',
+          'cflags': [
+            '<!@(<(pkg-config) --cflags libpci)',
+          ],
+          'direct_dependent_settings': {
+            'include_dirs': [
+              '<(SHARED_INTERMEDIATE_DIR)',
+            ],
+            'conditions': [
+              ['linux_link_libpci==1', {
+                'link_settings': {
+                  'ldflags': [
+                    '<!@(<(pkg-config) --libs-only-L --libs-only-other libpci)',
+                  ],
+                  'libraries': [
+                    '<!@(<(pkg-config) --libs-only-l libpci)',
+                  ],
+                }
+              }],
+            ],
+          },
+          'include_dirs': [
+            '../..',
+          ],
+          'hard_dependency': 1,
+          'actions': [
+            {
+              'variables': {
+                'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libpci.h',
+                'output_cc': '<(INTERMEDIATE_DIR)/libpci_loader.cc',
+                'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+              },
+              'action_name': 'generate_libpci_loader',
+              'inputs': [
+                '<(generator)',
+              ],
+              'outputs': [
+                '<(output_h)',
+                '<(output_cc)',
+              ],
+              'action': ['python',
+                         '<(generator)',
+                         '--name', 'LibPciLoader',
+                         '--output-h', '<(output_h)',
+                         '--output-cc', '<(output_cc)',
+                         '--header', '<pci/pci.h>',
+                         # TODO(phajdan.jr): Report problem to pciutils project
+                         # and get it fixed so that we don't need --use-extern-c.
+                         '--use-extern-c',
+                         '--link-directly=<(linux_link_libpci)',
+                         '<@(libpci_functions)',
+              ],
+              'message': 'Generating libpci library loader',
+              'process_outputs_as_sources': 1,
+            },
+          ],
+        },
+      ],
+    }],
+  ],  # conditions
+  'targets': [
+    {
+      'target_name': 'dbus',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(<(pkg-config) --cflags dbus-1)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(<(pkg-config) --libs-only-L --libs-only-other dbus-1)',
+        ],
+        'libraries': [
+          '<!@(<(pkg-config) --libs-only-l dbus-1)',
+        ],
+      },
+    },
+    {
+      'target_name': 'fontconfig',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['use_system_fontconfig==1', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags fontconfig)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other fontconfig)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l fontconfig)',
+                ],
+              },
+            }, {  # use_system_fontconfig==0
+              'dependencies': [
+                '../../third_party/fontconfig/fontconfig.gyp:fontconfig',
+              ],
+              'export_dependent_settings' : [
+                '../../third_party/fontconfig/fontconfig.gyp:fontconfig',
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'freetype2',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags freetype2)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other freetype2)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l freetype2)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gconf',
+      'type': 'none',
+      'conditions': [
+        ['use_gconf==1 and _toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gconf-2.0)',
+            ],
+            'defines': [
+              'USE_GCONF',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gconf-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gconf-2.0)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gio',
+      'type': 'static_library',
+      'conditions': [
+        ['use_gio==1 and _toolset=="target"', {
+          'cflags': [
+            '<!@(<(pkg-config) --cflags gio-2.0)',
+          ],
+          'variables': {
+            'gio_warning_define': [
+              # glib >=2.40 deprecate g_settings_list_schemas in favor of
+              # g_settings_schema_source_list_schemas. This function is not
+              # available on earlier versions that we still need to support
+              # (specifically, 2.32), so disable the warning.
+              # TODO(mgiuca): Remove this suppression (and variable) when we
+              # drop support for Ubuntu 13.10 (saucy) and earlier. Update the
+              # code to use g_settings_schema_source_list_schemas instead.
+              'GLIB_DISABLE_DEPRECATION_WARNINGS',
+            ],
+          },
+          'defines': [
+            '<(gio_warning_define)',
+          ],
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gio-2.0)',
+            ],
+            'defines': [
+              'USE_GIO',
+              '<(gio_warning_define)',
+            ],
+            'include_dirs': [
+              '<(SHARED_INTERMEDIATE_DIR)',
+            ],
+          },
+          'include_dirs': [
+            '../..',
+          ],
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gio-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gio-2.0)',
+            ],
+            'conditions': [
+              ['linux_link_gsettings==0 and OS=="linux"', {
+                'libraries': [
+                  '-ldl',
+                ],
+              }],
+            ],
+          },
+          'hard_dependency': 1,
+          'actions': [
+            {
+              'variables': {
+                'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libgio.h',
+                'output_cc': '<(INTERMEDIATE_DIR)/libgio_loader.cc',
+                'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+              },
+              'action_name': 'generate_libgio_loader',
+              'inputs': [
+                '<(generator)',
+              ],
+              'outputs': [
+                '<(output_h)',
+                '<(output_cc)',
+              ],
+              'action': ['python',
+                         '<(generator)',
+                         '--name', 'LibGioLoader',
+                         '--output-h', '<(output_h)',
+                         '--output-cc', '<(output_cc)',
+                         '--header', '<gio/gio.h>',
+                         '--link-directly=<(linux_link_gsettings)',
+                         '<@(libgio_functions)',
+              ],
+              'message': 'Generating libgio library loader',
+              'process_outputs_as_sources': 1,
+            },
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'glib',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'variables': {
+        'glib_packages': 'glib-2.0 gmodule-2.0 gobject-2.0 gthread-2.0',
+      },
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags <(glib_packages))',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other <(glib_packages))',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l <(glib_packages))',
+            ],
+          },
+        }, {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(pkg-config --cflags <(glib_packages))',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other <(glib_packages))',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l <(glib_packages))',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gnome_keyring',
+      'type': 'none',
+      'conditions': [
+        ['use_gnome_keyring==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+            ],
+            'defines': [
+              'USE_GNOME_KEYRING',
+            ],
+            'conditions': [
+              ['linux_link_gnome_keyring==0', {
+                'defines': ['DLOPEN_GNOME_KEYRING'],
+              }],
+            ],
+          },
+          'conditions': [
+            ['linux_link_gnome_keyring!=0', {
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+                ],
+              },
+            }, {
+              'conditions': [
+                ['OS=="linux"', {
+                 'link_settings': {
+                   'libraries': [
+                     '-ldl',
+                   ],
+                 },
+                }],
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+    {
+      # The unit tests use a few convenience functions from the GNOME
+      # Keyring library directly. We ignore linux_link_gnome_keyring and
+      # link directly in this version of the target to allow this.
+      # *** Do not use this target in the main binary! ***
+      'target_name': 'gnome_keyring_direct',
+      'type': 'none',
+      'conditions': [
+        ['use_gnome_keyring==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+            ],
+            'defines': [
+              'USE_GNOME_KEYRING',
+            ],
+            'conditions': [
+              ['linux_link_gnome_keyring==0', {
+                'defines': ['DLOPEN_GNOME_KEYRING'],
+              }],
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'libbrlapi',
+      'type': 'static_library',
+      'all_dependent_settings': {
+        'include_dirs': [
+          '<(SHARED_INTERMEDIATE_DIR)',
+        ],
+        'defines': [
+          'USE_BRLAPI',
+        ],
+        'conditions': [
+          ['linux_link_libbrlapi==1', {
+            'link_settings': {
+              'libraries': [
+                '-lbrlapi',
+              ],
+            }
+          }],
+        ],
+      },
+      'include_dirs': [
+        '../..',
+      ],
+      'hard_dependency': 1,
+      'actions': [
+        {
+          'variables': {
+            'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libbrlapi.h',
+            'output_cc': '<(INTERMEDIATE_DIR)/libbrlapi_loader.cc',
+            'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+          },
+          'action_name': 'generate_brlapi_loader',
+          'inputs': [
+            '<(generator)',
+          ],
+          'outputs': [
+            '<(output_h)',
+            '<(output_cc)',
+          ],
+          'action': ['python',
+                     '<(generator)',
+                     '--name', 'LibBrlapiLoader',
+                     '--output-h', '<(output_h)',
+                     '--output-cc', '<(output_cc)',
+                     '--header', '<brlapi.h>',
+                     '--link-directly=<(linux_link_libbrlapi)',
+                     '<@(libbrlapi_functions)',
+          ],
+          'message': 'Generating libbrlapi library loader',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    },
+    {
+      'target_name': 'libcap',
+      'type': 'none',
+      'link_settings': {
+        'libraries': [
+          '-lcap',
+        ],
+      },
+    },
+    {
+      'target_name': 'libresolv',
+      'type': 'none',
+      'link_settings': {
+        'libraries': [
+          '-lresolv',
+        ],
+      },
+    },
+    {
+      # GN version: //third_party/speech-dispatcher
+      'target_name': 'libspeechd',
+      'type': 'static_library',
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(SHARED_INTERMEDIATE_DIR)',
+        ],
+        'conditions': [
+          ['linux_link_libspeechd==1', {
+            'link_settings': {
+              'libraries': [
+                '-lspeechd',
+              ],
+            }
+          }],
+        ],
+      },
+      'include_dirs': [
+        '../..',
+      ],
+      'hard_dependency': 1,
+      'actions': [
+        {
+          'variables': {
+            'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libspeechd.h',
+            'output_cc': '<(INTERMEDIATE_DIR)/libspeechd_loader.cc',
+            'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+
+            # speech-dispatcher >= 0.8 installs libspeechd.h into
+            # speech-dispatcher/libspeechd.h, whereas speech-dispatcher < 0.8
+            # puts libspeechd.h in the top-level include directory.
+            # Since we need to support both cases for now, we ship a copy of
+            # libspeechd.h in third_party/speech-dispatcher. If the user
+            # prefers to link against speech-dispatcher directly, the
+            # `libspeechd_h_prefix' variable can be passed to gyp with a value
+            # such as "speech-dispatcher/" that will be prepended to
+            # "libspeechd.h" in the #include directive.
+            # TODO(phajdan.jr): Once we do not need to support
+            # speech-dispatcher < 0.8 we can get rid of all this (including
+            # third_party/speech-dispatcher) and just include
+            # speech-dispatcher/libspeechd.h unconditionally.
+            'libspeechd_h_prefix%': '',
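+            # Example (hypothetical invocation): generate with
+            #   gyp -Dlibspeechd_h_prefix=speech-dispatcher/
+            # so the loader includes <speech-dispatcher/libspeechd.h>.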
+          },
+          'action_name': 'generate_libspeechd_loader',
+          'inputs': [
+            '<(generator)',
+          ],
+          'outputs': [
+            '<(output_h)',
+            '<(output_cc)',
+          ],
+          'action': ['python',
+                     '<(generator)',
+                     '--name', 'LibSpeechdLoader',
+                     '--output-h', '<(output_h)',
+                     '--output-cc', '<(output_cc)',
+                     '--header', '<<(libspeechd_h_prefix)libspeechd.h>',
+                     '--bundled-header',
+                     '"third_party/speech-dispatcher/libspeechd.h"',
+                     '--link-directly=<(linux_link_libspeechd)',
+                     'spd_open',
+                     'spd_say',
+                     'spd_stop',
+                     'spd_close',
+                     'spd_pause',
+                     'spd_resume',
+                     'spd_set_notification_on',
+                     'spd_set_voice_rate',
+                     'spd_set_voice_pitch',
+                     'spd_list_synthesis_voices',
+                     'spd_set_synthesis_voice',
+                     'spd_list_modules',
+                     'spd_set_output_module',
+                     'spd_set_language',
+          ],
+          'message': 'Generating libspeechd library loader',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    },
+    {
+      'target_name': 'pangocairo',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'conditions': [
+        ['use_pango==1 and use_cairo==1', {
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags pangocairo pangoft2)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other pangocairo pangoft2)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l pangocairo pangoft2)',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags pangocairo pangoft2)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other pangocairo pangoft2)',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l pangocairo pangoft2)',
+                ],
+              },
+            }],
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'ssl',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['use_openssl==1', {
+              'dependencies': [
+                '../../third_party/boringssl/boringssl.gyp:boringssl',
+              ],
+            }, {
+              'dependencies': [
+                '../../net/third_party/nss/ssl.gyp:libssl',
+              ],
+              'direct_dependent_settings': {
+                'include_dirs+': [
+                  # We need our local copies of the libssl3 headers to come
+                  # before other includes, as we are shadowing system headers.
+                  '<(DEPTH)/net/third_party/nss/ssl',
+                ],
+              },
+            }],
+            # Link in the system NSS if it is used for either the internal
+            # crypto library (use_openssl==0) or platform certificate
+            # library (use_nss_certs==1).
+            ['use_openssl==0 or use_nss_certs==1', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags nss)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l nss | sed -e "s/-lssl3//")',
+                ],
+              },
+              'conditions': [
+                ['clang==1', {
+                  'direct_dependent_settings': {
+                    'cflags': [
+                      # There is a broken header guard in /usr/include/nss/secmod.h:
+                      # https://bugzilla.mozilla.org/show_bug.cgi?id=884072
+                      '-Wno-header-guard',
+                    ],
+                  },
+                }],
+              ],
+            }],
+          ]
+        }],
+      ],
+    },
+  ],
+}
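
To make the `libspeechd_h_prefix' knob in the loader action above concrete,
here is a minimal sketch (header_argument is an illustrative helper, not part
of the build): the leading '<' in the '--header' argument is a literal angle
bracket, so the variable simply splices into an "#include <...>" form.

  # Sketch: how '--header', '<<(libspeechd_h_prefix)libspeechd.h>' expands.
  # Passing -Dlibspeechd_h_prefix=speech-dispatcher/ to gyp selects the
  # speech-dispatcher >= 0.8 include layout; the default selects < 0.8.
  def header_argument(libspeechd_h_prefix=''):
      return '<%slibspeechd.h>' % libspeechd_h_prefix

  assert header_argument() == '<libspeechd.h>'
  assert header_argument('speech-dispatcher/') == '<speech-dispatcher/libspeechd.h>'
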
diff --git a/build/linux/unbundle/README b/build/linux/unbundle/README
new file mode 100644
index 0000000..d1b2a96
--- /dev/null
+++ b/build/linux/unbundle/README
@@ -0,0 +1,44 @@
+This directory contains files that make it possible to use system libraries.
+
+For more info please read the following:
+
+ - https://fedoraproject.org/wiki/Packaging:No_Bundled_Libraries
+ - https://wiki.gentoo.org/wiki/Why_not_bundle_dependencies
+ - http://www.debian.org/doc/debian-policy/ch-source.html#s-embeddedfiles
+
+For more Chromium-specific context please read
+http://spot.livejournal.com/312320.html .
+
+This directory is provided in the source tree to follow the above guidelines.
+It is a compromise that takes into account both Chromium developers, who
+want to avoid the burden of more conditional code in gyp, and the
+expectations of the Open Source community, where using system-provided
+libraries is the norm.
+
+Usage:
+
+1. remove_bundled_libraries.py <preserved-directories>
+
+   For example: remove_bundled_libraries.py third_party/mesa
+
+   The script scans the sources looking for third_party directories.
+   Everything that is not explicitly preserved is removed (except for
+   gyp files), and the script fails if any directory passed on the
+   command line does not exist (to ensure the list is kept up to date).
+
+   This is intended to be used on sources extracted from a tarball,
+   not a repository.
+
+   NOTE: by default this will not remove anything (for safety). Pass the
+   --do-remove flag to actually remove files.
+
+2. replace_gyp_files.py <gyp-flags>
+
+   For example: replace_gyp_files.py -Duse_system_harfbuzz=1
+
+   The script ignores flags other than -D for convenience. This makes it
+   possible to have a single variable, e.g. ${myconf}, with all the
+   options, and to execute:
+
+   build/linux/unbundle/replace_gyp_files.py ${myconf}
+   build/gyp_chromium ${myconf}
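
Putting the two scripts together, a hedged sketch of the full tarball
workflow (the use_system_* flags and the preserved third_party/mesa directory
are examples only; any flag from replace_gyp_files.py works the same way):

  # Sketch of the distro workflow described above, driven from Python.
  import subprocess

  myconf = ['-Duse_system_harfbuzz=1', '-Duse_system_libxml=1']

  # 1. Drop bundled copies, preserving only what the build still needs.
  subprocess.check_call(['build/linux/unbundle/remove_bundled_libraries.py',
                         '--do-remove', 'third_party/mesa'])
  # 2. Swap in the system-library gyp files, then run gyp with the same flags.
  subprocess.check_call(['build/linux/unbundle/replace_gyp_files.py'] + myconf)
  subprocess.check_call(['build/gyp_chromium'] + myconf)
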
diff --git a/build/linux/unbundle/expat.gyp b/build/linux/unbundle/expat.gyp
new file mode 100644
index 0000000..030fb85
--- /dev/null
+++ b/build/linux/unbundle/expat.gyp
@@ -0,0 +1,17 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'expat',
+      'type': 'none',
+      'link_settings': {
+        'libraries': [
+          '-lexpat',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/ffmpeg.gyp b/build/linux/unbundle/ffmpeg.gyp
new file mode 100644
index 0000000..e3c3723
--- /dev/null
+++ b/build/linux/unbundle/ffmpeg.gyp
@@ -0,0 +1,54 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'ffmpeg',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libavcodec libavformat libavutil)',
+
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { return AV_CODEC_ID_OPUS; }" '
+              '--on-failure -DCHROMIUM_OMIT_AV_CODEC_ID_OPUS=1)',
+
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { return AV_CODEC_ID_VP9; }" '
+              '--on-failure -DCHROMIUM_OMIT_AV_CODEC_ID_VP9=1)',
+
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { return AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL; }" '
+              '--on-failure -DCHROMIUM_OMIT_AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL=1)',
+
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { struct AVFrame frame;\n'
+              'return av_frame_get_channels(&frame); }" '
+              '--on-failure -DCHROMIUM_NO_AVFRAME_CHANNELS=1)',
+        ],
+        'defines': [
+          '__STDC_CONSTANT_MACROS',
+          'USE_SYSTEM_FFMPEG',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libavcodec libavformat libavutil)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libavcodec libavformat libavutil)',
+        ],
+      },
+    },
+  ],
+}
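
The compile_test.py probes above all follow one pattern: try to build a
snippet against the system headers and emit a fallback define only on
failure. A rough stand-alone sketch of that pattern (assuming a 'cc' driver
on PATH and ignoring the include flags pkg-config would normally supply):

  # Sketch of the probe pattern: compile a snippet; print the fallback
  # define only if compilation fails.
  import os, subprocess, tempfile

  def probe(code, on_failure_define):
      src = tempfile.NamedTemporaryFile(mode='w', suffix='.c', delete=False)
      src.write(code)
      src.close()
      try:
          ok = subprocess.call(['cc', '-c', src.name, '-o', os.devnull]) == 0
      finally:
          os.unlink(src.name)
      return '' if ok else on_failure_define

  print(probe('#define __STDC_CONSTANT_MACROS\n'
              '#include <libavcodec/avcodec.h>\n'
              'int test() { return AV_CODEC_ID_OPUS; }\n',
              '-DCHROMIUM_OMIT_AV_CODEC_ID_OPUS=1'))
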
diff --git a/build/linux/unbundle/flac.gyp b/build/linux/unbundle/flac.gyp
new file mode 100644
index 0000000..9e4a664
--- /dev/null
+++ b/build/linux/unbundle/flac.gyp
@@ -0,0 +1,37 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libflac',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'include',
+        'header_filenames': [
+          'FLAC/callback.h',
+          'FLAC/metadata.h',
+          'FLAC/assert.h',
+          'FLAC/export.h',
+          'FLAC/format.h',
+          'FLAC/stream_decoder.h',
+          'FLAC/stream_encoder.h',
+          'FLAC/ordinals.h',
+          'FLAC/all.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other flac)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l flac)',
+        ],
+      },
+    },
+  ],
+}
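
Many of these files (flac above, and harfbuzz, icu, libvpx, opus, protobuf,
re2, snappy, speex, v8, zlib below) share the shim_headers.gypi include:
headers_root_path plus header_filenames describe bundled headers that get
replaced by generated forwarding stubs, so in-tree #include lines resolve to
the system copy. A sketch of the idea, under the assumption that a simple
forwarding stub is enough (the real generator behind shim_headers.gypi is
more elaborate):

  # Sketch: generate stand-in headers that forward to the system copies.
  # This only illustrates the concept, not the actual generator output.
  import os

  def write_shim(shim_dir, header_filename):
      path = os.path.join(shim_dir, header_filename)
      if not os.path.isdir(os.path.dirname(path)):
          os.makedirs(os.path.dirname(path))
      with open(path, 'w') as f:
          f.write('#include_next <%s>\n' % header_filename)

  for name in ('FLAC/callback.h', 'FLAC/metadata.h'):
      write_shim('out/shim_headers/flac', name)
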
diff --git a/build/linux/unbundle/harfbuzz.gyp b/build/linux/unbundle/harfbuzz.gyp
new file mode 100644
index 0000000..3bc1744
--- /dev/null
+++ b/build/linux/unbundle/harfbuzz.gyp
@@ -0,0 +1,47 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # Check for presence of harfbuzz-icu library, use it if present.
+    'harfbuzz_libraries':
+        '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+        '--code "int main() { return 0; }" '
+        '--run-linker '
+        '--on-success "harfbuzz harfbuzz-icu" '
+        '--on-failure "harfbuzz" '
+        '-- -lharfbuzz-icu)',
+  },
+  'targets': [
+    {
+      'target_name': 'harfbuzz-ng',
+      'type': 'none',
+      'cflags': [
+        '<!@(pkg-config --cflags <(harfbuzz_libraries))',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags <(harfbuzz_libraries))',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other <(harfbuzz_libraries))',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l <(harfbuzz_libraries))',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'src',
+        'header_filenames': [
+          'hb.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+  ],
+}
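
The harfbuzz_libraries variable above uses compile_test.py in --run-linker
mode: link a trivial program against -lharfbuzz-icu and choose the pkg-config
module list based on the result. Roughly equivalent logic (again assuming a
'cc' driver on PATH; harfbuzz_modules is illustrative only):

  # Sketch of the --run-linker probe deciding the module list.
  import os, subprocess, tempfile

  def harfbuzz_modules():
      src = tempfile.NamedTemporaryFile(mode='w', suffix='.c', delete=False)
      src.write('int main() { return 0; }\n')
      src.close()
      try:
          ok = subprocess.call(['cc', src.name, '-lharfbuzz-icu',
                                '-o', os.devnull]) == 0
      finally:
          os.unlink(src.name)
      return 'harfbuzz harfbuzz-icu' if ok else 'harfbuzz'
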
diff --git a/build/linux/unbundle/icu.gyp b/build/linux/unbundle/icu.gyp
new file mode 100644
index 0000000..16c36df
--- /dev/null
+++ b/build/linux/unbundle/icu.gyp
@@ -0,0 +1,248 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'icudata',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags icu-uc)',
+        ],
+        'defines': [
+          'U_USING_ICU_NAMESPACE=0',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other icu-uc)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l icu-uc)',
+        ],
+      },
+    },
+    {
+      'target_name': 'icui18n',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags icu-i18n)',
+        ],
+        'defines': [
+          'U_USING_ICU_NAMESPACE=0',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other icu-i18n)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l icu-i18n)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'source/i18n',
+        'header_filenames': [
+          # This list can easily be updated using the command below:
+          # find third_party/icu/source/i18n/unicode -iname '*.h' \
+          # -printf "'%p',\n" | \
+          # sed -e 's|third_party/icu/source/i18n/||' | sort -u
+          'unicode/basictz.h',
+          'unicode/bmsearch.h',
+          'unicode/bms.h',
+          'unicode/calendar.h',
+          'unicode/choicfmt.h',
+          'unicode/coleitr.h',
+          'unicode/colldata.h',
+          'unicode/coll.h',
+          'unicode/curramt.h',
+          'unicode/currpinf.h',
+          'unicode/currunit.h',
+          'unicode/datefmt.h',
+          'unicode/dcfmtsym.h',
+          'unicode/decimfmt.h',
+          'unicode/dtfmtsym.h',
+          'unicode/dtitvfmt.h',
+          'unicode/dtitvinf.h',
+          'unicode/dtptngen.h',
+          'unicode/dtrule.h',
+          'unicode/fieldpos.h',
+          'unicode/fmtable.h',
+          'unicode/format.h',
+          'unicode/fpositer.h',
+          'unicode/gregocal.h',
+          'unicode/locdspnm.h',
+          'unicode/measfmt.h',
+          'unicode/measunit.h',
+          'unicode/measure.h',
+          'unicode/msgfmt.h',
+          'unicode/numfmt.h',
+          'unicode/numsys.h',
+          'unicode/plurfmt.h',
+          'unicode/plurrule.h',
+          'unicode/rbnf.h',
+          'unicode/rbtz.h',
+          'unicode/regex.h',
+          'unicode/search.h',
+          'unicode/selfmt.h',
+          'unicode/simpletz.h',
+          'unicode/smpdtfmt.h',
+          'unicode/sortkey.h',
+          'unicode/stsearch.h',
+          'unicode/tblcoll.h',
+          'unicode/timezone.h',
+          'unicode/tmunit.h',
+          'unicode/tmutamt.h',
+          'unicode/tmutfmt.h',
+          'unicode/translit.h',
+          'unicode/tzrule.h',
+          'unicode/tztrans.h',
+          'unicode/ucal.h',
+          'unicode/ucoleitr.h',
+          'unicode/ucol.h',
+          'unicode/ucsdet.h',
+          'unicode/ucurr.h',
+          'unicode/udat.h',
+          'unicode/udatpg.h',
+          'unicode/uldnames.h',
+          'unicode/ulocdata.h',
+          'unicode/umsg.h',
+          'unicode/unirepl.h',
+          'unicode/unum.h',
+          'unicode/uregex.h',
+          'unicode/usearch.h',
+          'unicode/uspoof.h',
+          'unicode/utmscale.h',
+          'unicode/utrans.h',
+          'unicode/vtzone.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+    {
+      'target_name': 'icuuc',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags icu-uc)',
+        ],
+        'defines': [
+          'U_USING_ICU_NAMESPACE=0',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other icu-uc)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l icu-uc)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'source/common',
+        'header_filenames': [
+          # This list can easily be updated using the command below:
+          # find third_party/icu/source/common/unicode -iname '*.h' \
+          # -printf "'%p',\n" | \
+          # sed -e 's|third_party/icu/source/common/||' | sort -u
+          'unicode/brkiter.h',
+          'unicode/bytestream.h',
+          'unicode/caniter.h',
+          'unicode/chariter.h',
+          'unicode/dbbi.h',
+          'unicode/docmain.h',
+          'unicode/dtintrv.h',
+          'unicode/errorcode.h',
+          'unicode/icudataver.h',
+          'unicode/icuplug.h',
+          'unicode/idna.h',
+          'unicode/localpointer.h',
+          'unicode/locid.h',
+          'unicode/normalizer2.h',
+          'unicode/normlzr.h',
+          'unicode/pandroid.h',
+          'unicode/parseerr.h',
+          'unicode/parsepos.h',
+          'unicode/pfreebsd.h',
+          'unicode/plinux.h',
+          'unicode/pmac.h',
+          'unicode/popenbsd.h',
+          'unicode/ppalmos.h',
+          'unicode/ptypes.h',
+          'unicode/putil.h',
+          'unicode/pwin32.h',
+          'unicode/rbbi.h',
+          'unicode/rep.h',
+          'unicode/resbund.h',
+          'unicode/schriter.h',
+          'unicode/std_string.h',
+          'unicode/strenum.h',
+          'unicode/stringpiece.h',
+          'unicode/symtable.h',
+          'unicode/ubidi.h',
+          'unicode/ubrk.h',
+          'unicode/ucasemap.h',
+          'unicode/ucat.h',
+          'unicode/uchar.h',
+          'unicode/uchriter.h',
+          'unicode/uclean.h',
+          'unicode/ucnv_cb.h',
+          'unicode/ucnv_err.h',
+          'unicode/ucnv.h',
+          'unicode/ucnvsel.h',
+          'unicode/uconfig.h',
+          'unicode/udata.h',
+          'unicode/udeprctd.h',
+          'unicode/udraft.h',
+          'unicode/uenum.h',
+          'unicode/uidna.h',
+          'unicode/uintrnal.h',
+          'unicode/uiter.h',
+          'unicode/uloc.h',
+          'unicode/umachine.h',
+          'unicode/umisc.h',
+          'unicode/unifilt.h',
+          'unicode/unifunct.h',
+          'unicode/unimatch.h',
+          'unicode/uniset.h',
+          'unicode/unistr.h',
+          'unicode/unorm2.h',
+          'unicode/unorm.h',
+          'unicode/uobject.h',
+          'unicode/uobslete.h',
+          'unicode/urename.h',
+          'unicode/urep.h',
+          'unicode/ures.h',
+          'unicode/uscript.h',
+          'unicode/uset.h',
+          'unicode/usetiter.h',
+          'unicode/ushape.h',
+          'unicode/usprep.h',
+          'unicode/ustring.h',
+          'unicode/usystem.h',
+          'unicode/utext.h',
+          'unicode/utf16.h',
+          'unicode/utf32.h',
+          'unicode/utf8.h',
+          'unicode/utf.h',
+          'unicode/utf_old.h',
+          'unicode/utrace.h',
+          'unicode/utypeinfo.h',
+          'unicode/utypes.h',
+          'unicode/uvernum.h',
+          'unicode/uversion.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/jsoncpp.gyp b/build/linux/unbundle/jsoncpp.gyp
new file mode 100644
index 0000000..c397f64
--- /dev/null
+++ b/build/linux/unbundle/jsoncpp.gyp
@@ -0,0 +1,39 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'jsoncpp',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'source/include',
+        'header_filenames': [
+          'json/assertions.h',
+          'json/autolink.h',
+          'json/config.h',
+          'json/features.h',
+          'json/forwards.h',
+          'json/json.h',
+          'json/reader.h',
+          'json/value.h',
+          'json/writer.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '/usr/include/jsoncpp',
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          '-ljsoncpp',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libXNVCtrl.gyp b/build/linux/unbundle/libXNVCtrl.gyp
new file mode 100644
index 0000000..f076bdb
--- /dev/null
+++ b/build/linux/unbundle/libXNVCtrl.gyp
@@ -0,0 +1,35 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libXNVCtrl',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'NVCtrlLib.h',
+          'NVCtrl.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+            '<!@(pkg-config --cflags libXNVCtrl)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libXNVCtrl)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libXNVCtrl)',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libevent.gyp b/build/linux/unbundle/libevent.gyp
new file mode 100644
index 0000000..99d7435
--- /dev/null
+++ b/build/linux/unbundle/libevent.gyp
@@ -0,0 +1,27 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libevent',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'event.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-levent',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libjpeg.gyp b/build/linux/unbundle/libjpeg.gyp
new file mode 100644
index 0000000..f56e7aa
--- /dev/null
+++ b/build/linux/unbundle/libjpeg.gyp
@@ -0,0 +1,29 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libjpeg',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'defines': [
+          'USE_SYSTEM_LIBJPEG',
+        ],
+        'conditions': [
+          ['os_bsd==1', {
+            'include_dirs': [
+              '/usr/local/include',
+            ],
+          }],
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          '-ljpeg',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libpng.gyp b/build/linux/unbundle/libpng.gyp
new file mode 100644
index 0000000..d6933fc
--- /dev/null
+++ b/build/linux/unbundle/libpng.gyp
@@ -0,0 +1,38 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libpng',
+      'type': 'none',
+      'dependencies': [
+        '../zlib/zlib.gyp:zlib',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libpng)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libpng)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libpng)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'png.h',
+          'pngconf.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libusb.gyp b/build/linux/unbundle/libusb.gyp
new file mode 100644
index 0000000..1c18033
--- /dev/null
+++ b/build/linux/unbundle/libusb.gyp
@@ -0,0 +1,34 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libusb',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'src/libusb',
+        'header_filenames': [
+          'libusb.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libusb-1.0)',
+        ],
+        'link_settings': {
+          'ldflags': [
+            '<!@(pkg-config --libs-only-L --libs-only-other libusb-1.0)',
+          ],
+          'libraries': [
+            '<!@(pkg-config --libs-only-l libusb-1.0)',
+          ],
+        },
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libvpx.gyp b/build/linux/unbundle/libvpx.gyp
new file mode 100644
index 0000000..75671c5
--- /dev/null
+++ b/build/linux/unbundle/libvpx.gyp
@@ -0,0 +1,43 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libvpx',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags vpx)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'source/libvpx',
+        'header_filenames': [
+          'vpx/vp8.h',
+          'vpx/vp8cx.h',
+          'vpx/vp8dx.h',
+          'vpx/vpx_codec.h',
+          'vpx/vpx_codec_impl_bottom.h',
+          'vpx/vpx_codec_impl_top.h',
+          'vpx/vpx_decoder.h',
+          'vpx/vpx_encoder.h',
+          'vpx/vpx_frame_buffer.h',
+          'vpx/vpx_image.h',
+          'vpx/vpx_integer.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other vpx)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l vpx)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libwebp.gyp b/build/linux/unbundle/libwebp.gyp
new file mode 100644
index 0000000..6dbce2e
--- /dev/null
+++ b/build/linux/unbundle/libwebp.gyp
@@ -0,0 +1,28 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libwebp',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'defines': [
+          'ENABLE_WEBP',
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          # Check for presence of webpdemux library, use it if present.
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+          '--code "int main() { return 0; }" '
+          '--run-linker '
+          '--on-success "-lwebp -lwebpdemux" '
+          '--on-failure "-lwebp" '
+          '-- -lwebpdemux)',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libxml.gyp b/build/linux/unbundle/libxml.gyp
new file mode 100644
index 0000000..bc4f9fc
--- /dev/null
+++ b/build/linux/unbundle/libxml.gyp
@@ -0,0 +1,38 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libxml',
+      'type': 'static_library',
+      'sources': [
+        'chromium/libxml_utils.h',
+        'chromium/libxml_utils.cc',
+      ],
+      'cflags': [
+        '<!@(pkg-config --cflags libxml-2.0)',
+      ],
+      'defines': [
+        'USE_SYSTEM_LIBXML',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libxml-2.0)',
+        ],
+        'defines': [
+          'USE_SYSTEM_LIBXML',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libxml-2.0)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libxml-2.0)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libxslt.gyp b/build/linux/unbundle/libxslt.gyp
new file mode 100644
index 0000000..f7f6bb9
--- /dev/null
+++ b/build/linux/unbundle/libxslt.gyp
@@ -0,0 +1,25 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libxslt',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libxslt)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libxslt)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libxslt)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/opus.gyp b/build/linux/unbundle/opus.gyp
new file mode 100644
index 0000000..e8c30ba
--- /dev/null
+++ b/build/linux/unbundle/opus.gyp
@@ -0,0 +1,38 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'opus',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags opus)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'src/include',
+        'header_filenames': [
+          'opus_custom.h',
+          'opus_defines.h',
+          'opus_multistream.h',
+          'opus_types.h',
+          'opus.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other opus)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l opus)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/protobuf.gyp b/build/linux/unbundle/protobuf.gyp
new file mode 100644
index 0000000..7bcd992
--- /dev/null
+++ b/build/linux/unbundle/protobuf.gyp
@@ -0,0 +1,149 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'protobuf_lite',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          # Use full protobuf, because vanilla protobuf doesn't have
+          # our custom patch to retain unknown fields in lite mode.
+          '<!@(pkg-config --cflags protobuf)',
+        ],
+        'defines': [
+          'USE_SYSTEM_PROTOBUF',
+
+          # This macro must be defined to suppress the use
+          # of dynamic_cast<>, which requires RTTI.
+          'GOOGLE_PROTOBUF_NO_RTTI',
+          'GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER',
+        ],
+      },
+      'link_settings': {
+        # Use full protobuf, because vanilla protobuf doesn't have
+        # our custom patch to retain unknown fields in lite mode.
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other protobuf)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l protobuf)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'src',
+        'header_filenames': [
+          # This list can easily be updated using the command below:
+          # find third_party/protobuf/src -iname '*.h' -printf "'%p',\n" | \
+          # sed -e 's|third_party/protobuf/src/||' | sort -u
+          'google/protobuf/compiler/code_generator.h',
+          'google/protobuf/compiler/command_line_interface.h',
+          'google/protobuf/compiler/cpp/cpp_enum_field.h',
+          'google/protobuf/compiler/cpp/cpp_enum.h',
+          'google/protobuf/compiler/cpp/cpp_extension.h',
+          'google/protobuf/compiler/cpp/cpp_field.h',
+          'google/protobuf/compiler/cpp/cpp_file.h',
+          'google/protobuf/compiler/cpp/cpp_generator.h',
+          'google/protobuf/compiler/cpp/cpp_helpers.h',
+          'google/protobuf/compiler/cpp/cpp_message_field.h',
+          'google/protobuf/compiler/cpp/cpp_message.h',
+          'google/protobuf/compiler/cpp/cpp_options.h',
+          'google/protobuf/compiler/cpp/cpp_primitive_field.h',
+          'google/protobuf/compiler/cpp/cpp_service.h',
+          'google/protobuf/compiler/cpp/cpp_string_field.h',
+          'google/protobuf/compiler/cpp/cpp_unittest.h',
+          'google/protobuf/compiler/importer.h',
+          'google/protobuf/compiler/java/java_doc_comment.h',
+          'google/protobuf/compiler/java/java_enum_field.h',
+          'google/protobuf/compiler/java/java_enum.h',
+          'google/protobuf/compiler/java/java_extension.h',
+          'google/protobuf/compiler/java/java_field.h',
+          'google/protobuf/compiler/java/java_file.h',
+          'google/protobuf/compiler/java/java_generator.h',
+          'google/protobuf/compiler/java/java_helpers.h',
+          'google/protobuf/compiler/java/java_message_field.h',
+          'google/protobuf/compiler/java/java_message.h',
+          'google/protobuf/compiler/java/java_primitive_field.h',
+          'google/protobuf/compiler/java/java_service.h',
+          'google/protobuf/compiler/java/java_string_field.h',
+          'google/protobuf/compiler/mock_code_generator.h',
+          'google/protobuf/compiler/package_info.h',
+          'google/protobuf/compiler/parser.h',
+          'google/protobuf/compiler/plugin.h',
+          'google/protobuf/compiler/plugin.pb.h',
+          'google/protobuf/compiler/python/python_generator.h',
+          'google/protobuf/compiler/subprocess.h',
+          'google/protobuf/compiler/zip_writer.h',
+          'google/protobuf/descriptor_database.h',
+          'google/protobuf/descriptor.h',
+          'google/protobuf/descriptor.pb.h',
+          'google/protobuf/dynamic_message.h',
+          'google/protobuf/extension_set.h',
+          'google/protobuf/generated_enum_reflection.h',
+          'google/protobuf/generated_message_reflection.h',
+          'google/protobuf/generated_message_util.h',
+          'google/protobuf/io/coded_stream.h',
+          'google/protobuf/io/coded_stream_inl.h',
+          'google/protobuf/io/gzip_stream.h',
+          'google/protobuf/io/package_info.h',
+          'google/protobuf/io/printer.h',
+          'google/protobuf/io/tokenizer.h',
+          'google/protobuf/io/zero_copy_stream.h',
+          'google/protobuf/io/zero_copy_stream_impl.h',
+          'google/protobuf/io/zero_copy_stream_impl_lite.h',
+          'google/protobuf/message.h',
+          'google/protobuf/message_lite.h',
+          'google/protobuf/package_info.h',
+          'google/protobuf/reflection_ops.h',
+          'google/protobuf/repeated_field.h',
+          'google/protobuf/service.h',
+          'google/protobuf/stubs/atomicops.h',
+          'google/protobuf/stubs/atomicops_internals_arm64_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_arm_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_arm_qnx.h',
+          'google/protobuf/stubs/atomicops_internals_atomicword_compat.h',
+          'google/protobuf/stubs/atomicops_internals_macosx.h',
+          'google/protobuf/stubs/atomicops_internals_mips_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_pnacl.h',
+          'google/protobuf/stubs/atomicops_internals_tsan.h',
+          'google/protobuf/stubs/atomicops_internals_x86_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_x86_msvc.h',
+          'google/protobuf/stubs/common.h',
+          'google/protobuf/stubs/hash.h',
+          'google/protobuf/stubs/map-util.h',
+          'google/protobuf/stubs/once.h',
+          'google/protobuf/stubs/platform_macros.h',
+          'google/protobuf/stubs/stl_util.h',
+          'google/protobuf/stubs/stringprintf.h',
+          'google/protobuf/stubs/strutil.h',
+          'google/protobuf/stubs/substitute.h',
+          'google/protobuf/stubs/template_util.h',
+          'google/protobuf/stubs/type_traits.h',
+          'google/protobuf/testing/file.h',
+          'google/protobuf/testing/googletest.h',
+          'google/protobuf/test_util.h',
+          'google/protobuf/test_util_lite.h',
+          'google/protobuf/text_format.h',
+          'google/protobuf/unknown_field_set.h',
+          'google/protobuf/wire_format.h',
+          'google/protobuf/wire_format_lite.h',
+          'google/protobuf/wire_format_lite_inl.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+    {
+      'target_name': 'protoc',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+    },
+    {
+      'target_name': 'py_proto',
+      'type': 'none',
+    },
+  ],
+}
diff --git a/build/linux/unbundle/re2.gyp b/build/linux/unbundle/re2.gyp
new file mode 100644
index 0000000..e2e567a
--- /dev/null
+++ b/build/linux/unbundle/re2.gyp
@@ -0,0 +1,37 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 're2',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          're2/filtered_re2.h',
+          're2/re2.h',
+          're2/set.h',
+          're2/stringpiece.h',
+          're2/variadic_function.h',
+        ],
+        'shim_generator_additional_args': [
+          # The Chromium copy of re2 is patched to rename POSIX to POSIX_SYNTAX
+          # because of collision issues that break the build.
+          # Upstream refuses to make changes:
+          # http://code.google.com/p/re2/issues/detail?id=73 .
+          '--define', 'POSIX=POSIX_SYNTAX',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lre2',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/remove_bundled_libraries.py b/build/linux/unbundle/remove_bundled_libraries.py
new file mode 100755
index 0000000..69e76f5
--- /dev/null
+++ b/build/linux/unbundle/remove_bundled_libraries.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Removes bundled libraries to make sure they are not used.
+
+See README for more details.
+"""
+
+
+import optparse
+import os.path
+import sys
+
+
+def DoMain(argv):
+  my_dirname = os.path.abspath(os.path.dirname(__file__))
+  source_tree_root = os.path.abspath(
+    os.path.join(my_dirname, '..', '..', '..'))
+
+  if os.path.join(source_tree_root, 'build', 'linux', 'unbundle') != my_dirname:
+    print ('Sanity check failed: please run this script from ' +
+           'the build/linux/unbundle directory.')
+    return 1
+
+  parser = optparse.OptionParser()
+  parser.add_option('--do-remove', action='store_true')
+
+  options, args = parser.parse_args(argv)
+
+  exclusion_used = {}
+  for exclusion in args:
+    exclusion_used[exclusion] = False
+
+  for root, dirs, files in os.walk(source_tree_root, topdown=False):
+    # Only look at paths which contain a "third_party" component
+    # (note that e.g. third_party.png doesn't count).
+    root_relpath = os.path.relpath(root, source_tree_root)
+    if 'third_party' not in root_relpath.split(os.sep):
+      continue
+
+    for f in files:
+      path = os.path.join(root, f)
+      relpath = os.path.relpath(path, source_tree_root)
+
+      excluded = False
+      for exclusion in args:
+        # Require precise exclusions. Find the right-most third_party
+        # component in the relative path; if there is more than one,
+        # ignore the exclusion when it is completely contained within
+        # the part before the right-most third_party component.
+        split = relpath.rsplit(os.sep + 'third_party' + os.sep, 1)
+        if len(split) > 1 and split[0].startswith(exclusion):
+          continue
+
+        if relpath.startswith(exclusion):
+          # Multiple exclusions can match the same path. Go through all of them
+          # and mark each one as used.
+          exclusion_used[exclusion] = True
+          excluded = True
+      if excluded:
+        continue
+
+      # Deleting gyp files almost always leads to gyp failures.
+      # These files come from Chromium project, and can be replaced if needed.
+      if f.endswith('.gyp') or f.endswith('.gypi'):
+        continue
+
+      # Deleting .isolate files leads to gyp failures. They are usually
+      # not used by a distro build anyway.
+      # See http://www.chromium.org/developers/testing/isolated-testing
+      # for more info.
+      if f.endswith('.isolate'):
+        continue
+
+      if options.do_remove:
+        # Delete the file - best way to ensure it's not used during build.
+        os.remove(path)
+      else:
+        # By default just print paths that would be removed.
+        print path
+
+  exit_code = 0
+
+  # Fail if exclusion list contains stale entries - this helps keep it
+  # up to date.
+  for exclusion, used in exclusion_used.iteritems():
+    if not used:
+      print '%s does not exist' % exclusion
+      exit_code = 1
+
+  if not options.do_remove:
+    print ('To actually remove the files printed above, please pass ' +
+           'the --do-remove flag.')
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(DoMain(sys.argv[1:]))
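
The exclusion rule in the loop above is subtle enough to deserve a worked
example (POSIX paths assumed; matches is an illustrative re-statement of the
logic, not a function in the script):

  import os

  def matches(relpath, exclusion):
      # Same test as in DoMain: an exclusion never reaches past the
      # right-most third_party component.
      split = relpath.rsplit(os.sep + 'third_party' + os.sep, 1)
      if len(split) > 1 and split[0].startswith(exclusion):
          return False
      return relpath.startswith(exclusion)

  assert matches('third_party/foo/a.c', 'third_party/foo')
  assert not matches('third_party/foo/third_party/bar/b.c', 'third_party/foo')
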
diff --git a/build/linux/unbundle/replace_gyp_files.py b/build/linux/unbundle/replace_gyp_files.py
new file mode 100755
index 0000000..d06ae41
--- /dev/null
+++ b/build/linux/unbundle/replace_gyp_files.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Replaces gyp files in tree with files from here that
+make the build use system libraries.
+"""
+
+
+import optparse
+import os.path
+import shutil
+import sys
+
+
+REPLACEMENTS = {
+  'use_system_expat': 'third_party/expat/expat.gyp',
+  'use_system_ffmpeg': 'third_party/ffmpeg/ffmpeg.gyp',
+  'use_system_flac': 'third_party/flac/flac.gyp',
+  'use_system_harfbuzz': 'third_party/harfbuzz-ng/harfbuzz.gyp',
+  'use_system_icu': 'third_party/icu/icu.gyp',
+  'use_system_jsoncpp': 'third_party/jsoncpp/jsoncpp.gyp',
+  'use_system_libevent': 'third_party/libevent/libevent.gyp',
+  'use_system_libjpeg': 'third_party/libjpeg/libjpeg.gyp',
+  'use_system_libpng': 'third_party/libpng/libpng.gyp',
+  'use_system_libusb': 'third_party/libusb/libusb.gyp',
+  'use_system_libvpx': 'third_party/libvpx/libvpx.gyp',
+  'use_system_libwebp': 'third_party/libwebp/libwebp.gyp',
+  'use_system_libxml': 'third_party/libxml/libxml.gyp',
+  'use_system_libxnvctrl' : 'third_party/libXNVCtrl/libXNVCtrl.gyp',
+  'use_system_libxslt': 'third_party/libxslt/libxslt.gyp',
+  'use_system_opus': 'third_party/opus/opus.gyp',
+  'use_system_protobuf': 'third_party/protobuf/protobuf.gyp',
+  'use_system_re2': 'third_party/re2/re2.gyp',
+  'use_system_snappy': 'third_party/snappy/snappy.gyp',
+  'use_system_speex': 'third_party/speex/speex.gyp',
+  'use_system_sqlite': 'third_party/sqlite/sqlite.gyp',
+  'use_system_v8': 'v8/tools/gyp/v8.gyp',
+  'use_system_zlib': 'third_party/zlib/zlib.gyp',
+}
+
+
+def DoMain(argv):
+  my_dirname = os.path.dirname(__file__)
+  source_tree_root = os.path.abspath(
+    os.path.join(my_dirname, '..', '..', '..'))
+
+  parser = optparse.OptionParser()
+
+  # Accept arguments in gyp command-line syntax, so that the caller can re-use
+  # the same command line for this script and for gyp.
+  parser.add_option('-D', dest='defines', action='append', default=[])
+
+  parser.add_option('--undo', action='store_true')
+
+  options, args = parser.parse_args(argv)
+
+  for flag, path in REPLACEMENTS.items():
+    if '%s=1' % flag not in options.defines:
+      continue
+
+    if options.undo:
+      # Restore original file, and also remove the backup.
+      # This is meant to restore the source tree to its original state.
+      os.rename(os.path.join(source_tree_root, path + '.orig'),
+                os.path.join(source_tree_root, path))
+    else:
+      # Create a backup copy for --undo.
+      shutil.copyfile(os.path.join(source_tree_root, path),
+                      os.path.join(source_tree_root, path + '.orig'))
+
+      # Copy the gyp file from directory of this script to target path.
+      shutil.copyfile(os.path.join(my_dirname, os.path.basename(path)),
+                      os.path.join(source_tree_root, path))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(DoMain(sys.argv))
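
A short usage sketch (run from the source root, as the README assumes;
use_system_libxml is just an example flag):

  import subprocess

  flags = ['-Duse_system_libxml=1']
  # Swap in the system-library gyp file (a .orig backup is kept) ...
  subprocess.check_call(['build/linux/unbundle/replace_gyp_files.py'] + flags)
  # ... run build/gyp_chromium with the same flags and build, then restore:
  subprocess.check_call(['build/linux/unbundle/replace_gyp_files.py',
                         '--undo'] + flags)
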
diff --git a/build/linux/unbundle/snappy.gyp b/build/linux/unbundle/snappy.gyp
new file mode 100644
index 0000000..ab856ed
--- /dev/null
+++ b/build/linux/unbundle/snappy.gyp
@@ -0,0 +1,29 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'snappy',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'src',
+        'header_filenames': [
+          'snappy-c.h',
+          'snappy-sinksource.h',
+          'snappy-stubs-public.h',
+          'snappy.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lsnappy',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/speex.gyp b/build/linux/unbundle/speex.gyp
new file mode 100644
index 0000000..75376c8
--- /dev/null
+++ b/build/linux/unbundle/speex.gyp
@@ -0,0 +1,45 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libspeex',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'include',
+        'header_filenames': [
+          'speex/speex_types.h',
+          'speex/speex_callbacks.h',
+          'speex/speex_config_types.h',
+          'speex/speex_stereo.h',
+          'speex/speex_echo.h',
+          'speex/speex_preprocess.h',
+          'speex/speex_jitter.h',
+          'speex/speex.h',
+          'speex/speex_resampler.h',
+          'speex/speex_buffer.h',
+          'speex/speex_header.h',
+          'speex/speex_bits.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags speex)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other speex)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l speex)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/sqlite.gyp b/build/linux/unbundle/sqlite.gyp
new file mode 100644
index 0000000..918da928
--- /dev/null
+++ b/build/linux/unbundle/sqlite.gyp
@@ -0,0 +1,28 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'sqlite',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags sqlite3)',
+        ],
+        'defines': [
+          'USE_SYSTEM_SQLITE',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other sqlite3)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l sqlite3)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/v8.gyp b/build/linux/unbundle/v8.gyp
new file mode 100644
index 0000000..9b06347
--- /dev/null
+++ b/build/linux/unbundle/v8.gyp
@@ -0,0 +1,64 @@
+# Copyright 2013 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+#       notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+#       copyright notice, this list of conditions and the following
+#       disclaimer in the documentation and/or other materials provided
+#       with the distribution.
+#     * Neither the name of Google Inc. nor the names of its
+#       contributors may be used to endorse or promote products derived
+#       from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+{
+  'includes': ['../../build/toolchain.gypi', '../../build/features.gypi'],
+  'targets': [
+    {
+      'target_name': 'v8',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'variables': {
+        'headers_root_path': '../../include',
+        'header_filenames': [
+          'v8-debug.h',
+          'v8-preparser.h',
+          'v8-profiler.h',
+          'v8-testing.h',
+          'v8.h',
+          'v8stdint.h',
+        ],
+      },
+      'includes': [
+        '../../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lv8',
+        ],
+      },
+    },
+    {
+      'target_name': 'v8_shell',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'dependencies': [
+        'v8'
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/zlib.gyp b/build/linux/unbundle/zlib.gyp
new file mode 100644
index 0000000..0a85ff0
--- /dev/null
+++ b/build/linux/unbundle/zlib.gyp
@@ -0,0 +1,67 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'zlib',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'zlib.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'defines': [
+          'USE_SYSTEM_ZLIB',
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          '-lz',
+        ],
+      },
+    },
+    {
+      'target_name': 'minizip',
+      'type': 'static_library',
+      'all_dependent_settings': {
+        'defines': [
+          'USE_SYSTEM_MINIZIP',
+        ],
+      },
+      'defines': [
+        'USE_SYSTEM_MINIZIP',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lminizip',
+        ],
+      },
+    },
+    {
+      'target_name': 'zip',
+      'type': 'static_library',
+      'dependencies': [
+        'minizip',
+        '../../base/base.gyp:base',
+      ],
+      'include_dirs': [
+        '../..',
+      ],
+      'sources': [
+        'google/zip.cc',
+        'google/zip.h',
+        'google/zip_internal.cc',
+        'google/zip_internal.h',
+        'google/zip_reader.cc',
+        'google/zip_reader.h',
+      ],
+    },
+  ],
+}
diff --git a/build/ls.py b/build/ls.py
new file mode 100755
index 0000000..638c3bd
--- /dev/null
+++ b/build/ls.py
@@ -0,0 +1,31 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Recursively list files of the target directory. Ignores dot files."""
+
+import argparse
+import os
+import sys
+
+def main(target_directory):
+  for root, dirs, files in os.walk(target_directory):
+    files = [f for f in files if not f[0] == '.']
+    dirs[:] = [d for d in dirs if not d[0] == '.']
+    for f in files:
+      path = os.path.join(root, f)
+      print path
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser(
+      description="Recursively list files of the target directory")
+  parser.add_argument("--target-directory",
+                      dest="target_directory",
+                      metavar="<target-directory>",
+                      type=str,
+                      required=True,
+                      help="The target directory")
+
+  args = parser.parse_args()
+  sys.exit(main(args.target_directory))
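
For reference, a typical invocation (out/data is a hypothetical directory):

  python build/ls.py --target-directory out/data
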
diff --git a/build/mac/OWNERS b/build/mac/OWNERS
new file mode 100644
index 0000000..c56e89d
--- /dev/null
+++ b/build/mac/OWNERS
@@ -0,0 +1,2 @@
+mark@chromium.org
+thomasvl@chromium.org
diff --git a/build/mac/asan.gyp b/build/mac/asan.gyp
new file mode 100644
index 0000000..5231681
--- /dev/null
+++ b/build/mac/asan.gyp
@@ -0,0 +1,53 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'asan_dynamic_runtime',
+      'type': 'none',
+      'variables': {
+        # Every target is going to depend on asan_dynamic_runtime, so allow
+        # this one to depend on itself.
+        'prune_self_dependency': 1,
+        # Path is relative to this GYP file.
+        'asan_rtl_mask_path':
+            '../../third_party/llvm-build/Release+Asserts/lib/clang/*/lib/darwin',
+        'asan_osx_dynamic':
+            '<(asan_rtl_mask_path)/libclang_rt.asan_osx_dynamic.dylib',
+        'asan_iossim_dynamic':
+            '<(asan_rtl_mask_path)/libclang_rt.asan_iossim_dynamic.dylib',
+      },
+      'conditions': [
+        ['OS=="mac"', {
+          'copies': [
+            {
+              'destination': '<(PRODUCT_DIR)',
+              'files': [
+                '<!(/bin/ls <(asan_osx_dynamic))',
+              ],
+            },
+          ],
+        }],
+        # ASan works with iOS simulator only, not bare-metal iOS.
+        ['OS=="ios" and target_arch=="ia32"', {
+          'toolsets': ['host', 'target'],
+          'copies': [
+            {
+              'destination': '<(PRODUCT_DIR)',
+              'target_conditions': [
+                ['_toolset=="host"', {
+                  'files': [ '<!(/bin/ls <(asan_osx_dynamic))'],
+                }],
+                ['_toolset=="target"', {
+                  'files': [ '<!(/bin/ls <(asan_iossim_dynamic))'],
+                }],
+              ],
+            },
+          ],
+        }],
+      ],
+    },
+  ],
+}
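
The asan_rtl_mask_path wildcard above relies on a small trick: gyp does not
expand '*' itself, so the '<!(/bin/ls ...)' command expansion resolves the
clang version directory at gyp time. The same lookup in plain Python (a
sketch; the path assumes the usual llvm-build layout relative to the source
root):

  import glob
  print(glob.glob('third_party/llvm-build/Release+Asserts/lib/clang/'
                  '*/lib/darwin/libclang_rt.asan_osx_dynamic.dylib'))
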
diff --git a/build/mac/change_mach_o_flags.py b/build/mac/change_mach_o_flags.py
new file mode 100755
index 0000000..c2aeaec
--- /dev/null
+++ b/build/mac/change_mach_o_flags.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Usage: change_mach_o_flags.py [--executable-heap] [--no-pie] <executablepath>
+
+Arranges for the executable at |executable_path| to have its data (heap)
+pages protected to prevent execution on Mac OS X 10.7 ("Lion"), and to have
+the PIE (position independent executable) bit set to enable ASLR (address
+space layout randomization). With --executable-heap or --no-pie, the
+respective bits are cleared instead of set, making the heap executable or
+disabling PIE/ASLR.
+
+This script is able to operate on thin (single-architecture) Mach-O files
+and fat (universal, multi-architecture) files. When operating on fat files,
+it will set or clear the bits for each architecture contained therein.
+
+NON-EXECUTABLE HEAP
+
+Traditionally in Mac OS X, 32-bit processes did not have data pages set to
+prohibit execution. Although user programs could call mprotect and
+mach_vm_protect to deny execution of code in data pages, the kernel would
+silently ignore such requests without updating the page tables, and the
+hardware would happily execute code on such pages. 64-bit processes were
+always given proper hardware protection of data pages. This behavior was
+controllable on a system-wide level via the vm.allow_data_exec sysctl, which
+is set by default to 1. The bit with value 1 (set by default) allows code
+execution on data pages for 32-bit processes, and the bit with value 2
+(clear by default) does the same for 64-bit processes.
+
+In Mac OS X 10.7, executables can "opt in" to having hardware protection
+against code execution on data pages applied. This is done by setting a new
+bit in the |flags| field of an executable's |mach_header|. When
+MH_NO_HEAP_EXECUTION is set, proper protections will be applied, regardless
+of the setting of vm.allow_data_exec. See xnu-1699.22.73/osfmk/vm/vm_map.c
+override_nx and xnu-1699.22.73/bsd/kern/mach_loader.c load_machfile.
+
+The Apple toolchain has been revised to set the MH_NO_HEAP_EXECUTION bit when
+producing executables, provided that -allow_heap_execute is not specified
+at link time. Only linkers shipping with Xcode 4.0 and later (ld64-123.2 and
+later) have this ability. See ld64-123.2.1/src/ld/Options.cpp
+Options::reconfigureDefaults() and
+ld64-123.2.1/src/ld/HeaderAndLoadCommands.hpp
+HeaderAndLoadCommandsAtom<A>::flags().
+
+This script sets the MH_NO_HEAP_EXECUTION bit on Mach-O executables. It is
+intended for use with executables produced by a linker that predates Apple's
+modifications to set this bit itself. It is also useful for setting this bit
+for non-i386 executables, including x86_64 executables. Apple's linker only
+sets it for 32-bit i386 executables, presumably under the assumption that
+the value of vm.allow_data_exec is set in stone. However, if someone were to
+change vm.allow_data_exec to 2 or 3, 64-bit x86_64 executables would run
+without hardware protection against code execution on data pages. This
+script can set the bit for x86_64 executables, guaranteeing that they run
+with appropriate protection even when vm.allow_data_exec has been tampered
+with.
+
+POSITION-INDEPENDENT EXECUTABLES/ADDRESS SPACE LAYOUT RANDOMIZATION
+
+This script sets or clears the MH_PIE bit in an executable's Mach-O header,
+enabling or disabling position independence on Mac OS X 10.5 and later.
+Processes running position-independent executables have varying levels of
+ASLR protection depending on the OS release. The main executable's load
+address, shared library load addresses, and the heap and stack base
+addresses may be randomized. Position-independent executables are produced
+by supplying the -pie flag to the linker (or defeated by supplying -no_pie).
+Executables linked with a deployment target of 10.7 or higher have PIE on
+by default.
+
+This script is never strictly needed during the build to enable PIE, as all
+linkers used are recent enough to support -pie. However, it's used to
+disable the PIE bit as needed on already-linked executables.
+"""
+
+import optparse
+import os
+import struct
+import sys
+
+
+# <mach-o/fat.h>
+FAT_MAGIC = 0xcafebabe
+FAT_CIGAM = 0xbebafeca
+
+# <mach-o/loader.h>
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+MH_EXECUTE = 0x2
+MH_PIE = 0x00200000
+MH_NO_HEAP_EXECUTION = 0x01000000
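+
+
+# Illustrative sketch, not used by the rest of this script: the two feature
+# bits are independent, so a header's |flags| word can be summarized by
+# testing each bit on its own. The helper name is an addition for clarity.
+def _describe_flags(flags):
+  """Summarizes the two mach_header flag bits this script manages."""
+  return 'MH_PIE=%d MH_NO_HEAP_EXECUTION=%d' % (
+      int(bool(flags & MH_PIE)), int(bool(flags & MH_NO_HEAP_EXECUTION)))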
+
+
+class MachOError(Exception):
+  """A class for exceptions thrown by this module."""
+
+  pass
+
+
+def CheckedSeek(file, offset):
+  """Seeks the file-like object at |file| to offset |offset| and raises a
+  MachOError if anything funny happens."""
+
+  file.seek(offset, os.SEEK_SET)
+  new_offset = file.tell()
+  if new_offset != offset:
+    raise MachOError, \
+          'seek: expected offset %d, observed %d' % (offset, new_offset)
+
+
+def CheckedRead(file, count):
+  """Reads |count| bytes from the file-like |file| object, raising a
+  MachOError if any other number of bytes is read."""
+
+  bytes = file.read(count)
+  if len(bytes) != count:
+    raise MachOError, \
+          'read: expected length %d, observed %d' % (count, len(bytes))
+
+  return bytes
+
+
+def ReadUInt32(file, endian):
+  """Reads an unsinged 32-bit integer from the file-like |file| object,
+  treating it as having endianness specified by |endian| (per the |struct|
+  module), and returns it as a number. Raises a MachOError if the proper
+  length of data can't be read from |file|."""
+
+  bytes = CheckedRead(file, 4)
+
+  (uint32,) = struct.unpack(endian + 'I', bytes)
+  return uint32
+
+
+def ReadMachHeader(file, endian):
+  """Reads an entire |mach_header| structure (<mach-o/loader.h>) from the
+  file-like |file| object, treating it as having endianness specified by
+  |endian| (per the |struct| module), and returns a 7-tuple of its members
+  as numbers. Raises a MachOError if the proper length of data can't be read
+  from |file|."""
+
+  bytes = CheckedRead(file, 28)
+
+  magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = \
+      struct.unpack(endian + '7I', bytes)
+  return magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags
+
+
+def ReadFatArch(file):
+  """Reads an entire |fat_arch| structure (<mach-o/fat.h>) from the file-like
+  |file| object, treating it as big-endian (fat structures always are), and
+  returns a 5-tuple of its members as numbers.
+  Raises a MachOError if the proper length of data can't be read from
+  |file|."""
+
+  bytes = CheckedRead(file, 20)
+
+  cputype, cpusubtype, offset, size, align = struct.unpack('>5I', bytes)
+  return cputype, cpusubtype, offset, size, align
+
+
+def WriteUInt32(file, uint32, endian):
+  """Writes |uint32| as an unsinged 32-bit integer to the file-like |file|
+  object, treating it as having endianness specified by |endian| (per the
+  |struct| module)."""
+
+  bytes = struct.pack(endian + 'I', uint32)
+  assert len(bytes) == 4
+
+  file.write(bytes)
+
+
+def HandleMachOFile(file, options, offset=0):
+  """Seeks the file-like |file| object to |offset|, reads its |mach_header|,
+  and rewrites the header's |flags| field if appropriate. The header's
+  endianness is detected. Both 32-bit and 64-bit Mach-O headers are supported
+  (mach_header and mach_header_64). Raises MachOError if used on a header that
+  does not have a known magic number or is not of type MH_EXECUTE. The
+  MH_PIE and MH_NO_HEAP_EXECUTION bits are set or cleared in the |flags| field
+  according to |options| and written to |file| if any changes need to be made.
+  If already set or clear as specified by |options|, nothing is written."""
+
+  CheckedSeek(file, offset)
+  magic = ReadUInt32(file, '<')
+  if magic == MH_MAGIC or magic == MH_MAGIC_64:
+    endian = '<'
+  elif magic == MH_CIGAM or magic == MH_CIGAM_64:
+    endian = '>'
+  else:
+    raise MachOError, \
+          'Mach-O file at offset %d has illusion of magic' % offset
+
+  CheckedSeek(file, offset)
+  magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = \
+      ReadMachHeader(file, endian)
+  assert magic == MH_MAGIC or magic == MH_MAGIC_64
+  if filetype != MH_EXECUTE:
+    raise MachOError, \
+          'Mach-O file at offset %d is type 0x%x, expected MH_EXECUTE' % \
+              (offset, filetype)
+
+  original_flags = flags
+
+  if options.no_heap_execution:
+    flags |= MH_NO_HEAP_EXECUTION
+  else:
+    flags &= ~MH_NO_HEAP_EXECUTION
+
+  if options.pie:
+    flags |= MH_PIE
+  else:
+    flags &= ~MH_PIE
+
+  if flags != original_flags:
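+    # |flags| is the seventh 32-bit field of both mach_header and
+    # mach_header_64, so it lives at offset 24 from the start of the header
+    # (after magic, cputype, cpusubtype, filetype, ncmds and sizeofcmds).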
+    CheckedSeek(file, offset + 24)
+    WriteUInt32(file, flags, endian)
+
+
+def HandleFatFile(file, options, fat_offset=0):
+  """Seeks the file-like |file| object to |offset| and loops over its
+  |fat_header| entries, calling HandleMachOFile for each."""
+
+  CheckedSeek(file, fat_offset)
+  magic = ReadUInt32(file, '>')
+  assert magic == FAT_MAGIC
+
+  nfat_arch = ReadUInt32(file, '>')
+
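+  # The 8-byte fat header (magic and nfat_arch) is followed directly by
+  # nfat_arch |fat_arch| records of 20 bytes each, all big-endian.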
+  for index in xrange(0, nfat_arch):
+    cputype, cpusubtype, offset, size, align = ReadFatArch(file)
+    assert size >= 28
+
+    # HandleMachOFile will seek around. Come back here after calling it, in
+    # case it sought.
+    fat_arch_offset = file.tell()
+    HandleMachOFile(file, options, offset)
+    CheckedSeek(file, fat_arch_offset)
+
+
+def main(me, args):
+  parser = optparse.OptionParser('%prog [options] <executable_path>')
+  parser.add_option('--executable-heap', action='store_false',
+                    dest='no_heap_execution', default=True,
+                    help='Clear the MH_NO_HEAP_EXECUTION bit')
+  parser.add_option('--no-pie', action='store_false',
+                    dest='pie', default=True,
+                    help='Clear the MH_PIE bit')
+  (options, loose_args) = parser.parse_args(args)
+  if len(loose_args) != 1:
+    parser.print_usage()
+    return 1
+
+  executable_path = loose_args[0]
+  executable_file = open(executable_path, 'rb+')
+
+  magic = ReadUInt32(executable_file, '<')
+  if magic == FAT_CIGAM:
+    # Check FAT_CIGAM and not FAT_MAGIC because the read was little-endian.
+    HandleFatFile(executable_file, options)
+  elif magic == MH_MAGIC or magic == MH_CIGAM or \
+      magic == MH_MAGIC_64 or magic == MH_CIGAM_64:
+    HandleMachOFile(executable_file, options)
+  else:
+    raise MachOError, '%s is not a Mach-O or fat file' % executable_path
+
+  executable_file.close()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[0], sys.argv[1:]))
diff --git a/build/mac/change_mach_o_flags_from_xcode.sh b/build/mac/change_mach_o_flags_from_xcode.sh
new file mode 100755
index 0000000..1824f8d
--- /dev/null
+++ b/build/mac/change_mach_o_flags_from_xcode.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a small wrapper script around change_mach_o_flags.py allowing it to
+# be invoked easily from Xcode. change_mach_o_flags.py expects its arguments
+# on the command line, but Xcode puts its parameters in the environment.
+
+set -e
+
+exec "$(dirname "${0}")/change_mach_o_flags.py" \
+     "${@}" \
+     "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/build/mac/chrome_mac.croc b/build/mac/chrome_mac.croc
new file mode 100644
index 0000000..8cde00c
--- /dev/null
+++ b/build/mac/chrome_mac.croc
@@ -0,0 +1,36 @@
+# -*- python -*-
+# Crocodile config file for Chromium mac
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include chromeos, linux, or windows specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|linux|win|views)(\\.|_)',
+      'include' : 0,
+    },
+    # Don't include ChromeOS dirs
+    {
+      'regexp' : '.*/chromeos/',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_mac\\.',
+      'group' : 'test',
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.m$',
+      'language' : 'ObjC',
+    },
+    {
+      'regexp' : '.*\\.mm$',
+      'language' : 'ObjC++',
+    },
+  ],
+}
diff --git a/build/mac/copy_asan_runtime_dylib.sh b/build/mac/copy_asan_runtime_dylib.sh
new file mode 100755
index 0000000..f221c4a
--- /dev/null
+++ b/build/mac/copy_asan_runtime_dylib.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# For app bundles built with ASan, copies the runtime lib
+# (libclang_rt.asan_osx_dynamic.dylib), on which their executables depend, from
+# the compiler installation path into the bundle and fixes the dylib's install
+# name in the binary to be relative to @executable_path.
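+#
+# For example (illustrative, with a hypothetical Clang install path): a
+# dependency recorded in the binary as
+#   .../lib/clang/<version>/lib/darwin/libclang_rt.asan_osx_dynamic.dylib
+# is rewritten to
+#   @executable_path/../Libraries/libclang_rt.asan_osx_dynamic.dylib
+# for a full .app bundle, or to @executable_path/<dylib name> otherwise.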
+
+set -e
+
+BINARY="${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
+
+if [[ ! -f "$BINARY" ]]; then
+  # This is neither an .app bundle nor a standalone executable.
+  # Most likely the script has been called for a data bundle.
+  exit 0
+fi
+
+BINARY_DIR="$(dirname "${BINARY}")"
+
+# Find the link to the ASan runtime encoded in the binary.
+BUILTIN_DYLIB_PATH=$(otool -L "${BINARY}" | \
+    sed -Ene 's/^[[:blank:]]+(.*libclang_rt\.asan_.*_dynamic\.dylib).*$/\1/p')
+
+if [[ "${BUILTIN_DYLIB_PATH}" == *asan_iossim_dynamic* ]]; then
+  ASAN_DYLIB_NAME=libclang_rt.asan_iossim_dynamic.dylib
+elif [[ "${BUILTIN_DYLIB_PATH}" == *asan_osx_dynamic* ]]; then
+  ASAN_DYLIB_NAME=libclang_rt.asan_osx_dynamic.dylib
+fi
+
+if [[ -z "${BUILTIN_DYLIB_PATH}" ]]; then
+  echo "${BINARY} does not depend on the ASan runtime library!" >&2
+  exit 1
+fi
+
+# TODO(glider): this doesn't work if we set CC and CXX to override the default
+# Clang.
+ASAN_DYLIB=$(find \
+    "${BUILT_PRODUCTS_DIR}/../../third_party/llvm-build/Release+Asserts/lib/clang/" \
+    -type f -path "*${ASAN_DYLIB_NAME}")
+
+DYLIB_BASENAME=$(basename "${ASAN_DYLIB}")
+if [[ "${DYLIB_BASENAME}" != "${ASAN_DYLIB_NAME}" ]]; then
+  echo "basename(${ASAN_DYLIB}) != ${ASAN_DYLIB_NAME}" >&2
+  exit 1
+fi
+
+# Check whether the directory containing the executable binary is named
+# "MacOS". In this case we're building a full-fledged OSX app and will put
+# the runtime into appname.app/Contents/Libraries/. Otherwise this is probably
+# an iOS gtest app, and the ASan runtime is put next to the executable.
+UPPER_DIR=$(dirname "${BINARY_DIR}")
+if [ "${UPPER_DIR}" == "MacOS" ]; then
+  LIBRARIES_DIR="${UPPER_DIR}/Libraries"
+  mkdir -p "${LIBRARIES_DIR}"
+  NEW_LC_ID_DYLIB="@executable_path/../Libraries/${ASAN_DYLIB_NAME}"
+else
+  LIBRARIES_DIR="${BINARY_DIR}"
+  NEW_LC_ID_DYLIB="@executable_path/${ASAN_DYLIB_NAME}"
+fi
+
+cp "${ASAN_DYLIB}" "${LIBRARIES_DIR}"
+
+# Make LC_ID_DYLIB of the runtime copy point to its location.
+install_name_tool \
+    -id "${NEW_LC_ID_DYLIB}" \
+    "${LIBRARIES_DIR}/${ASAN_DYLIB_NAME}"
+
+# Fix the rpath to the runtime library recorded in the binary.
+install_name_tool \
+    -change "${BUILTIN_DYLIB_PATH}" \
+    "${NEW_LC_ID_DYLIB}" \
+    "${BINARY}"
diff --git a/build/mac/copy_framework_unversioned.sh b/build/mac/copy_framework_unversioned.sh
new file mode 100755
index 0000000..380cc90
--- /dev/null
+++ b/build/mac/copy_framework_unversioned.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Copies a framework to its new home, "unversioning" it.
+#
+# Normally, frameworks are versioned bundles.  The contents of a framework are
+# stored in a versioned directory within the bundle, and symbolic links
+# provide access to the actual code and resources.  See
+# http://developer.apple.com/mac/library/documentation/MacOSX/Conceptual/BPFrameworks/Concepts/FrameworkAnatomy.html
+#
+# The symbolic links usually found in frameworks create problems.  Symbolic
+# links are excluded from code signatures.  That means that it's possible to
+# remove or retarget a symbolic link within a framework without affecting the
+# seal.  In Chrome's case, the outer .app bundle contains a framework where
+# all application code and resources live.  In order for the signature on the
+# .app to be meaningful, it encompasses the framework.  Because framework
+# resources are accessed through the framework's symbolic links, this
+# arrangement results in a case where the resources can be altered without
+# affecting the .app signature's validity.
+#
+# Indirection through symbolic links also carries a runtime performance
+# penalty on open() operations, although open() typically completes so quickly
+# that this is not considered a major performance problem.
+#
+# To resolve these problems, the frameworks that ship within Chrome's .app
+# bundle are unversioned.  Unversioning is simple: instead of using the
+# original outer .framework directory as the framework that ships within the
+# .app, the inner versioned directory is used.  Instead of accessing bundled
+# resources through symbolic links, they are accessed directly.  In normal
+# situations, the only hard-coded use of the versioned directory is by dyld,
+# when loading the framework's code, but this is handled through a normal
+# Mach-O load command, and it is easy to adjust the load command to point to
+# the unversioned framework code rather than the versioned counterpart.
+#
+# The resulting framework bundles aren't strictly conforming, but they work
+# as well as normal versioned framework bundles.
+#
+# An option to skip running install_name_tool is available. By passing -I as
+# the first argument to this script, install_name_tool will be skipped. This
+# is only suitable for copied frameworks that will not be linked against, or
+# when install_name_tool will be run on any linker output when something is
+# linked against the copied framework. This option exists to allow signed
+# frameworks to pass through without subjecting them to any modifications that
+# would break their signatures.
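+#
+# For example (illustrative): invoked as
+#   copy_framework_unversioned.sh Foo.framework /path/to/destination
+# the script copies the contents of Foo.framework/Versions/<current>/ into
+# /path/to/destination/Foo.framework/ and then rewrites the copied dylib's
+# LC_ID_DYLIB so that it no longer mentions the Versions directory.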
+
+set -e
+
+RUN_INSTALL_NAME_TOOL=1
+if [ $# -eq 3 ] && [ "${1}" = "-I" ] ; then
+  shift
+  RUN_INSTALL_NAME_TOOL=
+fi
+
+if [ $# -ne 2 ] ; then
+  echo "usage: ${0} [-I] FRAMEWORK DESTINATION_DIR" >& 2
+  exit 1
+fi
+
+# FRAMEWORK should be a path to a versioned framework bundle, ending in
+# .framework.  DESTINATION_DIR is the directory that the unversioned framework
+# bundle will be copied to.
+
+FRAMEWORK="${1}"
+DESTINATION_DIR="${2}"
+
+FRAMEWORK_NAME="$(basename "${FRAMEWORK}")"
+if [ "${FRAMEWORK_NAME: -10}" != ".framework" ] ; then
+  echo "${0}: ${FRAMEWORK_NAME} does not end in .framework" >& 2
+  exit 1
+fi
+FRAMEWORK_NAME_NOEXT="${FRAMEWORK_NAME:0:$((${#FRAMEWORK_NAME} - 10))}"
+
+# Find the current version.
+VERSIONS="${FRAMEWORK}/Versions"
+CURRENT_VERSION_LINK="${VERSIONS}/Current"
+CURRENT_VERSION_ID="$(readlink "${CURRENT_VERSION_LINK}")"
+CURRENT_VERSION="${VERSIONS}/${CURRENT_VERSION_ID}"
+
+# Make sure that the framework's structure makes sense as a versioned bundle.
+if [ ! -e "${CURRENT_VERSION}/${FRAMEWORK_NAME_NOEXT}" ] ; then
+  echo "${0}: ${FRAMEWORK_NAME} does not contain a dylib" >& 2
+  exit 1
+fi
+
+DESTINATION="${DESTINATION_DIR}/${FRAMEWORK_NAME}"
+
+# Copy the versioned directory within the versioned framework to its
+# destination location.
+mkdir -p "${DESTINATION_DIR}"
+rsync -acC --delete --exclude Headers --exclude PrivateHeaders \
+    --include '*.so' "${CURRENT_VERSION}/" "${DESTINATION}"
+
+if [[ -n "${RUN_INSTALL_NAME_TOOL}" ]]; then
+  # Adjust the Mach-O LC_ID_DYLIB load command in the framework.  This does not
+  # change the LC_LOAD_DYLIB load commands in anything that may have already
+  # linked against the framework.  Not all frameworks will actually need this
+  # to be changed.  Some frameworks may already be built with the proper
+  # LC_ID_DYLIB for use as an unversioned framework.  Xcode users can do this
+  # by setting LD_DYLIB_INSTALL_NAME to
+  # $(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(WRAPPER_NAME)/$(PRODUCT_NAME)
+  # If invoking ld via gcc or g++, pass the desired path to -Wl,-install_name
+  # at link time.
+  FRAMEWORK_DYLIB="${DESTINATION}/${FRAMEWORK_NAME_NOEXT}"
+  LC_ID_DYLIB_OLD="$(otool -l "${FRAMEWORK_DYLIB}" |
+                         grep -A10 "^ *cmd LC_ID_DYLIB$" |
+                         grep -m1 "^ *name" |
+                         sed -Ee 's/^ *name (.*) \(offset [0-9]+\)$/\1/')"
+  VERSION_PATH="/Versions/${CURRENT_VERSION_ID}/${FRAMEWORK_NAME_NOEXT}"
+  LC_ID_DYLIB_NEW="$(echo "${LC_ID_DYLIB_OLD}" |
+                     sed -Ee "s%${VERSION_PATH}$%/${FRAMEWORK_NAME_NOEXT}%")"
+
+  if [ "${LC_ID_DYLIB_NEW}" != "${LC_ID_DYLIB_OLD}" ] ; then
+    install_name_tool -id "${LC_ID_DYLIB_NEW}" "${FRAMEWORK_DYLIB}"
+  fi
+fi
diff --git a/build/mac/edit_xibs.sh b/build/mac/edit_xibs.sh
new file mode 100755
index 0000000..b7b749e
--- /dev/null
+++ b/build/mac/edit_xibs.sh
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is a convenience to run GYP for /src/chrome/chrome_nibs.gyp
+# with the Xcode generator (as you likely use ninja). Documentation:
+#   http://dev.chromium.org/developers/design-documents/mac-xib-files
+
+set -e
+
+RELSRC=$(dirname "$0")/../..
+SRC=$(cd "$RELSRC" && pwd)
+export PYTHONPATH="$PYTHONPATH:$SRC/build"
+export GYP_GENERATORS=xcode
+"$SRC/tools/gyp/gyp" -I"$SRC/build/common.gypi" "$SRC/chrome/chrome_nibs.gyp"
+echo "You can now edit XIB files in Xcode using:"
+echo "  $SRC/chrome/chrome_nibs.xcodeproj"
diff --git a/build/mac/find_sdk.py b/build/mac/find_sdk.py
new file mode 100755
index 0000000..0534766
--- /dev/null
+++ b/build/mac/find_sdk.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints the lowest locally available SDK version greater than or equal to a
+given minimum sdk version to standard output.
+
+Usage:
+  python find_sdk.py 10.6  # Ignores SDKs < 10.6
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+
+from optparse import OptionParser
+
+
+def parse_version(version_str):
+  """'10.6' => [10, 6]"""
+  return map(int, re.findall(r'(\d+)', version_str))
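+
+# Illustrative check, added for clarity (not part of the original script):
+# list comparison orders versions numerically, so '10.10' sorts after
+# '10.9', where a plain string comparison would get it wrong.
+assert parse_version('10.10') > parse_version('10.9')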
+
+
+def main():
+  parser = OptionParser()
+  parser.add_option("--verify",
+                    action="store_true", dest="verify", default=False,
+                    help="return the sdk argument and warn if it doesn't exist")
+  parser.add_option("--sdk_path",
+                    action="store", type="string", dest="sdk_path", default="",
+                    help="user-specified SDK path; bypasses verification")
+  parser.add_option("--print_sdk_path",
+                    action="store_true", dest="print_sdk_path", default=False,
+                    help="Additionally print the path to the SDK (appears first).")
+  (options, args) = parser.parse_args()
+  min_sdk_version = args[0]
+
+  job = subprocess.Popen(['xcode-select', '-print-path'],
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
+  out, err = job.communicate()
+  if job.returncode != 0:
+    print >> sys.stderr, out
+    print >> sys.stderr, err
+    raise Exception(('Error %d running xcode-select, you might have to run '
+      '|sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer| '
+      'if you are using Xcode 4.') % job.returncode)
+  # The Developer folder moved in Xcode 4.3.
+  xcode43_sdk_path = os.path.join(
+      out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs')
+  if os.path.isdir(xcode43_sdk_path):
+    sdk_dir = xcode43_sdk_path
+  else:
+    sdk_dir = os.path.join(out.rstrip(), 'SDKs')
+  sdks = [re.findall('^MacOSX(10\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)]
+  sdks = [s[0] for s in sdks if s]  # [['10.5'], ['10.6']] => ['10.5', '10.6']
+  sdks = [s for s in sdks  # ['10.5', '10.6'] => ['10.6']
+          if parse_version(s) >= parse_version(min_sdk_version)]
+  if not sdks:
+    raise Exception('No %s+ SDK found' % min_sdk_version)
+  best_sdk = sorted(sdks, key=parse_version)[0]
+
+  if options.verify and best_sdk != min_sdk_version and not options.sdk_path:
+    print >> sys.stderr, ''
+    print >> sys.stderr, '                                           vvvvvvv'
+    print >> sys.stderr, ''
+    print >> sys.stderr, \
+        'This build requires the %s SDK, but it was not found on your system.' \
+        % min_sdk_version
+    print >> sys.stderr, \
+        'Either install it, or explicitly set mac_sdk in your GYP_DEFINES.'
+    print >> sys.stderr, ''
+    print >> sys.stderr, '                                           ^^^^^^^'
+    print >> sys.stderr, ''
+    return min_sdk_version
+
+  if options.print_sdk_path:
+    print subprocess.check_output(['xcodebuild', '-version', '-sdk',
+                                   'macosx' + best_sdk, 'Path']).strip()
+
+  return best_sdk
+
+
+if __name__ == '__main__':
+  if sys.platform != 'darwin':
+    raise Exception("This script only runs on Mac")
+  print main()
diff --git a/build/mac/make_more_helpers.sh b/build/mac/make_more_helpers.sh
new file mode 100755
index 0000000..6f5c474
--- /dev/null
+++ b/build/mac/make_more_helpers.sh
@@ -0,0 +1,91 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: make_more_helpers.sh <directory_within_contents> <app_name>
+#
+# This script creates additional helper .app bundles for Chromium, based on
+# the existing helper .app bundle, changing their Mach-O header's flags to
+# enable and disable various features. Based on Chromium Helper.app, it will
+# create Chromium Helper EH.app, which has the MH_NO_HEAP_EXECUTION bit
+# cleared to support Chromium child processes that require an executable heap,
+# and Chromium Helper NP.app, which has the MH_PIE bit cleared to support
+# Chromium child processes that cannot tolerate ASLR.
+#
+# This script expects to be called from the chrome_exe target as a postbuild,
+# and operates directly within the built-up browser app's versioned directory.
+#
+# Each helper is adjusted by giving it the proper bundle name, renaming the
+# executable, adjusting several Info.plist keys, and changing the executable's
+# Mach-O flags.
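+#
+# For example (illustrative): called as
+#   make_more_helpers.sh "Versions/A" "Chromium"
+# it expects "Chromium Helper.app" under Contents/Versions/A of the built app
+# and produces "Chromium Helper EH.app" and "Chromium Helper NP.app" next to
+# it.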
+
+set -eu
+
+make_helper() {
+  local containing_dir="${1}"
+  local app_name="${2}"
+  local feature="${3}"
+  local flags="${4}"
+
+  local helper_name="${app_name} Helper"
+  local helper_stem="${containing_dir}/${helper_name}"
+  local original_helper="${helper_stem}.app"
+  if [[ ! -d "${original_helper}" ]]; then
+    echo "${0}: error: ${original_helper} is a required directory" >& 2
+    exit 1
+  fi
+  local original_helper_exe="${original_helper}/Contents/MacOS/${helper_name}"
+  if [[ ! -f "${original_helper_exe}" ]]; then
+    echo "${0}: error: ${original_helper_exe} is a required file" >& 2
+    exit 1
+  fi
+
+  local feature_helper="${helper_stem} ${feature}.app"
+
+  rsync -acC --delete --include '*.so' "${original_helper}/" "${feature_helper}"
+
+  local helper_feature="${helper_name} ${feature}"
+  local helper_feature_exe="${feature_helper}/Contents/MacOS/${helper_feature}"
+  mv "${feature_helper}/Contents/MacOS/${helper_name}" "${helper_feature_exe}"
+
+  local change_flags="$(dirname "${0}")/change_mach_o_flags.py"
+  "${change_flags}" ${flags} "${helper_feature_exe}"
+
+  local feature_info="${feature_helper}/Contents/Info"
+  local feature_info_plist="${feature_info}.plist"
+
+  defaults write "${feature_info}" "CFBundleDisplayName" "${helper_feature}"
+  defaults write "${feature_info}" "CFBundleExecutable" "${helper_feature}"
+
+  cfbundleid="$(defaults read "${feature_info}" "CFBundleIdentifier")"
+  feature_cfbundleid="${cfbundleid}.${feature}"
+  defaults write "${feature_info}" "CFBundleIdentifier" "${feature_cfbundleid}"
+
+  cfbundlename="$(defaults read "${feature_info}" "CFBundleName")"
+  feature_cfbundlename="${cfbundlename} ${feature}"
+  defaults write "${feature_info}" "CFBundleName" "${feature_cfbundlename}"
+
+  # As usual, defaults might have put the plist into whatever format excites
+  # it, but Info.plists get converted back to the expected XML format.
+  plutil -convert xml1 "${feature_info_plist}"
+
+  # `defaults` also changes the file permissions, so make the file
+  # world-readable again.
+  chmod a+r "${feature_info_plist}"
+}
+
+if [[ ${#} -ne 2 ]]; then
+  echo "usage: ${0} <directory_within_contents> <app_name>" >& 2
+  exit 1
+fi
+
+DIRECTORY_WITHIN_CONTENTS="${1}"
+APP_NAME="${2}"
+
+CONTENTS_DIR="${BUILT_PRODUCTS_DIR}/${CONTENTS_FOLDER_PATH}"
+CONTAINING_DIR="${CONTENTS_DIR}/${DIRECTORY_WITHIN_CONTENTS}"
+
+make_helper "${CONTAINING_DIR}" "${APP_NAME}" "EH" "--executable-heap"
+make_helper "${CONTAINING_DIR}" "${APP_NAME}" "NP" "--no-pie"
diff --git a/build/mac/strip_from_xcode b/build/mac/strip_from_xcode
new file mode 100755
index 0000000..c26b9fb
--- /dev/null
+++ b/build/mac/strip_from_xcode
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a handy wrapper script that figures out how to call the strip
+# utility (strip_save_dsym in this case), if it even needs to be called at all,
+# and then does it.  This script should be called by a post-link phase in
+# targets that might generate Mach-O executables, dynamic libraries, or
+# loadable bundles.
+#
+# An example "Strip If Needed" build phase placed after "Link Binary With
+# Libraries" would do:
+# exec "${XCODEPROJ_DEPTH}/build/mac/strip_from_xcode"
+
+if [ "${CONFIGURATION}" != "Release" ] ; then
+  # Only strip in release mode.
+  exit 0
+fi
+
+declare -a FLAGS
+
+# MACH_O_TYPE is not set for a command-line tool, so check PRODUCT_TYPE too.
+# Weird.
+if [ "${MACH_O_TYPE}" = "mh_execute" ] || \
+   [ "${PRODUCT_TYPE}" = "com.apple.product-type.tool" ] ; then
+  # Strip everything (no special flags).  No-op.
+  true
+elif [ "${MACH_O_TYPE}" = "mh_dylib" ] || \
+     [ "${MACH_O_TYPE}" = "mh_bundle" ]; then
+  # Strip debugging symbols and local symbols
+  FLAGS[${#FLAGS[@]}]=-S
+  FLAGS[${#FLAGS[@]}]=-x
+elif [ "${MACH_O_TYPE}" = "staticlib" ] ; then
+  # Don't strip static libraries.
+  exit 0
+else
+  # Warn, but don't treat this as an error.
+  echo $0: warning: unrecognized MACH_O_TYPE ${MACH_O_TYPE}
+  exit 0
+fi
+
+if [ -n "${STRIPFLAGS}" ] ; then
+  # Pick up the standard STRIPFLAGS Xcode setting, used for "Additional Strip
+  # Flags".
+  for stripflag in "${STRIPFLAGS}" ; do
+    FLAGS[${#FLAGS[@]}]="${stripflag}"
+  done
+fi
+
+if [ -n "${CHROMIUM_STRIP_SAVE_FILE}" ] ; then
+  # An Xcode project can communicate a file listing symbols to be saved
+  # through this environment variable by setting it as a build setting.  This
+  # isn't a standard Xcode setting.  It's used in preference to STRIPFLAGS to
+  # eliminate quoting ambiguity concerns.
+  FLAGS[${#FLAGS[@]}]=-s
+  FLAGS[${#FLAGS[@]}]="${CHROMIUM_STRIP_SAVE_FILE}"
+fi
+
+exec "$(dirname ${0})/strip_save_dsym" "${FLAGS[@]}" \
+     "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/build/mac/strip_save_dsym b/build/mac/strip_save_dsym
new file mode 100755
index 0000000..c9cf226
--- /dev/null
+++ b/build/mac/strip_save_dsym
@@ -0,0 +1,335 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: strip_save_dsym <whatever-arguments-you-would-pass-to-strip>
+#
+# strip_save_dsym is a wrapper around the standard strip utility.  Given an
+# input Mach-O file, strip_save_dsym will save a copy of the file in a "fake"
+# .dSYM bundle for debugging, and then call strip to strip the Mach-O file.
+# Note that the .dSYM file is a "fake" in that it's not a self-contained
+# .dSYM bundle, it just contains a copy of the original (unstripped) Mach-O
+# file, and therefore contains references to object files on the filesystem.
+# The generated .dSYM bundle is therefore unsuitable for debugging in the
+# absence of these .o files.
+#
+# If a .dSYM already exists and has a newer timestamp than the Mach-O file,
+# this utility does nothing.  That allows strip_save_dsym to be run on a file
+# that has already been stripped without trashing the .dSYM.
+#
+# Rationale: the "right" way to generate dSYM bundles, dsymutil, is incredibly
+# slow.  On the other hand, doing a file copy (which is really all that
+# dsymutil does) is comparatively fast.  Since we usually just want to strip
+# a release-mode executable but still be able to debug it, and we don't care
+# so much about generating a hermetic dSYM bundle, we'll prefer the file copy.
+# If a real dSYM is ever needed, it's still possible to create one by running
+# dsymutil and pointing it at the original Mach-O file inside the "fake"
+# bundle, provided that the object files are available.
+
+import errno
+import os
+import re
+import shutil
+import subprocess
+import sys
+import time
+
+# Returns a list of architectures contained in a Mach-O file.  The file can be
+# a universal (fat) file, in which case there will be one list element for
+# each contained architecture, or it can be a thin single-architecture Mach-O
+# file, in which case the list will contain a single element identifying the
+# architecture.  On error, returns an empty list.  Determines the architecture
+# list by calling file.
+def macho_archs(macho):
+  macho_types = ["executable",
+                 "dynamically linked shared library",
+                 "bundle"]
+  macho_types_re = "Mach-O (?:64-bit )?(?:" + "|".join(macho_types) + ")"
+
+  file_cmd = subprocess.Popen(["/usr/bin/file", "-b", "--", macho],
+                              stdout=subprocess.PIPE)
+
+  archs = []
+
+  type_line = file_cmd.stdout.readline()
+  type_match = re.match("^%s (.*)$" % macho_types_re, type_line)
+  if type_match:
+    return [type_match.group(1)]
+  else:
+    type_match = re.match("^Mach-O universal binary with (.*) architectures$",
+                          type_line)
+    if type_match:
+      for i in range(0, int(type_match.group(1))):
+        arch_line = file_cmd.stdout.readline()
+        arch_match = re.match(
+                     "^.* \(for architecture (.*)\):\t%s .*$" % macho_types_re,
+                     arch_line)
+        if arch_match:
+          archs.append(arch_match.group(1))
+
+  if file_cmd.wait() != 0:
+    archs = []
+
+  if len(archs) == 0:
+    print >> sys.stderr, "No architectures in %s" % macho
+
+  return archs
+
+# Returns a dictionary mapping architectures contained in the file as returned
+# by macho_archs to the LC_UUID load command for that architecture.
+# Architectures with no LC_UUID load command are omitted from the dictionary.
+# Determines the UUID value by calling otool.
+def macho_uuids(macho):
+  uuids = {}
+
+  archs = macho_archs(macho)
+  if len(archs) == 0:
+    return uuids
+
+  for arch in archs:
+    if arch == "":
+      continue
+
+    otool_cmd = subprocess.Popen(["/usr/bin/otool", "-arch", arch, "-l", "-",
+                                  macho],
+                                 stdout=subprocess.PIPE)
+    # state 0 is when nothing UUID-related has been seen yet.  State 1 is
+    # entered after a load command begins, but it may not be an LC_UUID load
+    # command.  States 2, 3, and 4 are intermediate states while reading an
+    # LC_UUID command.  State 5 is the terminal state for a successful LC_UUID
+    # read.  State 6 is the error state.
+    state = 0
+    uuid = ""
+    for otool_line in otool_cmd.stdout:
+      if state == 0:
+        if re.match("^Load command .*$", otool_line):
+          state = 1
+      elif state == 1:
+        if re.match("^     cmd LC_UUID$", otool_line):
+          state = 2
+        else:
+          state = 0
+      elif state == 2:
+        if re.match("^ cmdsize 24$", otool_line):
+          state = 3
+        else:
+          state = 6
+      elif state == 3:
+        # The UUID display format changed in the version of otool shipping
+        # with the Xcode 3.2.2 prerelease.  The new format is traditional:
+        #    uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+        # and with Xcode 3.2.6, the line is indented one more space:
+        #     uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+        # The old format, from cctools-750 and older's otool, breaks the UUID
+        # up into a sequence of bytes:
+        #    uuid 0x4d 0x71 0x35 0xb2 0x9c 0x56 0xc5 0xf5
+        #         0x5f 0x49 0xa9 0x94 0x25 0x8e 0x09 0x55
+        new_uuid_match = re.match("^ {3,4}uuid (.{8}-.{4}-.{4}-.{4}-.{12})$",
+                                  otool_line)
+        if new_uuid_match:
+          uuid = new_uuid_match.group(1)
+
+          # Skip state 4, there is no second line to read.
+          state = 5
+        else:
+          old_uuid_match = re.match("^   uuid 0x(..) 0x(..) 0x(..) 0x(..) "
+                                    "0x(..) 0x(..) 0x(..) 0x(..)$",
+                                    otool_line)
+          if old_uuid_match:
+            state = 4
+            uuid = old_uuid_match.group(1) + old_uuid_match.group(2) + \
+                   old_uuid_match.group(3) + old_uuid_match.group(4) + "-" + \
+                   old_uuid_match.group(5) + old_uuid_match.group(6) + "-" + \
+                   old_uuid_match.group(7) + old_uuid_match.group(8) + "-"
+          else:
+            state = 6
+      elif state == 4:
+        old_uuid_match = re.match("^        0x(..) 0x(..) 0x(..) 0x(..) "
+                                  "0x(..) 0x(..) 0x(..) 0x(..)$",
+                                  otool_line)
+        if old_uuid_match:
+          state = 5
+          uuid += old_uuid_match.group(1) + old_uuid_match.group(2) + "-" + \
+                  old_uuid_match.group(3) + old_uuid_match.group(4) + \
+                  old_uuid_match.group(5) + old_uuid_match.group(6) + \
+                  old_uuid_match.group(7) + old_uuid_match.group(8)
+        else:
+          state = 6
+
+    if otool_cmd.wait() != 0:
+      state = 6
+
+    if state == 5:
+      uuids[arch] = uuid.upper()
+
+  if len(uuids) == 0:
+    print >> sys.stderr, "No UUIDs in %s" % macho
+
+  return uuids
+
+# Given a path to a Mach-O file and possible information from the environment,
+# determines the desired path to the .dSYM.
+def dsym_path(macho):
+  # If building a bundle, the .dSYM should be placed next to the bundle.  Use
+  # WRAPPER_NAME to make this determination.  If called from xcodebuild,
+  # WRAPPER_NAME will be set to the name of the bundle.
+  dsym = ""
+  if "WRAPPER_NAME" in os.environ:
+    if "BUILT_PRODUCTS_DIR" in os.environ:
+      dsym = os.path.join(os.environ["BUILT_PRODUCTS_DIR"],
+                          os.environ["WRAPPER_NAME"])
+    else:
+      dsym = os.environ["WRAPPER_NAME"]
+  else:
+    dsym = macho
+
+  dsym += ".dSYM"
+
+  return dsym
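+
+# For example (illustrative): when invoked by xcodebuild with WRAPPER_NAME set
+# to "Foo.app", the .dSYM lands next to the bundle as "Foo.app.dSYM"; for a
+# bare Mach-O file such as /path/to/foo, it becomes /path/to/foo.dSYM.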
+
+# Creates a fake .dSYM bundle at dsym for macho, a Mach-O image with the
+# architectures and UUIDs specified by the uuids map.
+def make_fake_dsym(macho, dsym):
+  uuids = macho_uuids(macho)
+  if len(uuids) == 0:
+    return False
+
+  dwarf_dir = os.path.join(dsym, "Contents", "Resources", "DWARF")
+  dwarf_file = os.path.join(dwarf_dir, os.path.basename(macho))
+  try:
+    os.makedirs(dwarf_dir)
+  except OSError, (err, error_string):
+    if err != errno.EEXIST:
+      raise
+  shutil.copyfile(macho, dwarf_file)
+
+  # info_template is the same as what dsymutil would have written, with the
+  # addition of the fake_dsym key.
+  info_template = \
+'''<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+	<dict>
+		<key>CFBundleDevelopmentRegion</key>
+		<string>English</string>
+		<key>CFBundleIdentifier</key>
+		<string>com.apple.xcode.dsym.%(root_name)s</string>
+		<key>CFBundleInfoDictionaryVersion</key>
+		<string>6.0</string>
+		<key>CFBundlePackageType</key>
+		<string>dSYM</string>
+		<key>CFBundleSignature</key>
+		<string>????</string>
+		<key>CFBundleShortVersionString</key>
+		<string>1.0</string>
+		<key>CFBundleVersion</key>
+		<string>1</string>
+		<key>dSYM_UUID</key>
+		<dict>
+%(uuid_dict)s		</dict>
+		<key>fake_dsym</key>
+		<true/>
+	</dict>
+</plist>
+'''
+
+  root_name = os.path.basename(dsym)[:-5]  # whatever.dSYM without .dSYM
+  uuid_dict = ""
+  for arch in sorted(uuids):
+    uuid_dict += "\t\t\t<key>" + arch + "</key>\n"\
+                 "\t\t\t<string>" + uuids[arch] + "</string>\n"
+  info_dict = {
+    "root_name": root_name,
+    "uuid_dict": uuid_dict,
+  }
+  info_contents = info_template % info_dict
+  info_file = os.path.join(dsym, "Contents", "Info.plist")
+  info_fd = open(info_file, "w")
+  info_fd.write(info_contents)
+  info_fd.close()
+
+  return True
+
+# For a Mach-O file, determines where the .dSYM bundle should be located.  If
+# the bundle does not exist or has a modification time older than the Mach-O
+# file, calls make_fake_dsym to create a fake .dSYM bundle there, then strips
+# the Mach-O file and sets the modification time on the .dSYM bundle and Mach-O
+# file to be identical.
+def strip_and_make_fake_dsym(macho):
+  dsym = dsym_path(macho)
+  macho_stat = os.stat(macho)
+  dsym_stat = None
+  try:
+    dsym_stat = os.stat(dsym)
+  except OSError, (err, error_string):
+    if err != errno.ENOENT:
+      raise
+
+  if dsym_stat is None or dsym_stat.st_mtime < macho_stat.st_mtime:
+    # Make a .dSYM bundle
+    if not make_fake_dsym(macho, dsym):
+      return False
+
+    # Strip the Mach-O file
+    remove_dsym = True
+    try:
+      strip_cmdline = ['xcrun', 'strip'] + sys.argv[1:]
+      strip_cmd = subprocess.Popen(strip_cmdline)
+      if strip_cmd.wait() == 0:
+        remove_dsym = False
+    finally:
+      if remove_dsym:
+        shutil.rmtree(dsym)
+
+    # Update modification time on the Mach-O file and .dSYM bundle
+    now = time.time()
+    os.utime(macho, (now, now))
+    os.utime(dsym, (now, now))
+
+  return True
+
+def main(argv=None):
+  if argv is None:
+    argv = sys.argv
+
+  # This only supports operating on one file at a time.  Look at the arguments
+  # to strip to figure out what the source to be stripped is.  Arguments are
+  # processed in the same way that strip does, although to reduce complexity,
+  # this doesn't do all of the same checking as strip.  For example, strip
+  # has no -Z switch and would treat -Z on the command line as an error.  For
+  # the purposes this is needed for, that's fine.
+  macho = None
+  process_switches = True
+  ignore_argument = False
+  for arg in argv[1:]:
+    if ignore_argument:
+      ignore_argument = False
+      continue
+    if process_switches:
+      if arg == "-":
+        process_switches = False
+      # These strip switches accept an argument:
+      if arg in ["-s", "-R", "-d", "-o", "-arch"]:
+        ignore_argument = True
+      if arg[0] == "-":
+        continue
+    if macho is None:
+      macho = arg
+    else:
+      print >> sys.stderr, "Too many things to strip"
+      return 1
+
+  if macho is None:
+    print >> sys.stderr, "Nothing to strip"
+    return 1
+
+  if not strip_and_make_fake_dsym(macho):
+    return 1
+
+  return 0
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv))
diff --git a/build/mac/tweak_info_plist.py b/build/mac/tweak_info_plist.py
new file mode 100755
index 0000000..2057bac
--- /dev/null
+++ b/build/mac/tweak_info_plist.py
@@ -0,0 +1,280 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Xcode supports build variable substitutions and CPP; sadly, that doesn't work
+# because:
+#
+# 1. Xcode wants to do the Info.plist work before it runs any build phases,
+#    this means if we were to generate a .h file for INFOPLIST_PREFIX_HEADER
+#    we'd have to put it in another target so it runs in time.
+# 2. Xcode also doesn't check to see if the header being used as a prefix for
+#    the Info.plist has changed.  So even if we updated it, it's only looking
+#    at the modtime of the info.plist to see if that's changed.
+#
+# So, we work around all of this by making a script build phase that will run
+# during the app build, and simply update the info.plist in place.  This way
+# by the time the app target is done, the info.plist is correct.
+#
+
+import optparse
+import os
+from os import environ as env
+import plistlib
+import re
+import subprocess
+import sys
+import tempfile
+
+TOP = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+
+
+def _GetOutput(args):
+  """Runs a subprocess and waits for termination. Returns (stdout, returncode)
+  of the process. stderr is attached to the parent."""
+  proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+  (stdout, stderr) = proc.communicate()
+  return (stdout, proc.returncode)
+
+
+def _GetOutputNoError(args):
+  """Similar to _GetOutput() but ignores stderr. If there's an error launching
+  the child (like file not found), the exception will be caught and (None, 1)
+  will be returned to mimic quiet failure."""
+  try:
+    proc = subprocess.Popen(args, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+  except OSError:
+    return (None, 1)
+  (stdout, stderr) = proc.communicate()
+  return (stdout, proc.returncode)
+
+
+def _RemoveKeys(plist, *keys):
+  """Removes a varargs of keys from the plist."""
+  for key in keys:
+    try:
+      del plist[key]
+    except KeyError:
+      pass
+
+
+def _AddVersionKeys(plist, version=None):
+  """Adds the product version number into the plist. Returns True on success and
+  False on error. The error will be printed to stderr."""
+  if version:
+    match = re.match('\d+\.\d+\.(\d+\.\d+)$', version)
+    if not match:
+      print >>sys.stderr, 'Invalid version string specified: "%s"' % version
+      return False
+
+    full_version = match.group(0)
+    bundle_version = match.group(1)
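+
+    # For example (illustrative): a version string of "1.2.3.4" yields
+    # CFBundleShortVersionString "1.2.3.4" and CFBundleVersion "3.4".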
+
+  else:
+    # Pull in the Chrome version number.
+    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+    VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
+
+    (stdout, retval1) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
+                                    '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'])
+    full_version = stdout.rstrip()
+
+    (stdout, retval2) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
+                                    '@BUILD@.@PATCH@'])
+    bundle_version = stdout.rstrip()
+
+    # If either of the two version commands finished with non-zero returncode,
+    # report the error up.
+    if retval1 or retval2:
+      return False
+
+  # Add public version info so "Get Info" works.
+  plist['CFBundleShortVersionString'] = full_version
+
+  # Honor the 429496.72.95 limit.  The maximum comes from splitting 2^32 - 1
+  # into 6, 2, 2 digits.  The limitation was present in Tiger; it may have
+  # been fixed in a later OS release, but that hasn't been tested (it's easy
+  # enough to find out with "lsregister -dump").
+  # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html
+  # BUILD will always be an increasing value, so BUILD.PATCH gives us
+  # something unique that meets what LS wants.
+  plist['CFBundleVersion'] = bundle_version
+
+  # Return with no error.
+  return True
+
+
+def _DoSCMKeys(plist, add_keys):
+  """Adds the SCM information, visible in about:version, to property list. If
+  |add_keys| is True, it will insert the keys, otherwise it will remove them."""
+  scm_revision = None
+  if add_keys:
+    # Pull in the Chrome revision number.
+    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+    LASTCHANGE_FILE = os.path.join(TOP, 'build/util/LASTCHANGE')
+    (stdout, retval) = _GetOutput([VERSION_TOOL, '-f', LASTCHANGE_FILE, '-t',
+                                  '@LASTCHANGE@'])
+    if retval:
+      return False
+    scm_revision = stdout.rstrip()
+
+  # Remove any existing key, then re-add it if a revision was determined.
+  _RemoveKeys(plist, 'SCMRevision')
+  if scm_revision != None:
+    plist['SCMRevision'] = scm_revision
+  elif add_keys:
+    print >>sys.stderr, 'Could not determine SCM revision.  This may be OK.'
+
+  return True
+
+
+def _AddBreakpadKeys(plist, branding):
+  """Adds the Breakpad keys. This must be called AFTER _AddVersionKeys() and
+  also requires the |branding| argument."""
+  plist['BreakpadReportInterval'] = '3600'  # Deliberately a string.
+  plist['BreakpadProduct'] = '%s_Mac' % branding
+  plist['BreakpadProductDisplay'] = branding
+  plist['BreakpadVersion'] = plist['CFBundleShortVersionString']
+  # These are both deliberately strings and not boolean.
+  plist['BreakpadSendAndExit'] = 'YES'
+  plist['BreakpadSkipConfirm'] = 'YES'
+
+
+def _RemoveBreakpadKeys(plist):
+  """Removes any set Breakpad keys."""
+  _RemoveKeys(plist,
+      'BreakpadURL',
+      'BreakpadReportInterval',
+      'BreakpadProduct',
+      'BreakpadProductDisplay',
+      'BreakpadVersion',
+      'BreakpadSendAndExit',
+      'BreakpadSkipConfirm')
+
+
+def _TagSuffixes():
+  # Keep this list sorted in the order that tag suffix components are to
+  # appear in a tag value. That is to say, it should be sorted per ASCII.
+  components = ('32bit', 'full')
+  assert tuple(sorted(components)) == components
+
+  components_len = len(components)
+  combinations = 1 << components_len
+  tag_suffixes = []
+  for combination in xrange(0, combinations):
+    tag_suffix = ''
+    for component_index in xrange(0, components_len):
+      if combination & (1 << component_index):
+        tag_suffix += '-' + components[component_index]
+    tag_suffixes.append(tag_suffix)
+  return tag_suffixes
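+
+# Illustrative expectation, added for clarity (not part of the original
+# script): with components ('32bit', 'full'), the enumeration yields
+# ['', '-32bit', '-full', '-32bit-full'].
+assert _TagSuffixes() == ['', '-32bit', '-full', '-32bit-full']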
+
+
+def _AddKeystoneKeys(plist, bundle_identifier):
+  """Adds the Keystone keys. This must be called AFTER _AddVersionKeys() and
+  also requires the |bundle_identifier| argument (com.example.product)."""
+  plist['KSVersion'] = plist['CFBundleShortVersionString']
+  plist['KSProductID'] = bundle_identifier
+  plist['KSUpdateURL'] = 'https://tools.google.com/service/update2'
+
+  _RemoveKeys(plist, 'KSChannelID')
+  for tag_suffix in _TagSuffixes():
+    if tag_suffix:
+      plist['KSChannelID' + tag_suffix] = tag_suffix
+
+
+def _RemoveKeystoneKeys(plist):
+  """Removes any set Keystone keys."""
+  _RemoveKeys(plist,
+      'KSVersion',
+      'KSProductID',
+      'KSUpdateURL')
+
+  tag_keys = []
+  for tag_suffix in _TagSuffixes():
+    tag_keys.append('KSChannelID' + tag_suffix)
+  _RemoveKeys(plist, *tag_keys)
+
+
+def Main(argv):
+  parser = optparse.OptionParser('%prog [options]')
+  parser.add_option('--breakpad', dest='use_breakpad', action='store',
+      type='int', default=False, help='Enable Breakpad [1 or 0]')
+  parser.add_option('--breakpad_uploads', dest='breakpad_uploads',
+      action='store', type='int', default=False,
+      help='Enable Breakpad\'s uploading of crash dumps [1 or 0]')
+  parser.add_option('--keystone', dest='use_keystone', action='store',
+      type='int', default=False, help='Enable Keystone [1 or 0]')
+  parser.add_option('--scm', dest='add_scm_info', action='store', type='int',
+      default=True, help='Add SCM metadata [1 or 0]')
+  parser.add_option('--branding', dest='branding', action='store',
+      type='string', default=None, help='The branding of the binary')
+  parser.add_option('--bundle_id', dest='bundle_identifier',
+      action='store', type='string', default=None,
+      help='The bundle id of the binary')
+  parser.add_option('--version', dest='version', action='store', type='string',
+      default=None, help='The version string [major.minor.build.patch]')
+  (options, args) = parser.parse_args(argv)
+
+  if len(args) > 0:
+    print >>sys.stderr, parser.get_usage()
+    return 1
+
+  # Read the plist into its parsed format.
+  DEST_INFO_PLIST = os.path.join(env['TARGET_BUILD_DIR'], env['INFOPLIST_PATH'])
+  plist = plistlib.readPlist(DEST_INFO_PLIST)
+
+  # Insert the product version.
+  if not _AddVersionKeys(plist, version=options.version):
+    return 2
+
+  # Add Breakpad if configured to do so.
+  if options.use_breakpad:
+    if options.branding is None:
+      print >>sys.stderr, 'Use of Breakpad requires branding.'
+      return 1
+    _AddBreakpadKeys(plist, options.branding)
+    if options.breakpad_uploads:
+      plist['BreakpadURL'] = 'https://clients2.google.com/cr/report'
+    else:
+      # This allows crash dumping to a file without uploading the
+      # dump, for testing purposes.  Breakpad does not recognise
+      # "none" as a special value, but this does stop crash dump
+      # uploading from happening.  We need to specify something
+      # because if "BreakpadURL" is not present, Breakpad will not
+      # register its crash handler and no crash dumping will occur.
+      plist['BreakpadURL'] = 'none'
+  else:
+    _RemoveBreakpadKeys(plist)
+
+  # Only add Keystone in Release builds.
+  if options.use_keystone and env['CONFIGURATION'] == 'Release':
+    if options.bundle_identifier is None:
+      print >>sys.stderr, 'Use of Keystone requires the bundle id.'
+      return 1
+    _AddKeystoneKeys(plist, options.bundle_identifier)
+  else:
+    _RemoveKeystoneKeys(plist)
+
+  # Adds or removes any SCM keys.
+  if not _DoSCMKeys(plist, options.add_scm_info):
+    return 3
+
+  # Now that all keys have been mutated, rewrite the file.
+  temp_info_plist = tempfile.NamedTemporaryFile()
+  plistlib.writePlist(plist, temp_info_plist.name)
+
+  # Info.plist will work perfectly well in any plist format, but traditionally
+  # applications use xml1 for this, so convert it to ensure that it's valid.
+  proc = subprocess.Popen(['plutil', '-convert', 'xml1', '-o', DEST_INFO_PLIST,
+                           temp_info_plist.name])
+  proc.wait()
+  return proc.returncode
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/build/mac/verify_no_objc.sh b/build/mac/verify_no_objc.sh
new file mode 100755
index 0000000..e18a5ea
--- /dev/null
+++ b/build/mac/verify_no_objc.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script makes sure that no __OBJC,__image_info section appears in the
+# executable file built by the Xcode target that runs the script. If such a
+# section appears, the script prints an error message and exits nonzero.
+#
+# Why is this important?
+#
+# On 10.5, there's a bug in CFBundlePreflightExecutable that causes it to
+# crash when operating in an executable that has not loaded at its default
+# address (that is, when it's a position-independent executable with the
+# MH_PIE bit set in its mach_header) and the executable has an
+# __OBJC,__image_info section. See http://crbug.com/88697.
+#
+# Chrome's main executables don't use any Objective-C at all, and don't need
+# to carry this section around. Not linking them as Objective-C when they
+# don't need it anyway saves about 4kB in the linked executable, although most
+# of that 4kB is just filled with zeroes.
+#
+# This script makes sure that nobody goofs and accidentally introduces these
+# sections into the main executables.
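+#
+# When such a section is present, "otool -o" prints a line like (illustrative):
+#   Contents of (__OBJC,__image_info) section
+# which is what the grep below matches.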
+
+set -eu
+
+executable="${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
+
+if xcrun otool -arch i386 -o "${executable}" | grep -q '^Contents.*section$'; \
+then
+  echo "${0}: ${executable} has an __OBJC,__image_info section" 2>&1
+  exit 1
+fi
+
+if [[ ${PIPESTATUS[0]} -ne 0 ]]; then
+  echo "${0}: otool failed" 2>&1
+  exit 1
+fi
+
+exit 0
diff --git a/build/module_args/dart.gni b/build/module_args/dart.gni
new file mode 100644
index 0000000..ee6b038
--- /dev/null
+++ b/build/module_args/dart.gni
@@ -0,0 +1,6 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This variable should point to the Dart SDK.
+dart_sdk_root = "//third_party/dart-sdk/dart-sdk"
diff --git a/build/module_args/mojo.gni b/build/module_args/mojo.gni
new file mode 100644
index 0000000..fee9114
--- /dev/null
+++ b/build/module_args/mojo.gni
@@ -0,0 +1,16 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This variable should point to the parent directory of the Mojo SDK.
+mojo_sdk_root = "//"
+
+# To build the Mojo shell from source, set this variable to true. To use the
+# prebuilt shell, omit this variable or set it to false. Note that the prebuilt
+# shell will be used only on platforms for which it is published (currently
+# Linux and Android).
+mojo_build_mojo_shell_from_source = true
+
+# To build the network service from source, set this variable to true. To use
+# the prebuilt network service, omit this variable or set it to false.
+mojo_build_network_service_from_source = true
diff --git a/build/module_args/nacl.gni b/build/module_args/nacl.gni
new file mode 100644
index 0000000..61e0768
--- /dev/null
+++ b/build/module_args/nacl.gni
@@ -0,0 +1,6 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Override nacl's build directory.
+nacl_shared_build_dir = "//build"
diff --git a/build/module_args/v8.gni b/build/module_args/v8.gni
new file mode 100644
index 0000000..8b5204c
--- /dev/null
+++ b/build/module_args/v8.gni
@@ -0,0 +1,13 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_android) {
+  import("//build/config/android/config.gni")
+}
+
+# TODO(sky): nuke this. Temporary while sorting out http://crbug.com/465456.
+enable_correct_v8_arch = false
+
+v8_use_external_startup_data = !(is_chromeos || is_win)
+v8_extra_library_files = []
diff --git a/build/nocompile.gypi b/build/nocompile.gypi
new file mode 100644
index 0000000..8c0f288
--- /dev/null
+++ b/build/nocompile.gypi
@@ -0,0 +1,96 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to create a unittest that
+# invokes a set of no-compile tests.  A no-compile test is a test that asserts
+# a particular construct will not compile.
+#
+# Also see:
+#   http://dev.chromium.org/developers/testing/no-compile-tests
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_module_nc_unittests',
+#   'type': 'executable',
+#   'sources': [
+#     'nc_testset_1.nc',
+#     'nc_testset_2.nc',
+#   ],
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The .nc files are C++ files that contain code we wish to assert will not
+# compile.  Each individual test case in the file should be put in its own
+# #ifdef section.  The expected compiler output should be appended as a
+# C++-style comment containing a Python list of regular expressions; this
+# line will likely be longer than 80 characters.  A precise expected output
+# matters so that random compile failures do not cause the test to pass.
+#
+# Example .nc file:
+#
+#   #if defined(TEST_NEEDS_SEMICOLON)  // [r"expected ',' or ';' at end of input"]
+#
+#   int a = 1
+#
+#   #elif defined(TEST_NEEDS_CAST)  // [r"invalid conversion from 'void*' to 'char*'"]
+#
+#   void* a = NULL;
+#   char* b = a;
+#
+#   #endif
+#
+# If we needed to disable TEST_NEEDS_SEMICOLON, we would change the define to:
+#
+#   DISABLE_TEST_NEEDS_SEMICOLON
+#   TEST_NEEDS_CAST
+#
+# The lines above are parsed by a regexp so avoid getting creative with the
+# formatting or ifdef logic; it will likely just not work.
+#
+# Implementation notes:
+# The .nc files are actually processed by a python script which executes the
+# compiler and generates a .cc file: on failure, the file contains a series
+# of #error lines; on success, it contains a set of trivially passing gunit
+# TEST() functions. This allows us to fail at the compile step when something
+# goes wrong, and know during the unittest run that the test was at least
+# processed when things go right.
+
+{
+  # TODO(awong): Disabled until http://crbug.com/105388 is resolved.
+  'sources/': [['exclude', '\\.nc$']],
+  'conditions': [
+    [ 'OS!="win" and clang==1', {
+      'rules': [
+        {
+          'variables': {
+            'nocompile_driver': '<(DEPTH)/tools/nocompile_driver.py',
+            'nc_result_path': ('<(INTERMEDIATE_DIR)/<(module_dir)/'
+                               '<(RULE_INPUT_ROOT)_nc.cc'),
+           },
+          'rule_name': 'run_nocompile',
+          'extension': 'nc',
+          'inputs': [
+            '<(nocompile_driver)',
+          ],
+          'outputs': [
+            '<(nc_result_path)'
+          ],
+          'action': [
+            'python',
+            '<(nocompile_driver)',
+            '4', # number of compilers to invoke in parallel.
+            '<(RULE_INPUT_PATH)',
+            '-Wall -Werror -Wfatal-errors -I<(DEPTH)',
+            '<(nc_result_path)',
+            ],
+          'message': 'Generating no compile results for <(RULE_INPUT_PATH)',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    }, {
+      'sources/': [['exclude', '\\.nc$']]
+    }],  # 'OS!="win" and clang=="1"'
+  ],
+}
+
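
The heavy lifting is done by the referenced tools/nocompile_driver.py. As a
rough orientation, a simplified Python sketch of what such a driver does is
below; the regex, compiler name, and flags are illustrative assumptions, not
the real driver's behavior:

  # Hypothetical, simplified stand-in for tools/nocompile_driver.py.
  import re
  import subprocess

  SECTION_RE = re.compile(r'#(?:el)?if defined\((TEST_\w+)\)\s*//\s*(\[.*\])')

  def run_nc_tests(nc_file, cflags):
    failures = []
    for line in open(nc_file):
      match = SECTION_RE.search(line)
      if not match:
        continue  # Non-test lines and DISABLE_TEST_* sections do not match.
      test, expectations = match.group(1), eval(match.group(2))
      proc = subprocess.Popen(
          ['clang++', '-c', '-D' + test] + cflags + [nc_file],
          stderr=subprocess.PIPE)
      _, stderr = proc.communicate()
      if proc.returncode == 0:
        failures.append(test)  # Compiled, but should not have.
      elif not any(re.search(regex, stderr) for regex in expectations):
        failures.append(test)  # Failed, but with the wrong diagnostic.
    return failures
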
diff --git a/build/output_dll_copy.rules b/build/output_dll_copy.rules
new file mode 100644
index 0000000..c6e9051
--- /dev/null
+++ b/build/output_dll_copy.rules
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<VisualStudioToolFile
+	Name="Output DLL copy"
+	Version="8.00"
+	>
+	<Rules>
+		<CustomBuildRule
+			Name="Output DLL copy"
+			CommandLine="xcopy /R /C /Y $(InputPath) $(OutDir)"
+			Outputs="$(OutDir)\$(InputFileName)"
+			FileExtensions="*.dll"
+			>
+			<Properties>
+			</Properties>
+		</CustomBuildRule>
+	</Rules>
+</VisualStudioToolFile>
diff --git a/build/precompile.cc b/build/precompile.cc
new file mode 100644
index 0000000..db1ef6d
--- /dev/null
+++ b/build/precompile.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header generator for Windows builds. No include is needed
+// in this file as the PCH include is forced via the "Forced Include File"
+// flag in the projects generated by GYP.
diff --git a/build/precompile.h b/build/precompile.h
new file mode 100644
index 0000000..32c2f11
--- /dev/null
+++ b/build/precompile.h
@@ -0,0 +1,109 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header for the Chromium project on Windows, not used by
+// other build configurations. Using precompiled headers speeds the
+// build up significantly, by around a quarter on VS 2010 on an HP Z600
+// with 12 GB of memory.
+//
+// Numeric comments beside includes are the number of times they were
+// included under src/chrome/browser on 2011/8/20, which was used as a
+// baseline for deciding what to include in the PCH. Includes without
+// a numeric comment are generally included at least 5 times. It may
+// be possible to tweak the speed of the build by commenting out or
+// removing some of the less frequently used headers.
+
+#if defined(BUILD_PRECOMPILE_H_)
+#error You shouldn't include the precompiled header file more than once.
+#endif
+
+#define BUILD_PRECOMPILE_H_
+
+#define _USE_MATH_DEFINES
+
+// The Windows header needs to come before almost all the other
+// Windows-specific headers.
+#include <Windows.h>
+#include <dwmapi.h>
+#include <shellapi.h>
+#include <wtypes.h>  // 2
+
+// Defines in atlbase.h cause conflicts; if we could figure out how
+// this family of headers can be included in the PCH, it might speed
+// up the build as several of them are used frequently.
+/*
+#include <atlbase.h>
+#include <atlapp.h>
+#include <atlcom.h>
+#include <atlcrack.h>  // 2
+#include <atlctrls.h>  // 2
+#include <atlmisc.h>  // 2
+#include <atlsafe.h>  // 1
+#include <atltheme.h>  // 1
+#include <atlwin.h>  // 2
+*/
+
+// Objbase.h and other files that rely on it bring in [ #define
+// interface struct ] which can cause problems in a multi-platform
+// build like Chrome's. #undef-ing it does not work as there are
+// currently 118 targets that break if we do this, so we leave these
+// headers out of the precompiled header for now.
+//#include <commctrl.h>  // 2
+//#include <commdlg.h>  // 3
+//#include <cryptuiapi.h>  // 2
+//#include <Objbase.h>  // 2
+//#include <objidl.h>  // 1
+//#include <ole2.h>  // 1
+//#include <oleacc.h>  // 2
+//#include <oleauto.h>  // 1
+//#include <oleidl.h>  // 1
+//#include <propkey.h>  // 2
+//#include <propvarutil.h>  // 2
+//#include <pstore.h>  // 2
+//#include <shlguid.h>  // 1
+//#include <shlwapi.h>  // 1
+//#include <shobjidl.h>  // 4
+//#include <urlhist.h>  // 2
+
+// Caused other conflicts in addition to the 'interface' issue above.
+// #include <shlobj.h>
+
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>  // 4
+#include <math.h>
+#include <memory.h>  // 1
+#include <signal.h>
+#include <stdarg.h>  // 1
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>  // 4
+
+#include <algorithm>
+#include <bitset>  // 3
+#include <cmath>
+#include <cstddef>
+#include <cstdio>  // 3
+#include <cstdlib>  // 2
+#include <cstring>
+#include <deque>
+#include <fstream>  // 3
+#include <functional>
+#include <iomanip>  // 2
+#include <iosfwd>  // 2
+#include <iterator>
+#include <limits>
+#include <list>
+#include <map>
+#include <numeric>  // 2
+#include <ostream>
+#include <queue>
+#include <set>
+#include <sstream>
+#include <stack>
+#include <string>
+#include <utility>
+#include <vector>
diff --git a/build/protoc.gypi b/build/protoc.gypi
new file mode 100644
index 0000000..fafdf9d
--- /dev/null
+++ b/build/protoc.gypi
@@ -0,0 +1,123 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner. For Java targets, see
+# protoc_java.gypi.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'type': 'static_library',
+#   'sources': [
+#     'foo.proto',
+#     'bar.proto',
+#   ],
+#   'variables': {
+#     # Optional, see below: 'proto_in_dir': '.'
+#     'proto_out_dir': 'dir/for/my_proto_lib'
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+# If necessary, you may add normal .cc files to the sources list, or add
+# other gyp dependencies.  The proto headers are guaranteed to be generated
+# before any source files, even within this target, are compiled.
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files.  If left out, it defaults to '.'.
+#
+# The 'proto_out_dir' variable specifies the path suffix that output
+# files are generated under.  Targets that gyp-depend on my_proto_lib
+# will be able to include the resulting proto headers with an include
+# like:
+#   #include "dir/for/my_proto_lib/foo.pb.h"
+#
+# If you need to add an EXPORT macro to a protobuf's c++ header, set the
+# 'cc_generator_options' variable with the value: 'dllexport_decl=FOO_EXPORT:'
+# e.g. 'dllexport_decl=BASE_EXPORT:'
+#
+# It is likely you also need to #include a file for the above EXPORT macro to
+# work. You can do so with the 'cc_include' variable.
+# e.g. 'base/base_export.h'
+#
+# Implementation notes:
+# A proto_out_dir of foo/bar produces
+#   <(SHARED_INTERMEDIATE_DIR)/protoc_out/foo/bar/{file1,file2}.pb.{cc,h}
+#   <(SHARED_INTERMEDIATE_DIR)/pyproto/foo/bar/{file1,file2}_pb2.py
+
+{
+  'variables': {
+    'protoc_wrapper': '<(DEPTH)/tools/protoc_wrapper/protoc_wrapper.py',
+    'cc_dir': '<(SHARED_INTERMEDIATE_DIR)/protoc_out/<(proto_out_dir)',
+    'py_dir': '<(PRODUCT_DIR)/pyproto/<(proto_out_dir)',
+    'cc_generator_options%': '',
+    'cc_include%': '',
+    'proto_in_dir%': '.',
+    'conditions': [
+      ['use_system_protobuf==0', {
+        'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
+      }, { # use_system_protobuf==1
+        'protoc': '<!(which protoc)',
+      }],
+    ],
+  },
+  'rules': [
+    {
+      'rule_name': 'genproto',
+      'extension': 'proto',
+      'inputs': [
+        '<(protoc_wrapper)',
+        '<(protoc)',
+      ],
+      'outputs': [
+        '<(py_dir)/<(RULE_INPUT_ROOT)_pb2.py',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.cc',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+      ],
+      'action': [
+        'python',
+        '<(protoc_wrapper)',
+        '--include',
+        '<(cc_include)',
+        '--protobuf',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+        # Using the --arg val form (instead of --arg=val) allows gyp's msvs rule
+        # generation to correct 'val' which is a path.
+        '--proto-in-dir','<(proto_in_dir)',
+        # Naively you'd use <(RULE_INPUT_PATH) here, but protoc requires
+        # that --proto_path be a strict prefix of the path given as an argument.
+        '--proto-in-file','<(RULE_INPUT_ROOT)<(RULE_INPUT_EXT)',
+        '--use-system-protobuf=<(use_system_protobuf)',
+        '--',
+        '<(protoc)',
+        '--cpp_out', '<(cc_generator_options)<(cc_dir)',
+        '--python_out', '<(py_dir)',
+      ],
+      'message': 'Generating C++ and Python code from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'dependencies': [
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protoc#host',
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+    '<(DEPTH)',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+      '<(DEPTH)',
+    ]
+  },
+  'export_dependent_settings': [
+    # The generated headers reference headers within protobuf_lite,
+    # so dependencies must be able to find those headers too.
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
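
To make the rule above concrete: for a hypothetical foo.proto built with the
default proto_in_dir of '.' and a proto_out_dir of 'dir/for/my_proto_lib',
the 'genproto' action expands to roughly the following argument list (the
gen/ and out/Release/ paths stand in for the expanded gyp variables and are
assumptions, not literal build output):

  action = [
      'python', 'tools/protoc_wrapper/protoc_wrapper.py',
      '--include', '',  # cc_include defaults to empty.
      '--protobuf', 'gen/protoc_out/dir/for/my_proto_lib/foo.pb.h',
      '--proto-in-dir', '.',
      '--proto-in-file', 'foo.proto',
      '--use-system-protobuf=0',
      '--',
      'out/Release/protoc',
      '--cpp_out', 'gen/protoc_out/dir/for/my_proto_lib',
      '--python_out', 'out/Release/pyproto/dir/for/my_proto_lib',
  ]
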
diff --git a/build/protoc_java.gypi b/build/protoc_java.gypi
new file mode 100644
index 0000000..6fd80d85
--- /dev/null
+++ b/build/protoc_java.gypi
@@ -0,0 +1,83 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner. This is only to be included
+# for Java targets. When including this file, a .jar file will be generated.
+# For other targets, see protoc.gypi.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'sources': [
+#     'foo.proto',
+#     'bar.proto',
+#   ],
+#   'variables': {
+#     'proto_in_dir': '.'
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files.  If left out, it defaults to '.'.
+#
+# The 'output_java_files' variable specifies a list of output files that will
+# be generated. It is based on the package and java_outer_classname fields in
+# the proto. All the values must be prefixed with >(java_out_dir), since that
+# is the root directory of all the output.
+#
+# Implementation notes:
+# A target_name of foo and proto-specified 'package' java.package.path produces:
+#   <(PRODUCT_DIR)/java_proto/foo/{java/package/path/}{Foo,Bar}.java
+# where Foo and Bar are taken from 'java_outer_classname' of the protos.
+#
+# How the .jar file is created differs from how protoc is used for other
+# targets, and as such, this lives in its own file.
+
+{
+  'variables': {
+    'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)android_protoc<(EXECUTABLE_SUFFIX)',
+    'java_out_dir': '<(PRODUCT_DIR)/java_proto/<(_target_name)/src',
+    'proto_in_dir%': '.',
+    'stamp_file': '<(java_out_dir).stamp',
+    'script': '<(DEPTH)/build/protoc_java.py',
+
+    # The rest of the variables here are for the java.gypi include.
+    'java_in_dir': '<(DEPTH)/build/android/empty',
+    'generated_src_dirs': ['<(java_out_dir)'],
+    # Adding the |stamp_file| to |additional_input_paths| makes the actions in
+    # the include of java.gypi depend on the genproto_java action.
+    'additional_input_paths': ['<(stamp_file)'],
+    'run_findbugs': 0,
+  },
+  'actions': [
+    {
+      'action_name': 'genproto_java',
+      'inputs': [
+        '<(script)',
+        '<(protoc)',
+        '<@(_sources)',
+      ],
+      # We do not know the names of the generated files, so we use a stamp.
+      'outputs': [
+        '<(stamp_file)',
+      ],
+      'action': [
+        '<(script)',
+        '--protoc=<(protoc)',
+        '--proto-path=<(proto_in_dir)',
+        '--java-out-dir=<(java_out_dir)',
+        '--stamp=<(stamp_file)',
+        '<@(_sources)',
+      ],
+      'message': 'Generating Java code from protobuf files in <(proto_in_dir)',
+    },
+  ],
+  'dependencies': [
+    '<(DEPTH)/third_party/android_protobuf/android_protobuf.gyp:android_protoc#host',
+    '<(DEPTH)/third_party/android_protobuf/android_protobuf.gyp:protobuf_nano_javalib',
+  ],
+  'includes': [ 'java.gypi' ],
+}
diff --git a/build/protoc_java.py b/build/protoc_java.py
new file mode 100755
index 0000000..470667c
--- /dev/null
+++ b/build/protoc_java.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate java source files from protobuf files.
+
+This is a helper file for the genproto_java action in protoc_java.gypi.
+
+It performs the following steps:
+1. Deletes all old sources (ensures deleted classes are not part of new jars).
+2. Creates source directory.
+3. Generates Java files using protoc (output into either --java-out-dir or
+   --srcjar).
+4. Creates a new stamp file.
+"""
+
+import os
+import optparse
+import shutil
+import subprocess
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "android", "gyp"))
+from util import build_utils
+
+def main(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option("--protoc", help="Path to protoc binary.")
+  parser.add_option("--proto-path", help="Path to proto directory.")
+  parser.add_option("--java-out-dir",
+      help="Path to output directory for java files.")
+  parser.add_option("--srcjar", help="Path to output srcjar.")
+  parser.add_option("--stamp", help="File to touch on success.")
+  options, args = parser.parse_args(argv)
+
+  build_utils.CheckOptions(options, parser, ['protoc', 'proto_path'])
+  if not options.java_out_dir and not options.srcjar:
+    print 'One of --java-out-dir or --srcjar must be specified.'
+    return 1
+
+  with build_utils.TempDir() as temp_dir:
+    # Specify arguments to the generator.
+    generator_args = ['optional_field_style=reftypes',
+                      'store_unknown_fields=true']
+    out_arg = '--javanano_out=' + ','.join(generator_args) + ':' + temp_dir
+    # Generate Java files using protoc.
+    build_utils.CheckOutput(
+        [options.protoc, '--proto_path', options.proto_path, out_arg]
+        + args)
+
+    if options.java_out_dir:
+      build_utils.DeleteDirectory(options.java_out_dir)
+      shutil.copytree(temp_dir, options.java_out_dir)
+    else:
+      build_utils.ZipDir(options.srcjar, temp_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        args + [options.protoc] + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
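
As a usage sketch, the script can also be driven by hand; every path below
is a made-up placeholder:

  # Hypothetical invocation of build/protoc_java.py.
  import subprocess

  subprocess.check_call([
      'build/protoc_java.py',
      '--protoc=out/Release/android_protoc',
      '--proto-path=my/protos',
      '--java-out-dir=out/Release/java_proto/my_proto_lib/src',
      '--stamp=out/Release/java_proto/my_proto_lib/src.stamp',
      'my/protos/foo.proto',
  ])
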
diff --git a/build/release.gypi b/build/release.gypi
new file mode 100644
index 0000000..9b8b11d
--- /dev/null
+++ b/build/release.gypi
@@ -0,0 +1,29 @@
+{
+  'conditions': [
+    # Handle build types.
+    ['buildtype=="Dev"', {
+      'includes': ['internal/release_impl.gypi'],
+    }],
+    ['buildtype=="Dev" and incremental_chrome_dll==1', {
+      'msvs_settings': {
+        'VCLinkerTool': {
+          # Enable incremental linking and disable conflicting link options:
+          # http://msdn.microsoft.com/en-us/library/4khtbfyf.aspx
+          'LinkIncremental': '2',
+          'OptimizeReferences': '1',
+          'EnableCOMDATFolding': '1',
+          'Profile': 'false',
+        },
+      },
+    }],
+    ['buildtype=="Official"', {
+      'includes': ['internal/release_impl_official.gypi'],
+    }],
+    # TODO(bradnelson): may also need:
+    #     checksenabled
+    #     coverage
+    #     dom_stats
+    #     pgo_instrument
+    #     pgo_optimize
+  ],
+}
diff --git a/build/repack_action.gypi b/build/repack_action.gypi
new file mode 100644
index 0000000..04b982a
--- /dev/null
+++ b/build/repack_action.gypi
@@ -0,0 +1,31 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit repack in a
+# consistent manner. To use this the following variables need to be
+# defined:
+#   pak_inputs: list: paths of pak files that need to be combined.
+#   pak_output: string: the output pak file path.
+
+{
+  # GYP version: //tools/grit/repack.gni
+  'variables': {
+    'repack_path': '<(DEPTH)/tools/grit/grit/format/repack.py',
+    'repack_options%': [],
+  },
+  'inputs': [
+    '<(repack_path)',
+    '<@(pak_inputs)',
+  ],
+  'outputs': [
+    '<(pak_output)'
+  ],
+  'action': [
+    'python',
+    '<(repack_path)',
+    '<@(repack_options)',
+    '<(pak_output)',
+    '<@(pak_inputs)',
+  ],
+}
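
A sketch of including this file from a gyp action; the action name and pak
paths are invented for illustration:

  {
    'action_name': 'repack_my_paks',
    'variables': {
      'pak_inputs': [
        '<(SHARED_INTERMEDIATE_DIR)/content/content_resources.pak',
        '<(SHARED_INTERMEDIATE_DIR)/ui/resources/ui_resources_100_percent.pak',
      ],
      'pak_output': '<(PRODUCT_DIR)/my_app.pak',
    },
    'includes': ['../build/repack_action.gypi'],
  }
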
diff --git a/build/rmdir_and_stamp.py b/build/rmdir_and_stamp.py
new file mode 100755
index 0000000..6aa11f8
--- /dev/null
+++ b/build/rmdir_and_stamp.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wipes out a directory recursively and then touches a stamp file.
+
+This odd pairing of operations is used to support build scripts which
+slurp up entire directories (e.g. build/android/javac.py when handling
+generated sources) as inputs.
+
+The general pattern of use is:
+
+  - Add a target which generates |gen_sources| into |out_path| from |inputs|.
+  - Include |stamp_file| as an input for that target or any of its rules which
+    generate files in |out_path|.
+  - Add an action which depends on |inputs| and which outputs |stamp_file|;
+    the action should run this script and pass |out_path| and |stamp_file| as
+    its arguments.
+
+The net result is that you will force |out_path| to be wiped and all
+|gen_sources| to be regenerated any time any file in |inputs| changes.
+
+See //third_party/mojo/mojom_bindings_generator.gypi for an example use case.
+
+"""
+
+import errno
+import os
+import shutil
+import sys
+
+
+def Main(dst_dir, stamp_file):
+  try:
+    shutil.rmtree(os.path.normpath(dst_dir))
+  except OSError as e:
+    # Ignore only "not found" errors.
+    if e.errno != errno.ENOENT:
+      raise
+  with open(stamp_file, 'a'):
+    os.utime(stamp_file, None)
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1], sys.argv[2]))
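
A sketch of the action described in the docstring above, with invented
target and variable names:

  {
    'action_name': 'wipe_generated_sources',
    'inputs': ['<@(inputs)'],
    'outputs': ['<(stamp_file)'],
    'action': [
      'python', '<(DEPTH)/build/rmdir_and_stamp.py',
      '<(out_path)',
      '<(stamp_file)',
    ],
  }
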
diff --git a/build/sanitize-mac-build-log.sed b/build/sanitize-mac-build-log.sed
new file mode 100644
index 0000000..b4111c7
--- /dev/null
+++ b/build/sanitize-mac-build-log.sed
@@ -0,0 +1,33 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Mac build log into something readable.
+
+# Drop uninformative lines.
+/^distcc/d
+/^Check dependencies/d
+/^    setenv /d
+/^    cd /d
+/^make: Nothing to be done/d
+/^$/d
+
+# Xcode prints a short "compiling foobar.o" line followed by the lengthy
+# full command line.  These deletions drop the command line.
+\|^    /Developer/usr/bin/|d
+\|^    /Developer/Library/PrivateFrameworks/DevToolsCore\.framework/|d
+\|^    /Developer/Library/Xcode/Plug-ins/CoreBuildTasks\.xcplugin/|d
+
+# Drop any goma command lines as well.
+\|^    .*/gomacc |d
+
+# And, if you've overridden something from your own bin directory, remove those
+# full command lines, too.
+\|^    /Users/[^/]*/bin/|d
+
+# There's already a nice note for bindings, don't need the command line.
+\|^python scripts/rule_binding\.py|d
+
+# Shorten the "compiling foobar.o" line.
+s|^Distributed-CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
+s|^CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
diff --git a/build/sanitize-mac-build-log.sh b/build/sanitize-mac-build-log.sh
new file mode 100755
index 0000000..df5a7af
--- /dev/null
+++ b/build/sanitize-mac-build-log.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/build/sanitize-win-build-log.sed b/build/sanitize-win-build-log.sed
new file mode 100644
index 0000000..c18e664
--- /dev/null
+++ b/build/sanitize-win-build-log.sed
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Windows build log into something
+# machine-parsable.
+
+# Drop uninformative lines.
+/The operation completed successfully\./d
+
+# Drop parallelization indicators on lines.
+s/^[0-9]+>//
+
+# Shorten bindings generation lines
+s/^.*"python".*idl_compiler\.py".*("[^"]+\.idl").*$/  idl_compiler \1/
diff --git a/build/sanitize-win-build-log.sh b/build/sanitize-win-build-log.sh
new file mode 100755
index 0000000..df5a7af
--- /dev/null
+++ b/build/sanitize-win-build-log.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/build/sanitizers/BUILD.gn b/build/sanitizers/BUILD.gn
new file mode 100644
index 0000000..4f81f3e
--- /dev/null
+++ b/build/sanitizers/BUILD.gn
@@ -0,0 +1,24 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_linux && !is_chromeos) {
+  # TODO(GYP): Figure out which of these work and are needed on other platforms.
+  copy("copy_llvm_symbolizer") {
+    if (is_win) {
+      sources = [
+        "//third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer.exe",
+      ]
+      outputs = [
+        "$root_out_dir/llvm-symbolizer.exe",
+      ]
+    } else {
+      sources = [
+        "//third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer",
+      ]
+      outputs = [
+        "$root_out_dir/llvm-symbolizer",
+      ]
+    }
+  }
+}
diff --git a/build/sanitizers/OWNERS b/build/sanitizers/OWNERS
new file mode 100644
index 0000000..0be2be8
--- /dev/null
+++ b/build/sanitizers/OWNERS
@@ -0,0 +1,4 @@
+glider@chromium.org
+earthdok@chromium.org
+per-file tsan_suppressions.cc=*
+per-file lsan_suppressions.cc=*
diff --git a/build/sanitizers/asan_suppressions.cc b/build/sanitizers/asan_suppressions.cc
new file mode 100644
index 0000000..df94bc8
--- /dev/null
+++ b/build/sanitizers/asan_suppressions.cc
@@ -0,0 +1,23 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for AddressSanitizer.
+// It should only be used under very limited circumstances such as suppressing
+// a report caused by an interceptor call in a system-installed library.
+
+#if defined(ADDRESS_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kASanDefaultSuppressions which contains ASan suppressions delimited by
+// newlines.
+char kASanDefaultSuppressions[] =
+// http://crbug.com/178677
+"interceptor_via_lib:libsqlite3.so\n"
+
+// PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+// End of suppressions.
+;  // Please keep this semicolon.
+
+#endif  // ADDRESS_SANITIZER
diff --git a/build/sanitizers/lsan_suppressions.cc b/build/sanitizers/lsan_suppressions.cc
new file mode 100644
index 0000000..e9a8b7e
--- /dev/null
+++ b/build/sanitizers/lsan_suppressions.cc
@@ -0,0 +1,108 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for LeakSanitizer.
+// You can also pass additional suppressions via LSAN_OPTIONS:
+// LSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/leaksanitizer for more info.
+
+#if defined(LEAK_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kLSanDefaultSuppressions which contains LSan suppressions delimited by
+// newlines. See http://dev.chromium.org/developers/testing/leaksanitizer
+// for the instructions on writing suppressions.
+char kLSanDefaultSuppressions[] =
+// Intentional leak used as sanity test for Valgrind/memcheck.
+"leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
+
+// ================ Leaks in third-party code ================
+
+// False positives in libfontconfig. http://crbug.com/39050
+"leak:libfontconfig\n"
+
+// Leaks in Nvidia's libGL.
+"leak:libGL.so\n"
+
+// A small leak in V8. http://crbug.com/46571#c9
+"leak:blink::V8GCController::collectGarbage\n"
+
+// TODO(earthdok): revisit NSS suppressions after the switch to BoringSSL
+// NSS leaks in CertDatabaseNSSTest tests. http://crbug.com/51988
+"leak:net::NSSCertDatabase::ImportFromPKCS12\n"
+"leak:net::NSSCertDatabase::ListCerts\n"
+"leak:net::NSSCertDatabase::DeleteCertAndKey\n"
+"leak:crypto::ScopedTestNSSDB::ScopedTestNSSDB\n"
+// Another leak due to not shutting down NSS properly. http://crbug.com/124445
+"leak:error_get_my_stack\n"
+// The NSS suppressions above will not fire when the fast stack unwinder is
+// used, because it can't unwind through NSS libraries. Apply blanket
+// suppressions for now.
+"leak:libnssutil3\n"
+"leak:libnspr4\n"
+"leak:libnss3\n"
+"leak:libplds4\n"
+"leak:libnssckbi\n"
+
+// XRandR has several one time leaks.
+"leak:libxrandr\n"
+
+// xrandr leak. http://crbug.com/119677
+"leak:XRRFindDisplay\n"
+
+// Suppressions for objects which can be owned by the V8 heap. This is a
+// temporary workaround until LeakSanitizer supports the V8 heap.
+// Those should only fire in (browser)tests. If you see one of them in Chrome,
+// then it's a real leak.
+// http://crbug.com/328552
+"leak:WTF::StringImpl::createUninitialized\n"
+"leak:WTF::StringImpl::create8BitIfPossible\n"
+"leak:blink::MouseEvent::create\n"
+"leak:blink::*::*GetterCallback\n"
+"leak:blink::CSSComputedStyleDeclaration::create\n"
+"leak:blink::V8PerIsolateData::ensureDomInJSContext\n"
+"leak:gin/object_template_builder.h\n"
+"leak:gin::internal::Dispatcher\n"
+"leak:blink::LocalDOMWindow::getComputedStyle\n"
+// This should really be RemoteDOMWindow::create, but symbolization is
+// weird in release builds. https://crbug.com/484760
+"leak:blink::RemoteFrame::create\n"
+// Likewise, this should really be blink::WindowProxy::initializeIfNeeded.
+// https://crbug.com/484760
+"leak:blink::WindowProxy::createContext\n"
+
+// http://crbug.com/356785
+"leak:content::RenderViewImplTest_DecideNavigationPolicyForWebUI_Test::TestBody\n"
+
+// ================ Leaks in Chromium code ================
+// PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
+// Instead, commits that introduce memory leaks should be reverted. Suppressing
+// the leak is acceptable in some cases when reverting is impossible, i.e. when
+// enabling leak detection for the first time for a test target with
+// pre-existing leaks.
+
+// Small test-only leak in ppapi_unittests. http://crbug.com/258113
+"leak:ppapi::proxy::PPP_Instance_Private_ProxyTest_PPPInstancePrivate_Test\n"
+
+// http://crbug.com/322671
+"leak:content::SpeechRecognitionBrowserTest::SetUpOnMainThread\n"
+
+// http://crbug.com/355641
+"leak:TrayAccessibilityTest\n"
+
+// http://crbug.com/354644
+"leak:CertificateViewerUITest::ShowModalCertificateViewer\n"
+
+// http://crbug.com/356306
+"leak:content::SetProcessTitleFromCommandLine\n"
+
+// http://crbug.com/506433
+"leak:blink::ResourceFetcher::garbageCollectDocumentResources\n"
+
+// PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+// End of suppressions.
+;  // Please keep this semicolon.
+
+#endif  // LEAK_SANITIZER
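
These built-in suppressions combine with whatever is passed through
LSAN_OPTIONS, as noted at the top of the file. A sketch of supplying an
extra suppressions file when running a test binary; the binary and
suppressions paths are placeholders:

  import os
  import subprocess

  env = dict(os.environ)
  env['LSAN_OPTIONS'] = 'suppressions=/tmp/extra.supp:print_suppressions=1'
  subprocess.check_call(['out/Release/base_unittests'], env=env)
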
diff --git a/build/sanitizers/sanitizer_options.cc b/build/sanitizers/sanitizer_options.cc
new file mode 100644
index 0000000..a659a22
--- /dev/null
+++ b/build/sanitizers/sanitizer_options.cc
@@ -0,0 +1,164 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file contains the default options for various compiler-based dynamic
+// tools.
+
+#include "build/build_config.h"
+
+#if defined(ADDRESS_SANITIZER) && defined(OS_MACOSX)
+#include <crt_externs.h>  // for _NSGetArgc, _NSGetArgv
+#include <string.h>
+#endif  // ADDRESS_SANITIZER && OS_MACOSX
+
+#if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) || \
+    defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER)
+// Functions returning default options are declared weak in the tools' runtime
+// libraries. To make the linker pick the strong replacements for those
+// functions from this module, we explicitly force its inclusion by passing
+// -Wl,-u_sanitizer_options_link_helper
+extern "C"
+void _sanitizer_options_link_helper() { }
+
+// The callbacks we define here will be called from the sanitizer runtime, but
+// aren't referenced from the Chrome executable. We must ensure that those
+// callbacks are not sanitizer-instrumented, and that they aren't stripped by
+// the linker.
+#define SANITIZER_HOOK_ATTRIBUTE          \
+  extern "C"                              \
+  __attribute__((no_sanitize_address))    \
+  __attribute__((no_sanitize_memory))     \
+  __attribute__((no_sanitize_thread))     \
+  __attribute__((visibility("default")))  \
+  __attribute__((used))
+#endif
+
+#if defined(ADDRESS_SANITIZER)
+// Default options for AddressSanitizer in various configurations:
+//   malloc_context_size=5 - limit the size of stack traces collected by ASan
+//     for each malloc/free to 5 frames. These stack traces tend to accumulate
+//     very fast in applications using JIT (v8 in Chrome's case), see
+//     https://code.google.com/p/address-sanitizer/issues/detail?id=177
+//   symbolize=false - disable the in-process symbolization, which isn't 100%
+//     compatible with the existing sandboxes and doesn't make much sense for
+//     stripped official binaries.
+//   legacy_pthread_cond=1 - run in the libpthread 2.2.5 compatibility mode to
+//     work around libGL.so using the obsolete API, see
+//     http://crbug.com/341805. This may break if pthread_cond_t objects are
+//     accessed by both instrumented and non-instrumented binaries (e.g. if
+//     they reside in shared memory). This option is going to be deprecated in
+//     upstream AddressSanitizer and must not be used anywhere except the
+//     official builds.
+//   check_printf=1 - check the memory accesses made by printf (and other
+//     formatted output routines) to their arguments.
+//   use_sigaltstack=1 - handle signals on an alternate signal stack. Useful
+//     for stack overflow detection.
+//   strip_path_prefix=Release/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports
+//     (if symbolize=true, which is set when running with LeakSanitizer).
+//   fast_unwind_on_fatal=1 - use the fast (frame-pointer-based) stack unwinder
+//     to print error reports. V8 doesn't generate debug info for the JIT code,
+//     so the slow unwinder may not work properly.
+//   detect_stack_use_after_return=1 - use fake stack to delay the reuse of
+//     stack allocations and detect stack-use-after-return errors.
+#if defined(OS_LINUX)
+#if defined(GOOGLE_CHROME_BUILD)
+// Default AddressSanitizer options for the official build. These do not affect
+// tests on buildbots (which don't set GOOGLE_CHROME_BUILD) or non-official
+// Chromium builds.
+const char kAsanDefaultOptions[] =
+    "legacy_pthread_cond=1 malloc_context_size=5 "
+    "symbolize=false check_printf=1 use_sigaltstack=1 detect_leaks=0 "
+    "strip_path_prefix=Release/../../ fast_unwind_on_fatal=1";
+#else
+// Default AddressSanitizer options for buildbots and non-official builds.
+const char *kAsanDefaultOptions =
+    "symbolize=false check_printf=1 use_sigaltstack=1 "
+    "detect_leaks=0 strip_path_prefix=Release/../../ fast_unwind_on_fatal=1 "
+    "detect_stack_use_after_return=1 ";
+#endif  // GOOGLE_CHROME_BUILD
+
+#elif defined(OS_MACOSX)
+const char *kAsanDefaultOptions =
+    "check_printf=1 use_sigaltstack=1 "
+    "strip_path_prefix=Release/../../ fast_unwind_on_fatal=1 "
+    "detect_stack_use_after_return=1 detect_odr_violation=0 ";
+static const char kNaClDefaultOptions[] = "handle_segv=0";
+static const char kNaClFlag[] = "--type=nacl-loader";
+#endif  // OS_LINUX
+
+#if defined(OS_LINUX) || defined(OS_MACOSX)
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_options() {
+#if defined(OS_MACOSX)
+  char*** argvp = _NSGetArgv();
+  int* argcp = _NSGetArgc();
+  if (!argvp || !argcp) return kAsanDefaultOptions;
+  char** argv = *argvp;
+  int argc = *argcp;
+  for (int i = 0; i < argc; ++i) {
+    if (strcmp(argv[i], kNaClFlag) == 0) {
+      return kNaClDefaultOptions;
+    }
+  }
+#endif
+  return kAsanDefaultOptions;
+}
+
+extern "C" char kASanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_suppressions() {
+  return kASanDefaultSuppressions;
+}
+#endif  // OS_LINUX || OS_MACOSX
+#endif  // ADDRESS_SANITIZER
+
+#if defined(THREAD_SANITIZER) && defined(OS_LINUX)
+// Default options for ThreadSanitizer in various configurations:
+//   detect_deadlocks=1 - enable deadlock (lock inversion) detection.
+//   second_deadlock_stack=1 - more verbose deadlock reports.
+//   report_signal_unsafe=0 - do not report async-signal-unsafe functions
+//     called from signal handlers.
+//   report_thread_leaks=0 - do not report unjoined threads at the end of
+//     the program execution.
+//   print_suppressions=1 - print the list of matched suppressions.
+//   history_size=7 - make the history buffer proportional to 2^7 (the maximum
+//     value) to keep more stack traces.
+//   strip_path_prefix=Release/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+const char kTsanDefaultOptions[] =
+    "detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 "
+    "report_thread_leaks=0 print_suppressions=1 history_size=7 "
+    "strip_path_prefix=Release/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_options() {
+  return kTsanDefaultOptions;
+}
+
+extern "C" char kTSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() {
+  return kTSanDefaultSuppressions;
+}
+
+#endif  // THREAD_SANITIZER && OS_LINUX
+
+#if defined(LEAK_SANITIZER)
+// Default options for LeakSanitizer:
+//   print_suppressions=1 - print the list of matched suppressions.
+//   strip_path_prefix=Release/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+const char kLsanDefaultOptions[] =
+    "print_suppressions=1 strip_path_prefix=Release/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_options() {
+  return kLsanDefaultOptions;
+}
+
+extern "C" char kLSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() {
+  return kLSanDefaultSuppressions;
+}
+
+#endif  // LEAK_SANITIZER
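
Because the strings above are only defaults returned from weak functions,
anything set in the corresponding environment variable takes precedence at
run time. A minimal sketch of overriding one ASan default without
rebuilding; the binary path is a placeholder:

  import os
  import subprocess

  env = dict(os.environ)
  env['ASAN_OPTIONS'] = 'malloc_context_size=30'
  subprocess.check_call(['out/Release/chrome'], env=env)
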
diff --git a/build/sanitizers/sanitizers.gyp b/build/sanitizers/sanitizers.gyp
new file mode 100644
index 0000000..91dab8a
--- /dev/null
+++ b/build/sanitizers/sanitizers.gyp
@@ -0,0 +1,92 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'sanitizer_options',
+      'type': 'static_library',
+      'toolsets': ['host', 'target'],
+      'variables': {
+         # Every target is going to depend on sanitizer_options, so allow
+         # this one to depend on itself.
+         'prune_self_dependency': 1,
+         # Do not let 'none' targets depend on this one, they don't need to.
+         'link_dependency': 1,
+       },
+      'sources': [
+        'sanitizer_options.cc',
+      ],
+      'include_dirs': [
+        '../..',
+      ],
+      # Some targets may want to opt out of ASan, TSan and MSan and link
+      # without the corresponding runtime libraries. We drop the libc++
+      # dependency and omit the compiler flags to avoid bringing instrumented
+      # code to those targets.
+      'conditions': [
+        ['use_custom_libcxx==1', {
+          'dependencies!': [
+            '../../buildtools/third_party/libc++/libc++.gyp:libcxx_proxy',
+          ],
+        }],
+        ['tsan==1', {
+          'sources': [
+            'tsan_suppressions.cc',
+          ],
+        }],
+        ['lsan==1', {
+          'sources': [
+            'lsan_suppressions.cc',
+          ],
+        }],
+        ['asan==1', {
+          'sources': [
+            'asan_suppressions.cc',
+          ],
+        }],
+      ],
+      'cflags/': [
+        ['exclude', '-fsanitize='],
+        ['exclude', '-fsanitize-'],
+      ],
+      'direct_dependent_settings': {
+        'ldflags': [
+          '-Wl,-u_sanitizer_options_link_helper',
+        ],
+        'target_conditions': [
+          ['_type=="executable"', {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-Wl,-u,__sanitizer_options_link_helper',
+              ],
+            },
+          }],
+        ],
+      },
+    },
+    {
+      # Copy llvm-symbolizer to the product dir so that LKGR bots can package it.
+      'target_name': 'llvm-symbolizer',
+      'type': 'none',
+      'variables': {
+
+       # Path is relative to this GYP file.
+       'llvm_symbolizer_path':
+           '../../third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer<(EXECUTABLE_SUFFIX)',
+      },
+      'conditions': [
+        ['clang==1', {
+          'copies': [{
+            'destination': '<(PRODUCT_DIR)',
+            'files': [
+              '<(llvm_symbolizer_path)',
+            ],
+          }],
+        }],
+      ],
+    },
+  ],
+}
+
diff --git a/build/sanitizers/tsan_suppressions.cc b/build/sanitizers/tsan_suppressions.cc
new file mode 100644
index 0000000..fe64dd2
--- /dev/null
+++ b/build/sanitizers/tsan_suppressions.cc
@@ -0,0 +1,318 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for ThreadSanitizer.
+// You can also pass additional suppressions via TSAN_OPTIONS:
+// TSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for more info.
+
+#if defined(THREAD_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kTSanDefaultSuppressions that contains TSan suppressions delimited by newlines.
+// See http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for the instructions on writing suppressions.
+char kTSanDefaultSuppressions[] =
+// False positives in libflashplayer.so and libglib.so. Since we don't
+// instrument them, we cannot reason about the synchronization in them.
+"race:libflashplayer.so\n"
+"race:libglib*.so\n"
+
+// Intentional race in ToolsSanityTest.DataRace in base_unittests.
+"race:base/tools_sanity_unittest.cc\n"
+
+// Data race on WatchdogCounter [test-only].
+"race:base/threading/watchdog_unittest.cc\n"
+
+// Races in libevent, http://crbug.com/23244.
+"race:libevent/event.c\n"
+
+// http://crbug.com/46840.
+"race:base::HistogramSamples::IncreaseSum\n"
+"race:base::Histogram::Add\n"
+"race:base::HistogramSamples::Add\n"
+
+// http://crbug.com/84094.
+"race:sqlite3StatusSet\n"
+"race:pcache1EnforceMaxPage\n"
+"race:pcache1AllocPage\n"
+
+// http://crbug.com/102327.
+// Test-only race, won't fix.
+"race:tracked_objects::ThreadData::ShutdownSingleThreadedCleanup\n"
+
+// http://crbug.com/115540
+"race:*GetCurrentThreadIdentifier\n"
+
+// http://crbug.com/120808
+"race:base/threading/watchdog.cc\n"
+
+// http://crbug.com/157586
+"race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n"
+
+// http://crbug.com/158718
+"race:third_party/ffmpeg/libavcodec/pthread.c\n"
+"race:third_party/ffmpeg/libavcodec/pthread_frame.c\n"
+"race:third_party/ffmpeg/libavcodec/vp8.c\n"
+"race:third_party/ffmpeg/libavutil/mem.c\n"
+"race:*HashFrameForTesting\n"
+"race:third_party/ffmpeg/libavcodec/h264pred.c\n"
+"race:media::ReleaseData\n"
+
+// http://crbug.com/158922
+"race:third_party/libvpx/source/libvpx/vp8/encoder/*\n"
+"race:third_party/libvpx/source/libvpx/vp9/encoder/*\n"
+
+// http://crbug.com/189177
+"race:thread_manager\n"
+"race:v8::Locker::Initialize\n"
+
+// http://crbug.com/239359
+"race:media::TestInputCallback::OnData\n"
+
+// http://crbug.com/244368
+"race:skia::BeginPlatformPaint\n"
+
+// http://crbug.com/244385
+"race:unixTempFileDir\n"
+
+// http://crbug.com/244755
+"race:v8::internal::Zone::NewExpand\n"
+"race:TooLateToEnableNow\n"
+"race:adjust_segment_bytes_allocated\n"
+
+// http://crbug.com/244774
+"race:webrtc::RTPReceiver::ProcessBitrate\n"
+"race:webrtc::RTPSender::ProcessBitrate\n"
+"race:webrtc::VideoCodingModuleImpl::Decode\n"
+"race:webrtc::RTPSender::SendOutgoingData\n"
+"race:webrtc::VP8EncoderImpl::GetEncodedPartitions\n"
+"race:webrtc::VP8EncoderImpl::Encode\n"
+"race:webrtc::ViEEncoder::DeliverFrame\n"
+"race:webrtc::vcm::VideoReceiver::Decode\n"
+"race:webrtc::VCMReceiver::FrameForDecoding\n"
+"race:*trace_event_unique_catstatic*\n"
+
+// http://crbug.com/244856
+"race:AutoPulseLock\n"
+
+// http://crbug.com/246968
+"race:webrtc::VideoCodingModuleImpl::RegisterPacketRequestCallback\n"
+
+// http://crbug.com/246974
+"race:content::GpuWatchdogThread::CheckArmed\n"
+
+// http://crbug.com/257396
+"race:base::trace_event::"
+    "TraceEventTestFixture_TraceSamplingScope_Test::TestBody\n"
+
+// http://crbug.com/258479
+"race:SamplingStateScope\n"
+"race:g_trace_state\n"
+
+// http://crbug.com/258499
+"race:third_party/skia/include/core/SkRefCnt.h\n"
+
+// http://crbug.com/268924
+"race:base::g_power_monitor\n"
+"race:base::PowerMonitor::PowerMonitor\n"
+"race:base::PowerMonitor::AddObserver\n"
+"race:base::PowerMonitor::RemoveObserver\n"
+"race:base::PowerMonitor::IsOnBatteryPower\n"
+
+// http://crbug.com/258935
+"race:base::Thread::StopSoon\n"
+
+// http://crbug.com/268941
+"race:tracked_objects::ThreadData::tls_index_\n"
+
+// http://crbug.com/272095
+"race:base::g_top_manager\n"
+
+// http://crbug.com/273047
+"race:base::*::g_lazy_tls_ptr\n"
+"race:IPC::SyncChannel::ReceivedSyncMsgQueue::lazy_tls_ptr_\n"
+
+// http://crbug.com/280466
+"race:content::WebRtcAudioCapturer::SetCapturerSource\n"
+
+// http://crbug.com/285242
+"race:media::PulseAudioOutputStream::SetVolume\n"
+
+// http://crbug.com/308590
+"race:CustomThreadWatcher::~CustomThreadWatcher\n"
+
+// http://crbug.com/310851
+"race:net::ProxyResolverV8Tracing::Job::~Job\n"
+
+// http://crbug.com/313726
+"race:CallbackWasCalled\n"
+
+// http://crbug.com/327330
+"race:PrepareTextureMailbox\n"
+"race:cc::LayerTreeHost::PaintLayerContents\n"
+
+// http://crbug.com/476529
+"deadlock:cc::VideoLayerImpl::WillDraw\n"
+
+// http://crbug.com/328826
+"race:gLCDOrder\n"
+"race:gLCDOrientation\n"
+
+// http://crbug.com/328868
+"race:PR_Lock\n"
+
+// http://crbug.com/329225
+"race:blink::currentTimeFunction\n"
+
+// http://crbug.com/329460
+"race:extensions::InfoMap::AddExtension\n"
+
+// http://crbug.com/333244
+"race:content::"
+    "VideoCaptureImplTest::MockVideoCaptureImpl::~MockVideoCaptureImpl\n"
+
+// http://crbug.com/333871
+"race:v8::internal::Interface::NewValue()::value_interface\n"
+"race:v8::internal::IsMinusZero(double)::minus_zero\n"
+"race:v8::internal::FastCloneShallowObjectStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::KeyedLoadStubCompiler::registers\n"
+"race:v8::internal::KeyedStoreStubCompiler::registers()::registers\n"
+"race:v8::internal::KeyedLoadFastElementStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::KeyedStoreFastElementStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::LoadStubCompiler::registers\n"
+"race:v8::internal::StoreStubCompiler::registers\n"
+"race:v8::internal::HValue::LoopWeight\n"
+
+// http://crbug.com/334140
+"race:CommandLine::HasSwitch\n"
+"race:CommandLine::current_process_commandline_\n"
+"race:CommandLine::GetSwitchValueASCII\n"
+
+// http://crbug.com/338675
+"race:blink::s_platform\n"
+"race:content::"
+    "RendererWebKitPlatformSupportImpl::~RendererWebKitPlatformSupportImpl\n"
+
+// http://crbug.com/345240
+"race:WTF::s_shutdown\n"
+
+// http://crbug.com/345245
+"race:jingle_glue::JingleThreadWrapper::~JingleThreadWrapper\n"
+"race:webrtc::voe::Channel::UpdatePacketDelay\n"
+"race:webrtc::voe::Channel::GetDelayEstimate\n"
+"race:webrtc::VCMCodecDataBase::DeregisterReceiveCodec\n"
+"race:webrtc::GainControlImpl::set_stream_analog_level\n"
+
+// http://crbug.com/345618
+"race:WebCore::AudioDestinationNode::render\n"
+
+// http://crbug.com/345624
+"race:media::DataSource::set_host\n"
+
+// http://crbug.com/347534
+"race:v8::internal::V8::TearDown\n"
+
+// http://crbug.com/347538
+"race:sctp_timer_start\n"
+
+// http://crbug.com/347548
+"race:cricket::WebRtcVideoMediaChannel::MaybeResetVieSendCodec\n"
+"race:cricket::WebRtcVideoMediaChannel::SetSendCodec\n"
+
+// http://crbug.com/347553
+"race:blink::WebString::reset\n"
+
+// http://crbug.com/348511
+"race:webrtc::acm1::AudioCodingModuleImpl::PlayoutData10Ms\n"
+
+// http://crbug.com/348982
+"race:cricket::P2PTransportChannel::OnConnectionDestroyed\n"
+"race:cricket::P2PTransportChannel::AddConnection\n"
+
+// http://crbug.com/348984
+"race:sctp_express_handle_sack\n"
+"race:system_base_info\n"
+
+// http://crbug.com/363999
+"race:v8::internal::EnterDebugger::*EnterDebugger\n"
+
+// https://code.google.com/p/v8/issues/detail?id=3143
+"race:v8::internal::FLAG_track_double_fields\n"
+
+// https://crbug.com/369257
+// TODO(mtklein): annotate properly and remove suppressions.
+"race:SandboxIPCHandler::HandleFontMatchRequest\n"
+"race:SkFontConfigInterfaceDirect::matchFamilyName\n"
+"race:SkFontConfigInterface::GetSingletonDirectInterface\n"
+"race:FcStrStaticName\n"
+
+// http://crbug.com/372807
+"deadlock:net::X509Certificate::CreateCertificateListFromBytes\n"
+"deadlock:net::X509Certificate::CreateFromBytes\n"
+"deadlock:net::SSLClientSocketNSS::Core::DoHandshakeLoop\n"
+
+// http://crbug.com/374135
+"race:media::AlsaWrapper::PcmWritei\n"
+
+// False positive in libc's tzset_internal, http://crbug.com/379738.
+"race:tzset_internal\n"
+
+// http://crbug.com/380554
+"deadlock:g_type_add_interface_static\n"
+
+// http://crbug.com/386385
+"race:content::AppCacheStorageImpl::DatabaseTask::CallRunCompleted\n"
+
+// http://crbug.com/388730
+"race:g_next_user_script_id\n"
+
+// http://crbug.com/389098
+"race:webrtc::voe::TransmitMixer::EnableStereoChannelSwapping\n"
+
+// http://crbug.com/397022
+"deadlock:"
+"base::trace_event::TraceEventTestFixture_ThreadOnceBlocking_Test::TestBody\n"
+
+// http://crbug.com/415472
+"deadlock:base::trace_event::TraceLog::GetCategoryGroupEnabled\n"
+
+// http://crbug.com/490856
+"deadlock:content::TracingControllerImpl::SetEnabledOnFileThread\n"
+
+// http://crbug.com/417193
+// Suppressing both AudioContext.{cpp,h}.
+"race:modules/webaudio/AudioContext\n"
+
+// https://code.google.com/p/skia/issues/detail?id=3294
+"race:SkBaseMutex::acquire\n"
+
+// https://crbug.com/430533
+"race:TileTaskGraphRunner::Run\n"
+
+// https://crbug.com/448203
+"race:blink::RemoteFrame::detach\n"
+
+// https://crbug.com/454652
+"race:net::NetworkChangeNotifier::SetTestNotificationsOnly\n"
+
+// https://crbug.com/455638
+"deadlock:dbus::Bus::ShutdownAndBlock\n"
+
+// https://crbug.com/455665
+"race:mojo::common::*::tick_clock\n"
+"race:mojo::common::internal::NowTicks\n"
+"race:tracked_objects::ThreadData::InitializeThreadContext\n"
+
+// https://crbug.com/459429
+"race:randomnessPid\n"
+
+// https://crbug.com/454655
+"race:content::BrowserTestBase::PostTaskToInProcessRendererAndWait\n"
+
+// End of suppressions.
+;  // Please keep this semicolon.
+
+#endif  // THREAD_SANITIZER
diff --git a/build/secondary/testing/gmock/BUILD.gn b/build/secondary/testing/gmock/BUILD.gn
new file mode 100644
index 0000000..4ec6224
--- /dev/null
+++ b/build/secondary/testing/gmock/BUILD.gn
@@ -0,0 +1,54 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("gmock_config") {
+  # Gmock headers need to be able to find themselves.
+  include_dirs = [ "include" ]
+}
+
+static_library("gmock") {
+  # TODO http://crbug.com/412064 enable this flag all the time.
+  testonly = !is_component_build
+  sources = [
+    # Sources based on files in r173 of gmock.
+    "include/gmock/gmock-actions.h",
+    "include/gmock/gmock-cardinalities.h",
+    "include/gmock/gmock-generated-actions.h",
+    "include/gmock/gmock-generated-function-mockers.h",
+    "include/gmock/gmock-generated-matchers.h",
+    "include/gmock/gmock-generated-nice-strict.h",
+    "include/gmock/gmock-matchers.h",
+    "include/gmock/gmock-spec-builders.h",
+    "include/gmock/gmock.h",
+    "include/gmock/internal/gmock-generated-internal-utils.h",
+    "include/gmock/internal/gmock-internal-utils.h",
+    "include/gmock/internal/gmock-port.h",
+
+    #"src/gmock-all.cc",  # Not needed by our build.
+    "src/gmock-cardinalities.cc",
+    "src/gmock-internal-utils.cc",
+    "src/gmock-matchers.cc",
+    "src/gmock-spec-builders.cc",
+    "src/gmock.cc",
+  ]
+
+  # This project includes some stuff from gtest's guts.
+  include_dirs = [ "../gtest/include" ]
+
+  public_configs = [
+    ":gmock_config",
+    "//testing/gtest:gtest_config",
+  ]
+}
+
+static_library("gmock_main") {
+  # TODO http://crbug.com/412064 enable this flag all the time.
+  testonly = !is_component_build
+  sources = [
+    "src/gmock_main.cc",
+  ]
+  deps = [
+    ":gmock",
+  ]
+}
diff --git a/build/secondary/testing/gtest/BUILD.gn b/build/secondary/testing/gtest/BUILD.gn
new file mode 100644
index 0000000..073faec
--- /dev/null
+++ b/build/secondary/testing/gtest/BUILD.gn
@@ -0,0 +1,135 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("gtest_config") {
+  visibility = [
+    ":*",
+    "//testing/gmock:*",  # gmock also shares this config.
+  ]
+
+  defines = [
+    # In order to allow regex matches in gtest to be shared between Windows
+    # and other systems, we tell gtest to always use its internal engine.
+    "GTEST_HAS_POSIX_RE=0",
+
+    # Chrome doesn't support or require C++11 yet.
+    "GTEST_LANG_CXX11=0",
+  ]
+
+  # Gtest headers need to be able to find themselves.
+  include_dirs = [ "include" ]
+
+  if (is_win) {
+    cflags = [ "/wd4800" ]  # Unused variable warning.
+  }
+
+  if (is_posix) {
+    defines += [
+      # gtest isn't able to figure out when RTTI is disabled for gcc
+      # versions older than 4.3.2, and assumes it's enabled.  Our Mac
+      # and Linux builds disable RTTI, and cannot guarantee that the
+      # compiler will be 4.3.2 or newer.  The Mac, for example, uses
+      # 4.2.1 as that is the latest available on that platform.  gtest
+      # must be instructed that RTTI is disabled here, and for any
+      # direct dependents that might include gtest headers.
+      "GTEST_HAS_RTTI=0",
+    ]
+  }
+
+  if (is_android) {
+    defines += [
+      # We want gtest features that use tr1::tuple, but we currently
+      # don't support the variadic templates used by libstdc++'s
+      # implementation. gtest supports this scenario by providing its
+      # own implementation but we must opt in to it.
+      "GTEST_USE_OWN_TR1_TUPLE=1",
+
+      # GTEST_USE_OWN_TR1_TUPLE only works if GTEST_HAS_TR1_TUPLE is set.
+      # gtest r625 made it so that GTEST_HAS_TR1_TUPLE is set to 0
+      # automatically on android, so it has to be set explicitly here.
+      "GTEST_HAS_TR1_TUPLE=1",
+    ]
+  }
+}
+
+config("gtest_direct_config") {
+  visibility = [ ":*" ]
+  defines = [ "UNIT_TEST" ]
+}
+
+static_library("gtest") {
+  # TODO http://crbug.com/412064 enable this flag all the time.
+  testonly = !is_component_build
+  sources = [
+    "include/gtest/gtest-death-test.h",
+    "include/gtest/gtest-message.h",
+    "include/gtest/gtest-param-test.h",
+    "include/gtest/gtest-printers.h",
+    "include/gtest/gtest-spi.h",
+    "include/gtest/gtest-test-part.h",
+    "include/gtest/gtest-typed-test.h",
+    "include/gtest/gtest.h",
+    "include/gtest/gtest_pred_impl.h",
+    "include/gtest/internal/gtest-death-test-internal.h",
+    "include/gtest/internal/gtest-filepath.h",
+    "include/gtest/internal/gtest-internal.h",
+    "include/gtest/internal/gtest-linked_ptr.h",
+    "include/gtest/internal/gtest-param-util-generated.h",
+    "include/gtest/internal/gtest-param-util.h",
+    "include/gtest/internal/gtest-port.h",
+    "include/gtest/internal/gtest-string.h",
+    "include/gtest/internal/gtest-tuple.h",
+    "include/gtest/internal/gtest-type-util.h",
+
+    #"gtest/src/gtest-all.cc",  # Not needed by our build.
+    "../multiprocess_func_list.cc",
+    "../multiprocess_func_list.h",
+    "../platform_test.h",
+    "src/gtest-death-test.cc",
+    "src/gtest-filepath.cc",
+    "src/gtest-internal-inl.h",
+    "src/gtest-port.cc",
+    "src/gtest-printers.cc",
+    "src/gtest-test-part.cc",
+    "src/gtest-typed-test.cc",
+    "src/gtest.cc",
+  ]
+
+  if (is_mac) {
+    sources += [
+      "../gtest_mac.h",
+      "../gtest_mac.mm",
+      "../platform_test_mac.mm",
+    ]
+  }
+
+  include_dirs = [ "." ]
+
+  all_dependent_configs = [ ":gtest_config" ]
+  public_configs = [ ":gtest_direct_config" ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+
+  config("gtest_warnings") {
+    if (is_win && is_clang) {
+      # The Mutex constructor initializer list in gtest-port.cc is incorrectly
+      # ordered. See
+      # https://groups.google.com/d/msg/googletestframework/S5uSV8L2TX8/U1FaTDa6J6sJ.
+      cflags = [ "-Wno-reorder" ]
+    }
+  }
+  configs += [ ":gtest_warnings" ]
+}
+
+source_set("gtest_main") {
+  # TODO http://crbug.com/412064 enable this flag all the time.
+  testonly = !is_component_build
+  sources = [
+    "src/gtest_main.cc",
+  ]
+  deps = [
+    ":gtest",
+  ]
+}
diff --git a/build/secondary/third_party/android_tools/BUILD.gn b/build/secondary/third_party/android_tools/BUILD.gn
new file mode 100644
index 0000000..afafffc
--- /dev/null
+++ b/build/secondary/third_party/android_tools/BUILD.gn
@@ -0,0 +1,104 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+config("cpu_features_include") {
+  include_dirs = [ "ndk/sources/android/cpufeatures" ]
+}
+
+# This is the GN version of
+# //build/android/ndk.gyp:cpu_features
+source_set("cpu_features") {
+  sources = [
+    "ndk/sources/android/cpufeatures/cpu-features.c",
+  ]
+  public_configs = [ ":cpu_features_include" ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+}
+
+android_java_prebuilt("android_gcm_java") {
+  jar_path = "$android_sdk_root/extras/google/gcm/gcm-client/dist/gcm.jar"
+}
+
+android_java_prebuilt("uiautomator_java") {
+  jar_path = "$android_sdk/uiautomator.jar"
+}
+
+android_java_prebuilt("android_support_annotations_javalib") {
+  jar_path = "$android_sdk_root/extras/android/support/annotations/android-support-annotations.jar"
+}
+
+java_prebuilt("android_support_multidex_java") {
+  jar_path = "$android_sdk_root/extras/android/support/multidex/library/libs/android-support-multidex.jar"
+}
+
+android_java_prebuilt("android_support_v13_java") {
+  jar_path =
+      "$android_sdk_root/extras/android/support/v13/android-support-v13.jar"
+}
+
+android_resources("android_support_v7_appcompat_resources") {
+  v14_skip = true
+  resource_dirs =
+      [ "$android_sdk_root/extras/android/support/v7/appcompat/res" ]
+  custom_package = "android.support.v7.appcompat"
+}
+
+android_java_prebuilt("android_support_v7_appcompat_java") {
+  deps = [
+    ":android_support_v7_appcompat_resources",
+  ]
+  jar_path = "$android_sdk_root/extras/android/support/v7/appcompat/libs/android-support-v7-appcompat.jar"
+}
+
+android_resources("android_support_v7_mediarouter_resources") {
+  v14_skip = true
+  resource_dirs =
+      [ "$android_sdk_root/extras/android/support/v7/mediarouter/res" ]
+  deps = [
+    ":android_support_v7_appcompat_resources",
+  ]
+  custom_package = "android.support.v7.mediarouter"
+}
+
+android_java_prebuilt("android_support_v7_mediarouter_java") {
+  deps = [
+    ":android_support_v7_mediarouter_resources",
+    ":android_support_v7_appcompat_java",
+  ]
+  jar_path = "$android_sdk_root/extras/android/support/v7/mediarouter/libs/android-support-v7-mediarouter.jar"
+}
+
+android_resources("android_support_v7_recyclerview_resources") {
+  v14_skip = true
+  resource_dirs =
+      [ "$android_sdk_root/extras/android/support/v7/recyclerview/res" ]
+  custom_package = "android.support.v7.recyclerview"
+}
+
+android_java_prebuilt("android_support_v7_recyclerview_java") {
+  deps = [
+    ":android_support_v7_appcompat_java",
+    ":android_support_v7_recyclerview_resources",
+  ]
+  jar_path = "$android_sdk_root/extras/android/support/v7/recyclerview/libs/android-support-v7-recyclerview.jar"
+}
+
+android_resources("google_play_services_default_resources") {
+  v14_skip = true
+  resource_dirs = [ "$android_sdk_root/extras/google/google_play_services/libproject/google-play-services_lib/res" ]
+  custom_package = "com.google.android.gms"
+}
+android_java_prebuilt("google_play_services_default_java") {
+  deps = [
+    ":android_support_v13_java",
+    ":android_support_v7_mediarouter_java",
+    ":google_play_services_default_resources",
+  ]
+  proguard_preprocess = false
+  jar_path = "$android_sdk_root/extras/google/google_play_services/libproject/google-play-services_lib/libs/google-play-services.jar"
+}
diff --git a/build/secondary/third_party/libjpeg_turbo/BUILD.gn b/build/secondary/third_party/libjpeg_turbo/BUILD.gn
new file mode 100644
index 0000000..62e60ae
--- /dev/null
+++ b/build/secondary/third_party/libjpeg_turbo/BUILD.gn
@@ -0,0 +1,221 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Do not use the targets in this file unless you need a certain libjpeg
+# implementation. Use the meta target //third_party:jpeg instead.
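+#
+# A hedged sketch of that preferred form (the "jpeg_reader" target name is
+# hypothetical):
+#
+#   source_set("jpeg_reader") {
+#     sources = [ "jpeg_reader.cc" ]
+#     deps = [ "//third_party:jpeg" ]
+#   }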
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+if (current_cpu == "arm") {
+  import("//build/config/arm.gni")
+}
+
+if (current_cpu == "x86" || current_cpu == "x64") {
+  import("//third_party/yasm/yasm_assemble.gni")
+
+  yasm_assemble("simd_asm") {
+    defines = []
+
+    if (current_cpu == "x86") {
+      sources = [
+        "simd/jccolor-mmx.asm",
+        "simd/jccolor-sse2.asm",
+        "simd/jcgray-mmx.asm",
+        "simd/jcgray-sse2.asm",
+        "simd/jchuff-sse2.asm",
+        "simd/jcsample-mmx.asm",
+        "simd/jcsample-sse2.asm",
+        "simd/jdcolor-mmx.asm",
+        "simd/jdcolor-sse2.asm",
+        "simd/jdmerge-mmx.asm",
+        "simd/jdmerge-sse2.asm",
+        "simd/jdsample-mmx.asm",
+        "simd/jdsample-sse2.asm",
+        "simd/jfdctflt-3dn.asm",
+        "simd/jfdctflt-sse.asm",
+        "simd/jfdctfst-mmx.asm",
+        "simd/jfdctfst-sse2.asm",
+        "simd/jfdctint-mmx.asm",
+        "simd/jfdctint-sse2.asm",
+        "simd/jidctflt-3dn.asm",
+        "simd/jidctflt-sse.asm",
+        "simd/jidctflt-sse2.asm",
+        "simd/jidctfst-mmx.asm",
+        "simd/jidctfst-sse2.asm",
+        "simd/jidctint-mmx.asm",
+        "simd/jidctint-sse2.asm",
+        "simd/jidctred-mmx.asm",
+        "simd/jidctred-sse2.asm",
+        "simd/jquant-3dn.asm",
+        "simd/jquant-mmx.asm",
+        "simd/jquant-sse.asm",
+        "simd/jquantf-sse2.asm",
+        "simd/jquanti-sse2.asm",
+        "simd/jsimdcpu.asm",
+      ]
+      defines += [
+        "__x86__",
+        "PIC",
+      ]
+    } else if (current_cpu == "x64") {
+      sources = [
+        "simd/jccolor-sse2-64.asm",
+        "simd/jcgray-sse2-64.asm",
+        "simd/jchuff-sse2-64.asm",
+        "simd/jcsample-sse2-64.asm",
+        "simd/jdcolor-sse2-64.asm",
+        "simd/jdmerge-sse2-64.asm",
+        "simd/jdsample-sse2-64.asm",
+        "simd/jfdctflt-sse-64.asm",
+        "simd/jfdctfst-sse2-64.asm",
+        "simd/jfdctint-sse2-64.asm",
+        "simd/jidctflt-sse2-64.asm",
+        "simd/jidctfst-sse2-64.asm",
+        "simd/jidctint-sse2-64.asm",
+        "simd/jidctred-sse2-64.asm",
+        "simd/jquantf-sse2-64.asm",
+        "simd/jquanti-sse2-64.asm",
+      ]
+      defines += [
+        "__x86_64__",
+        "PIC",
+      ]
+    }
+
+    if (is_win) {
+      defines += [ "MSVC" ]
+      include_dirs = [ "win" ]
+      if (current_cpu == "x86") {
+        defines += [ "WIN32" ]
+      } else {
+        defines += [ "WIN64" ]
+      }
+    } else if (is_mac) {
+      defines += [ "MACHO" ]
+      include_dirs = [ "mac" ]
+    } else if (is_linux || is_android) {
+      defines += [ "ELF" ]
+      include_dirs = [ "linux" ]
+    }
+  }
+}
+
+source_set("simd") {
+  if (current_cpu == "x86") {
+    deps = [
+      ":simd_asm",
+    ]
+    sources = [
+      "simd/jsimd_i386.c",
+    ]
+  } else if (current_cpu == "x64") {
+    deps = [
+      ":simd_asm",
+    ]
+    sources = [
+      "simd/jsimd_x86_64.c",
+    ]
+  } else if (current_cpu == "arm" && arm_version >= 7 &&
+             (arm_use_neon || arm_optionally_use_neon)) {
+    sources = [
+      "simd/jsimd_arm.c",
+      "simd/jsimd_arm_neon.S",
+    ]
+  } else {
+    sources = [
+      "jsimd_none.c",
+    ]
+  }
+  if (is_win) {
+    cflags = [ "/wd4245" ]
+  }
+}
+
+config("libjpeg_config") {
+  include_dirs = [ "." ]
+}
+
+source_set("libjpeg") {
+  sources = [
+    "jcapimin.c",
+    "jcapistd.c",
+    "jccoefct.c",
+    "jccolor.c",
+    "jcdctmgr.c",
+    "jchuff.c",
+    "jchuff.h",
+    "jcinit.c",
+    "jcmainct.c",
+    "jcmarker.c",
+    "jcmaster.c",
+    "jcomapi.c",
+    "jconfig.h",
+    "jcparam.c",
+    "jcphuff.c",
+    "jcprepct.c",
+    "jcsample.c",
+    "jdapimin.c",
+    "jdapistd.c",
+    "jdatadst.c",
+    "jdatasrc.c",
+    "jdcoefct.c",
+    "jdcolor.c",
+    "jdct.h",
+    "jddctmgr.c",
+    "jdhuff.c",
+    "jdhuff.h",
+    "jdinput.c",
+    "jdmainct.c",
+    "jdmarker.c",
+    "jdmaster.c",
+    "jdmerge.c",
+    "jdphuff.c",
+    "jdpostct.c",
+    "jdsample.c",
+    "jerror.c",
+    "jerror.h",
+    "jfdctflt.c",
+    "jfdctfst.c",
+    "jfdctint.c",
+    "jidctflt.c",
+    "jidctfst.c",
+    "jidctint.c",
+    "jidctred.c",
+    "jinclude.h",
+    "jmemmgr.c",
+    "jmemnobs.c",
+    "jmemsys.h",
+    "jmorecfg.h",
+    "jpegint.h",
+    "jpeglib.h",
+    "jpeglibmangler.h",
+    "jquant1.c",
+    "jquant2.c",
+    "jutils.c",
+    "jversion.h",
+  ]
+
+  defines = [
+    "WITH_SIMD",
+    "NO_GETENV",
+  ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+
+  public_configs = [ ":libjpeg_config" ]
+
+  # MemorySanitizer doesn't support assembly code, so keep it disabled in
+  # MSan builds for now.
+  if (is_msan) {
+    sources += [ "jsimd_none.c" ]
+  } else {
+    deps = [
+      ":simd",
+    ]
+  }
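+
+  # (An MSan build is normally selected at gen time, e.g. with a hedged
+  # example invocation such as `gn gen out/msan --args="is_msan=true"`.)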
+}
diff --git a/build/secondary/third_party/libsrtp/BUILD.gn b/build/secondary/third_party/libsrtp/BUILD.gn
new file mode 100644
index 0000000..7601bea
--- /dev/null
+++ b/build/secondary/third_party/libsrtp/BUILD.gn
@@ -0,0 +1,391 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  use_system_libsrtp = false
+  use_srtp_boringssl = true
+}
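+
+# These defaults can be overridden per-build when generating the build files,
+# e.g. (a hedged example invocation; the output directory name is arbitrary):
+#
+#   gn gen out/Default --args="use_system_libsrtp=true"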
+
+config("libsrtp_config") {
+  defines = [
+    "HAVE_CONFIG_H",
+    "HAVE_STDLIB_H",
+    "HAVE_STRING_H",
+    "TESTAPP_SOURCE",
+  ]
+
+  include_dirs = [
+    "config",
+    "srtp/include",
+    "srtp/crypto/include",
+  ]
+
+  if (use_srtp_boringssl) {
+    defines += [ "OPENSSL" ]
+  }
+
+  if (is_posix) {
+    defines += [
+      "HAVE_INT16_T",
+      "HAVE_INT32_T",
+      "HAVE_INT8_T",
+      "HAVE_UINT16_T",
+      "HAVE_UINT32_T",
+      "HAVE_UINT64_T",
+      "HAVE_UINT8_T",
+      "HAVE_STDINT_H",
+      "HAVE_INTTYPES_H",
+      "HAVE_NETINET_IN_H",
+      "HAVE_ARPA_INET_H",
+      "HAVE_UNISTD_H",
+    ]
+    cflags = [ "-Wno-unused-variable" ]
+  }
+
+  if (is_win) {
+    defines += [
+      "HAVE_BYTESWAP_METHODS_H",
+
+      # All Windows architectures are this way.
+      "SIZEOF_UNSIGNED_LONG=4",
+      "SIZEOF_UNSIGNED_LONG_LONG=8",
+    ]
+  }
+
+  if (current_cpu == "x64" || current_cpu == "x86" || current_cpu == "arm") {
+    defines += [
+      # TODO(leozwang): CPU_RISC doesn't work properly on android/arm for
+      # unknown reasons; we need to investigate the root cause. CPU_RISC is
+      # used for optimization only, and CPU_CISC should work just fine; it
+      # has been tested on android/arm with srtp test applications and
+      # libjingle.
+      "CPU_CISC",
+    ]
+  }
+
+  if (current_cpu == "mipsel") {
+    defines += [ "CPU_RISC" ]
+  }
+}
+
+config("system_libsrtp_config") {
+  defines = [ "USE_SYSTEM_LIBSRTP" ]
+  include_dirs = [ "/usr/include/srtp" ]
+}
+
+if (use_system_libsrtp) {
+  group("libsrtp") {
+    public_configs = [
+      ":libsrtp_config",
+      ":system_libsrtp_config",
+    ]
+    libs = [ "-lsrtp" ]
+  }
+} else {
+  static_library("libsrtp") {
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    public_configs = [ ":libsrtp_config" ]
+
+    sources = [
+      # includes
+      "srtp/include/ekt.h",
+      "srtp/include/getopt_s.h",
+      "srtp/include/rtp.h",
+      "srtp/include/rtp_priv.h",
+      "srtp/include/srtp.h",
+      "srtp/include/srtp_priv.h",
+      "srtp/include/ut_sim.h",
+
+      # headers
+      "srtp/crypto/include/aes.h",
+      "srtp/crypto/include/aes_cbc.h",
+      "srtp/crypto/include/aes_icm.h",
+      "srtp/crypto/include/alloc.h",
+      "srtp/crypto/include/auth.h",
+      "srtp/crypto/include/cipher.h",
+      "srtp/crypto/include/crypto.h",
+      "srtp/crypto/include/crypto_kernel.h",
+      "srtp/crypto/include/crypto_math.h",
+      "srtp/crypto/include/crypto_types.h",
+      "srtp/crypto/include/cryptoalg.h",
+      "srtp/crypto/include/datatypes.h",
+      "srtp/crypto/include/err.h",
+      "srtp/crypto/include/gf2_8.h",
+      "srtp/crypto/include/hmac.h",
+      "srtp/crypto/include/integers.h",
+      "srtp/crypto/include/kernel_compat.h",
+      "srtp/crypto/include/key.h",
+      "srtp/crypto/include/null_auth.h",
+      "srtp/crypto/include/null_cipher.h",
+      "srtp/crypto/include/prng.h",
+      "srtp/crypto/include/rand_source.h",
+      "srtp/crypto/include/rdb.h",
+      "srtp/crypto/include/rdbx.h",
+      "srtp/crypto/include/sha1.h",
+      "srtp/crypto/include/stat.h",
+      "srtp/crypto/include/xfm.h",
+
+      # sources
+      "srtp/crypto/cipher/aes.c",
+      "srtp/crypto/cipher/aes_cbc.c",
+      "srtp/crypto/cipher/aes_icm.c",
+      "srtp/crypto/cipher/cipher.c",
+      "srtp/crypto/cipher/null_cipher.c",
+      "srtp/crypto/hash/auth.c",
+      "srtp/crypto/hash/hmac.c",
+      "srtp/crypto/hash/null_auth.c",
+      "srtp/crypto/hash/sha1.c",
+      "srtp/crypto/kernel/alloc.c",
+      "srtp/crypto/kernel/crypto_kernel.c",
+      "srtp/crypto/kernel/err.c",
+      "srtp/crypto/kernel/key.c",
+      "srtp/crypto/math/datatypes.c",
+      "srtp/crypto/math/gf2_8.c",
+      "srtp/crypto/math/stat.c",
+      "srtp/crypto/replay/rdb.c",
+      "srtp/crypto/replay/rdbx.c",
+      "srtp/crypto/replay/ut_sim.c",
+      "srtp/crypto/rng/ctr_prng.c",
+      "srtp/crypto/rng/prng.c",
+      "srtp/crypto/rng/rand_source.c",
+      "srtp/srtp/ekt.c",
+      "srtp/srtp/srtp.c",
+    ]
+
+    if (is_clang) {
+      cflags = [ "-Wno-implicit-function-declaration" ]
+    }
+
+    if (use_srtp_boringssl) {
+      deps = [
+        "//third_party/boringssl:boringssl",
+      ]
+      public_deps = [
+        "//third_party/boringssl:boringssl",
+      ]
+      sources -= [
+        "srtp/crypto/cipher/aes_cbc.c",
+        "srtp/crypto/cipher/aes_icm.c",
+        "srtp/crypto/hash/hmac.c",
+        "srtp/crypto/hash/sha1.c",
+        "srtp/crypto/rng/ctr_prng.c",
+        "srtp/crypto/rng/prng.c",
+      ]
+      sources += [
+        "srtp/crypto/cipher/aes_gcm_ossl.c",
+        "srtp/crypto/cipher/aes_icm_ossl.c",
+        "srtp/crypto/hash/hmac_ossl.c",
+        "srtp/crypto/include/aes_gcm_ossl.h",
+        "srtp/crypto/include/aes_icm_ossl.h",
+      ]
+    }
+  }
+
+  # TODO(GYP): A bunch of these tests don't compile (in gyp either). They're
+  # not very broken, so they could probably be made to work if that's useful.
+  if (!is_win) {
+    executable("rdbx_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/include/getopt_s.h",
+        "srtp/test/getopt_s.c",
+        "srtp/test/rdbx_driver.c",
+      ]
+    }
+
+    executable("srtp_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/include/getopt_s.h",
+        "srtp/include/srtp_priv.h",
+        "srtp/test/getopt_s.c",
+        "srtp/test/srtp_driver.c",
+      ]
+    }
+
+    executable("roc_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/include/rdbx.h",
+        "srtp/include/ut_sim.h",
+        "srtp/test/roc_driver.c",
+      ]
+    }
+
+    executable("replay_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/include/rdbx.h",
+        "srtp/include/ut_sim.h",
+        "srtp/test/replay_driver.c",
+      ]
+    }
+
+    executable("rtpw") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/include/datatypes.h",
+        "srtp/include/getopt_s.h",
+        "srtp/include/rtp.h",
+        "srtp/include/srtp.h",
+        "srtp/test/getopt_s.c",
+        "srtp/test/rtp.c",
+        "srtp/test/rtpw.c",
+      ]
+      if (is_android) {
+        defines = [ "HAVE_SYS_SOCKET_H" ]
+      }
+      if (is_clang) {
+        cflags = [ "-Wno-implicit-function-declaration" ]
+      }
+    }
+
+    executable("srtp_test_cipher_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/cipher_driver.c",
+        "srtp/include/getopt_s.h",
+        "srtp/test/getopt_s.c",
+      ]
+    }
+
+    executable("srtp_test_datatypes_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/datatypes_driver.c",
+      ]
+    }
+
+    executable("srtp_test_stat_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/stat_driver.c",
+      ]
+    }
+
+    executable("srtp_test_sha1_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/sha1_driver.c",
+      ]
+    }
+
+    executable("srtp_test_kernel_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/kernel_driver.c",
+        "srtp/include/getopt_s.h",
+        "srtp/test/getopt_s.c",
+      ]
+    }
+
+    executable("srtp_test_aes_calc") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/aes_calc.c",
+      ]
+    }
+
+    executable("srtp_test_rand_gen") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/rand_gen.c",
+        "srtp/include/getopt_s.h",
+        "srtp/test/getopt_s.c",
+      ]
+    }
+
+    executable("srtp_test_rand_gen_soak") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/rand_gen_soak.c",
+        "srtp/include/getopt_s.h",
+        "srtp/test/getopt_s.c",
+      ]
+    }
+
+    executable("srtp_test_env") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/env.c",
+      ]
+    }
+
+    group("srtp_runtest") {
+      deps = [
+        ":rdbx_driver",
+        ":srtp_driver",
+        ":roc_driver",
+        ":replay_driver",
+        ":rtpw",
+        ":srtp_test_cipher_driver",
+        ":srtp_test_datatypes_driver",
+        ":srtp_test_stat_driver",
+        ":srtp_test_sha1_driver",
+        ":srtp_test_kernel_driver",
+        ":srtp_test_aes_calc",
+        ":srtp_test_rand_gen",
+        ":srtp_test_rand_gen_soak",
+        ":srtp_test_env",
+      ]
+    }
+  }
+}
diff --git a/build/secondary/third_party/nss/BUILD.gn b/build/secondary/third_party/nss/BUILD.gn
new file mode 100644
index 0000000..25d449e
--- /dev/null
+++ b/build/secondary/third_party/nss/BUILD.gn
@@ -0,0 +1,1211 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+if (is_linux) {
+  # This is a dependency on NSS with no libssl. On Linux we use a built-in SSL
+  # library but the system NSS libraries. Non-Linux platforms using NSS use the
+  # hermetic one in //third_party/nss.
+  #
+  # Generally you should depend on //crypto:platform instead of using this
+  # config since that will properly pick up NSS or OpenSSL depending on
+  # platform and build config.
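+  #
+  # A hedged sketch of that preferred form (the "tls_client" target name is
+  # hypothetical):
+  #
+  #   source_set("tls_client") {
+  #     deps = [ "//crypto:platform" ]
+  #   }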
+  pkg_config("system_nss_no_ssl_config") {
+    packages = [ "nss" ]
+    extra_args = [
+      "-v",
+      "-lssl3",
+    ]
+  }
+} else {
+  include_nss_root_certs = is_ios
+  include_nss_libpkix = is_ios
+
+  config("nspr_config") {
+    defines = [ "NO_NSPR_10_SUPPORT" ]
+    include_dirs = [
+      "nspr/pr/include",
+      "nspr/lib/ds",
+      "nspr/lib/libc/include",
+    ]
+
+    if (component_mode != "shared_library") {
+      defines += [ "NSPR_STATIC" ]
+    }
+  }
+
+  component("nspr") {
+    output_name = "crnspr"
+    sources = [
+      "nspr/lib/ds/plarena.c",
+      "nspr/lib/ds/plarena.h",
+      "nspr/lib/ds/plarenas.h",
+      "nspr/lib/ds/plhash.c",
+      "nspr/lib/ds/plhash.h",
+      "nspr/lib/libc/include/plbase64.h",
+      "nspr/lib/libc/include/plerror.h",
+      "nspr/lib/libc/include/plgetopt.h",
+      "nspr/lib/libc/include/plstr.h",
+      "nspr/lib/libc/src/base64.c",
+      "nspr/lib/libc/src/plerror.c",
+      "nspr/lib/libc/src/plgetopt.c",
+      "nspr/lib/libc/src/strcase.c",
+      "nspr/lib/libc/src/strcat.c",
+      "nspr/lib/libc/src/strchr.c",
+      "nspr/lib/libc/src/strcmp.c",
+      "nspr/lib/libc/src/strcpy.c",
+      "nspr/lib/libc/src/strdup.c",
+      "nspr/lib/libc/src/strlen.c",
+      "nspr/lib/libc/src/strpbrk.c",
+      "nspr/lib/libc/src/strstr.c",
+      "nspr/lib/libc/src/strtok.c",
+      "nspr/pr/include/md/_darwin.cfg",
+      "nspr/pr/include/md/_darwin.h",
+      "nspr/pr/include/md/_pcos.h",
+      "nspr/pr/include/md/_pth.h",
+      "nspr/pr/include/md/_unix_errors.h",
+      "nspr/pr/include/md/_unixos.h",
+      "nspr/pr/include/md/_win32_errors.h",
+      "nspr/pr/include/md/_win95.cfg",
+      "nspr/pr/include/md/_win95.h",
+      "nspr/pr/include/md/prosdep.h",
+      "nspr/pr/include/nspr.h",
+      "nspr/pr/include/obsolete/pralarm.h",
+      "nspr/pr/include/obsolete/probslet.h",
+      "nspr/pr/include/obsolete/protypes.h",
+      "nspr/pr/include/obsolete/prsem.h",
+      "nspr/pr/include/pratom.h",
+      "nspr/pr/include/prbit.h",
+      "nspr/pr/include/prclist.h",
+      "nspr/pr/include/prcmon.h",
+      "nspr/pr/include/prcountr.h",
+      "nspr/pr/include/prcpucfg.h",
+      "nspr/pr/include/prcvar.h",
+      "nspr/pr/include/prdtoa.h",
+      "nspr/pr/include/prenv.h",
+      "nspr/pr/include/prerr.h",
+      "nspr/pr/include/prerror.h",
+      "nspr/pr/include/prinet.h",
+      "nspr/pr/include/prinit.h",
+      "nspr/pr/include/prinrval.h",
+      "nspr/pr/include/prio.h",
+      "nspr/pr/include/pripcsem.h",
+      "nspr/pr/include/private/pprio.h",
+      "nspr/pr/include/private/pprmwait.h",
+      "nspr/pr/include/private/pprthred.h",
+      "nspr/pr/include/private/primpl.h",
+      "nspr/pr/include/private/prpriv.h",
+      "nspr/pr/include/prlink.h",
+      "nspr/pr/include/prlock.h",
+      "nspr/pr/include/prlog.h",
+      "nspr/pr/include/prlong.h",
+      "nspr/pr/include/prmem.h",
+      "nspr/pr/include/prmon.h",
+      "nspr/pr/include/prmwait.h",
+      "nspr/pr/include/prnetdb.h",
+      "nspr/pr/include/prolock.h",
+      "nspr/pr/include/prpdce.h",
+      "nspr/pr/include/prprf.h",
+      "nspr/pr/include/prproces.h",
+      "nspr/pr/include/prrng.h",
+      "nspr/pr/include/prrwlock.h",
+      "nspr/pr/include/prshm.h",
+      "nspr/pr/include/prshma.h",
+      "nspr/pr/include/prsystem.h",
+      "nspr/pr/include/prthread.h",
+      "nspr/pr/include/prtime.h",
+      "nspr/pr/include/prtpool.h",
+      "nspr/pr/include/prtrace.h",
+      "nspr/pr/include/prtypes.h",
+      "nspr/pr/include/prvrsion.h",
+      "nspr/pr/include/prwin16.h",
+      "nspr/pr/src/io/prdir.c",
+      "nspr/pr/src/io/prfdcach.c",
+      "nspr/pr/src/io/prfile.c",
+      "nspr/pr/src/io/prio.c",
+      "nspr/pr/src/io/priometh.c",
+      "nspr/pr/src/io/pripv6.c",
+      "nspr/pr/src/io/prlayer.c",
+      "nspr/pr/src/io/prlog.c",
+      "nspr/pr/src/io/prmapopt.c",
+      "nspr/pr/src/io/prmmap.c",
+      "nspr/pr/src/io/prmwait.c",
+      "nspr/pr/src/io/prpolevt.c",
+      "nspr/pr/src/io/prprf.c",
+      "nspr/pr/src/io/prscanf.c",
+      "nspr/pr/src/io/prsocket.c",
+      "nspr/pr/src/io/prstdio.c",
+      "nspr/pr/src/linking/prlink.c",
+      "nspr/pr/src/malloc/prmalloc.c",
+      "nspr/pr/src/malloc/prmem.c",
+      "nspr/pr/src/md/prosdep.c",
+      "nspr/pr/src/md/unix/darwin.c",
+      "nspr/pr/src/md/unix/os_Darwin.s",
+      "nspr/pr/src/md/unix/unix.c",
+      "nspr/pr/src/md/unix/unix_errors.c",
+      "nspr/pr/src/md/unix/uxproces.c",
+      "nspr/pr/src/md/unix/uxrng.c",
+      "nspr/pr/src/md/unix/uxshm.c",
+      "nspr/pr/src/md/unix/uxwrap.c",
+      "nspr/pr/src/md/windows/ntgc.c",
+      "nspr/pr/src/md/windows/ntinrval.c",
+      "nspr/pr/src/md/windows/ntmisc.c",
+      "nspr/pr/src/md/windows/ntsec.c",
+      "nspr/pr/src/md/windows/ntsem.c",
+      "nspr/pr/src/md/windows/w32ipcsem.c",
+      "nspr/pr/src/md/windows/w32poll.c",
+      "nspr/pr/src/md/windows/w32rng.c",
+      "nspr/pr/src/md/windows/w32shm.c",
+      "nspr/pr/src/md/windows/w95cv.c",
+      "nspr/pr/src/md/windows/w95dllmain.c",
+      "nspr/pr/src/md/windows/w95io.c",
+      "nspr/pr/src/md/windows/w95sock.c",
+      "nspr/pr/src/md/windows/w95thred.c",
+      "nspr/pr/src/md/windows/win32_errors.c",
+      "nspr/pr/src/memory/prseg.c",
+      "nspr/pr/src/memory/prshm.c",
+      "nspr/pr/src/memory/prshma.c",
+      "nspr/pr/src/misc/pralarm.c",
+      "nspr/pr/src/misc/pratom.c",
+      "nspr/pr/src/misc/praton.c",
+      "nspr/pr/src/misc/prcountr.c",
+      "nspr/pr/src/misc/prdtoa.c",
+      "nspr/pr/src/misc/prenv.c",
+      "nspr/pr/src/misc/prerr.c",
+      "nspr/pr/src/misc/prerror.c",
+      "nspr/pr/src/misc/prerrortable.c",
+      "nspr/pr/src/misc/prinit.c",
+      "nspr/pr/src/misc/prinrval.c",
+      "nspr/pr/src/misc/pripc.c",
+      "nspr/pr/src/misc/pripcsem.c",
+      "nspr/pr/src/misc/prlog2.c",
+      "nspr/pr/src/misc/prlong.c",
+      "nspr/pr/src/misc/prnetdb.c",
+      "nspr/pr/src/misc/prolock.c",
+      "nspr/pr/src/misc/prrng.c",
+      "nspr/pr/src/misc/prsystem.c",
+      "nspr/pr/src/misc/prthinfo.c",
+      "nspr/pr/src/misc/prtime.c",
+      "nspr/pr/src/misc/prtpool.c",
+      "nspr/pr/src/misc/prtrace.c",
+      "nspr/pr/src/pthreads/ptio.c",
+      "nspr/pr/src/pthreads/ptmisc.c",
+      "nspr/pr/src/pthreads/ptsynch.c",
+      "nspr/pr/src/pthreads/ptthread.c",
+      "nspr/pr/src/threads/combined/prucpu.c",
+      "nspr/pr/src/threads/combined/prucv.c",
+      "nspr/pr/src/threads/combined/prulock.c",
+      "nspr/pr/src/threads/combined/prustack.c",
+      "nspr/pr/src/threads/combined/pruthr.c",
+      "nspr/pr/src/threads/prcmon.c",
+      "nspr/pr/src/threads/prcthr.c",
+      "nspr/pr/src/threads/prdump.c",
+      "nspr/pr/src/threads/prmon.c",
+      "nspr/pr/src/threads/prrwlock.c",
+      "nspr/pr/src/threads/prsem.c",
+      "nspr/pr/src/threads/prtpd.c",
+    ]
+
+    public_configs = [ ":nspr_config" ]
+
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    if (is_win) {
+      configs -= [
+        "//build/config/win:unicode",  # Requires 8-bit mode.
+        "//build/config/win:lean_and_mean",  # Won"t compile with lean and mean.
+      ]
+    }
+    configs += [
+      "//build/config/compiler:no_chromium_code",
+      "//build/config/compiler:no_size_t_to_int_warning",
+    ]
+
+    cflags = []
+    defines = [
+      "_NSPR_BUILD_",
+      "FORCE_PR_LOG",
+    ]
+
+    include_dirs = [ "nspr/pr/include/private" ]
+
+    if (is_win) {
+      cflags = [ "/wd4554" ]  # Check precidence.
+      defines += [
+        "XP_PC",
+        "WIN32",
+        "WIN95",
+        "_PR_GLOBAL_THREADS_ONLY",
+        "_CRT_SECURE_NO_WARNINGS",
+      ]
+    } else {
+      sources -= [
+        "nspr/pr/src/md/windows/ntgc.c",
+        "nspr/pr/src/md/windows/ntinrval.c",
+        "nspr/pr/src/md/windows/ntmisc.c",
+        "nspr/pr/src/md/windows/ntsec.c",
+        "nspr/pr/src/md/windows/ntsem.c",
+        "nspr/pr/src/md/windows/w32ipcsem.c",
+        "nspr/pr/src/md/windows/w32poll.c",
+        "nspr/pr/src/md/windows/w32rng.c",
+        "nspr/pr/src/md/windows/w32shm.c",
+        "nspr/pr/src/md/windows/w95cv.c",
+        "nspr/pr/src/md/windows/w95dllmain.c",
+        "nspr/pr/src/md/windows/w95io.c",
+        "nspr/pr/src/md/windows/w95sock.c",
+        "nspr/pr/src/md/windows/w95thred.c",
+        "nspr/pr/src/md/windows/win32_errors.c",
+        "nspr/pr/src/threads/combined/prucpu.c",
+        "nspr/pr/src/threads/combined/prucv.c",
+        "nspr/pr/src/threads/combined/prulock.c",
+        "nspr/pr/src/threads/combined/prustack.c",
+        "nspr/pr/src/threads/combined/pruthr.c",
+      ]
+    }
+
+    if (!is_posix) {
+      sources -= [
+        "nspr/pr/src/md/unix/darwin.c",
+        "nspr/pr/src/md/unix/os_Darwin.s",
+        "nspr/pr/src/md/unix/unix.c",
+        "nspr/pr/src/md/unix/unix_errors.c",
+        "nspr/pr/src/md/unix/uxproces.c",
+        "nspr/pr/src/md/unix/uxrng.c",
+        "nspr/pr/src/md/unix/uxshm.c",
+        "nspr/pr/src/md/unix/uxwrap.c",
+        "nspr/pr/src/pthreads/ptio.c",
+        "nspr/pr/src/pthreads/ptmisc.c",
+        "nspr/pr/src/pthreads/ptsynch.c",
+        "nspr/pr/src/pthreads/ptthread.c",
+      ]
+    }
+
+    if (current_cpu == "x86") {
+      defines += [ "_X86_" ]
+    } else if (current_cpu == "x64") {
+      defines += [ "_AMD64_" ]
+    }
+
+    if (is_mac || is_ios) {
+      sources -= [
+        "nspr/pr/src/io/prdir.c",
+        "nspr/pr/src/io/prfile.c",
+        "nspr/pr/src/io/prio.c",
+        "nspr/pr/src/io/prsocket.c",
+        "nspr/pr/src/misc/pripcsem.c",
+        "nspr/pr/src/threads/prcthr.c",
+        "nspr/pr/src/threads/prdump.c",
+        "nspr/pr/src/threads/prmon.c",
+        "nspr/pr/src/threads/prsem.c",
+      ]
+      defines += [
+        "XP_UNIX",
+        "DARWIN",
+        "XP_MACOSX",
+        "_PR_PTHREADS",
+        "HAVE_BSD_FLOCK",
+        "HAVE_DLADDR",
+        "HAVE_LCHOWN",
+        "HAVE_SOCKLEN_T",
+        "HAVE_STRERROR",
+      ]
+    }
+
+    if (is_mac) {
+      defines += [ "HAVE_CRT_EXTERNS_H" ]
+      libs = [
+        "CoreFoundation.framework",
+        "CoreServices.framework",
+      ]
+    }
+
+    if (is_clang) {
+      cflags += [
+        # nspr uses a bunch of deprecated functions (NSLinkModule etc) in
+        # prlink.c on mac.
+        "-Wno-deprecated-declarations",
+
+        # nspr passes "const char*" through "void*".
+        "-Wno-incompatible-pointer-types",
+
+        # nspr passes "int*" through "unsigned int*".
+        "-Wno-pointer-sign",
+      ]
+
+      # nspr uses assert(!"foo") instead of assert(false && "foo").
+      configs -= [ "//build/config/clang:extra_warnings" ]
+    }
+  }
+
+  component("nss") {
+    output_name = "crnss"
+    sources = [
+      # Ensure at least one object file is produced, so that MSVC does not
+      # warn when creating the static/shared library. See the note for
+      # the "nssckbi" target for why the "nss" target was split as such.
+      "nss/lib/nss/nssver.c",
+    ]
+
+    public_deps = [
+      ":nss_static",
+    ]
+
+    if (include_nss_root_certs) {
+      public_deps += [ ":nssckbi" ]
+    }
+
+    if (component_mode == "shared_library") {
+      if (is_mac) {
+        ldflags = [ "-all_load" ]
+      } else if (is_win) {
+        # Pass the def file to the linker.
+        ldflags =
+            [ "/DEF:" + rebase_path("nss/exports_win.def", root_build_dir) ]
+      }
+    }
+  }
+
+  config("nssckbi_config") {
+    include_dirs = [ "nss/lib/ckfw/builtins" ]
+  }
+
+  # This is really more of a pseudo-target to work around the fact that
+  # a single static_library target cannot contain two object files of the
+  # same name (hash.o / hash.obj). Logically, this is part of the
+  # "nss_static" target. By separating it out, it creates a possible
+  # circular dependency between "nss_static" and "nssckbi" when
+  # "exclude_nss_root_certs" is not specified, as "nss_static" depends on
+  # the "builtinsC_GetFunctionList" exported by this target. This is an
+  # artifact of how NSS is being statically built, which is not an
+  # officially supported configuration - normally, "nssckbi.dll/so" would
+  # depend on libnss3.dll/so, and the higher layer caller would instruct
+  # libnss3.dll to dynamically load nssckbi.dll, breaking the circle.
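+  #
+  # Sketched from the targets in this file, the cycle being worked around is:
+  #
+  #   nss_static --(calls builtinsC_GetFunctionList)--> nssckbi
+  #   nssckbi --(public_deps)--> nss_static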
+  #
+  # TODO(rsleevi): http://crbug.com/128134 - Break the circular dependency
+  # without requiring nssckbi to be built as a shared library.
+  source_set("nssckbi") {
+    visibility = [ ":nss" ]  # This target is internal implementation detail.
+
+    sources = [
+      "nss/lib/ckfw/builtins/anchor.c",
+      "nss/lib/ckfw/builtins/bfind.c",
+      "nss/lib/ckfw/builtins/binst.c",
+      "nss/lib/ckfw/builtins/bobject.c",
+      "nss/lib/ckfw/builtins/bsession.c",
+      "nss/lib/ckfw/builtins/bslot.c",
+      "nss/lib/ckfw/builtins/btoken.c",
+      "nss/lib/ckfw/builtins/builtins.h",
+      "nss/lib/ckfw/builtins/certdata.c",
+      "nss/lib/ckfw/builtins/ckbiver.c",
+      "nss/lib/ckfw/builtins/constants.c",
+      "nss/lib/ckfw/builtins/nssckbi.h",
+      "nss/lib/ckfw/ck.h",
+      "nss/lib/ckfw/ckfw.h",
+      "nss/lib/ckfw/ckfwm.h",
+      "nss/lib/ckfw/ckfwtm.h",
+      "nss/lib/ckfw/ckmd.h",
+      "nss/lib/ckfw/ckt.h",
+      "nss/lib/ckfw/crypto.c",
+      "nss/lib/ckfw/find.c",
+      "nss/lib/ckfw/hash.c",
+      "nss/lib/ckfw/instance.c",
+      "nss/lib/ckfw/mechanism.c",
+      "nss/lib/ckfw/mutex.c",
+      "nss/lib/ckfw/nssck.api",
+      "nss/lib/ckfw/nssckepv.h",
+      "nss/lib/ckfw/nssckft.h",
+      "nss/lib/ckfw/nssckfw.h",
+      "nss/lib/ckfw/nssckfwc.h",
+      "nss/lib/ckfw/nssckfwt.h",
+      "nss/lib/ckfw/nssckg.h",
+      "nss/lib/ckfw/nssckmdt.h",
+      "nss/lib/ckfw/nssckt.h",
+      "nss/lib/ckfw/object.c",
+      "nss/lib/ckfw/session.c",
+      "nss/lib/ckfw/sessobj.c",
+      "nss/lib/ckfw/slot.c",
+      "nss/lib/ckfw/token.c",
+      "nss/lib/ckfw/wrap.c",
+    ]
+
+    configs -= [ "//build/config/compiler:chromium_code" ]
+
+    if (is_win) {
+      configs -= [ "//build/config/win:unicode" ]  # Requires 8-bit mode.
+    }
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+
+    include_dirs = [ "nss/lib/ckfw" ]
+    public_configs = [ ":nssckbi_config" ]
+
+    public_deps = [
+      ":nss_static",
+    ]
+  }
+
+  config("nss_static_config") {
+    defines = [
+      "NSS_STATIC",
+      "NSS_USE_STATIC_LIBS",
+      "USE_UTIL_DIRECTLY",
+    ]
+    if (is_win) {
+      defines += [ "_WINDOWS" ]
+    }
+    include_dirs = [
+      "nspr/pr/include",
+      "nspr/lib/ds",
+      "nspr/lib/libc/include",
+      "nss/lib/base",
+      "nss/lib/certdb",
+      "nss/lib/certhigh",
+      "nss/lib/cryptohi",
+      "nss/lib/dev",
+      "nss/lib/freebl",
+      "nss/lib/freebl/ecl",
+      "nss/lib/nss",
+      "nss/lib/pk11wrap",
+      "nss/lib/pkcs7",
+      "nss/lib/pki",
+      "nss/lib/smime",
+      "nss/lib/softoken",
+      "nss/lib/util",
+    ]
+  }
+
+  if (is_win && current_cpu == "x86") {
+    source_set("nss_static_avx") {
+      sources = [
+        "nss/lib/freebl/intel-gcm-wrap.c",
+        "nss/lib/freebl/intel-gcm-x86-masm.asm",
+        "nss/lib/freebl/intel-gcm.h",
+      ]
+      defines = [
+        "_WINDOWS",
+        "_X86_",
+        "INTEL_GCM",
+        "MP_API_COMPATIBLE",
+        "MP_ASSEMBLY_DIV_2DX1D",
+        "MP_ASSEMBLY_MULTIPLY",
+        "MP_ASSEMBLY_SQUARE",
+        "MP_NO_MP_WORD",
+        "MP_USE_UINT_DIGIT",
+        "NSS_DISABLE_DBM",
+        "NSS_STATIC",
+        "NSS_USE_STATIC_LIBS",
+        "NSS_X86",
+        "NSS_X86_OR_X64",
+        "RIJNDAEL_INCLUDE_TABLES",
+        "SHLIB_PREFIX=\"\"",
+        "SHLIB_SUFFIX=\"dll\"",
+        "SHLIB_VERSION=\"3\"",
+        "SOFTOKEN_LIB_NAME=\"softokn3.dll\"",
+        "SOFTOKEN_SHLIB_VERSION=\"3\"",
+        "USE_HW_AES",
+        "USE_UTIL_DIRECTLY",
+        "WIN32",
+        "WIN95",
+        "XP_PC",
+      ]
+      include_dirs = [
+        "nspr/pr/include",
+        "nspr/lib/ds",
+        "nspr/lib/libc/include",
+        "nss/lib/freebl/ecl",
+        "nss/lib/util",
+      ]
+    }
+  }
+
+  source_set("nss_static") {
+    visibility = [ ":*" ]  # Internal implementation detail.
+
+    sources = [
+      "nss/lib/base/arena.c",
+      "nss/lib/base/base.h",
+      "nss/lib/base/baset.h",
+      "nss/lib/base/error.c",
+      "nss/lib/base/errorval.c",
+      "nss/lib/base/hash.c",
+      "nss/lib/base/hashops.c",
+      "nss/lib/base/item.c",
+      "nss/lib/base/libc.c",
+      "nss/lib/base/list.c",
+      "nss/lib/base/nssbase.h",
+      "nss/lib/base/nssbaset.h",
+      "nss/lib/base/nssutf8.c",
+      "nss/lib/base/tracker.c",
+      "nss/lib/certdb/alg1485.c",
+      "nss/lib/certdb/cert.h",
+      "nss/lib/certdb/certdb.c",
+      "nss/lib/certdb/certdb.h",
+      "nss/lib/certdb/certi.h",
+      "nss/lib/certdb/certt.h",
+      "nss/lib/certdb/certv3.c",
+      "nss/lib/certdb/certxutl.c",
+      "nss/lib/certdb/certxutl.h",
+      "nss/lib/certdb/crl.c",
+      "nss/lib/certdb/genname.c",
+      "nss/lib/certdb/genname.h",
+      "nss/lib/certdb/polcyxtn.c",
+      "nss/lib/certdb/secname.c",
+      "nss/lib/certdb/stanpcertdb.c",
+      "nss/lib/certdb/xauthkid.c",
+      "nss/lib/certdb/xbsconst.c",
+      "nss/lib/certdb/xconst.c",
+      "nss/lib/certdb/xconst.h",
+      "nss/lib/certhigh/certhigh.c",
+      "nss/lib/certhigh/certhtml.c",
+      "nss/lib/certhigh/certreq.c",
+      "nss/lib/certhigh/certvfy.c",
+      "nss/lib/certhigh/crlv2.c",
+      "nss/lib/certhigh/ocsp.c",
+      "nss/lib/certhigh/ocsp.h",
+      "nss/lib/certhigh/ocspi.h",
+      "nss/lib/certhigh/ocspsig.c",
+      "nss/lib/certhigh/ocspt.h",
+      "nss/lib/certhigh/ocspti.h",
+      "nss/lib/certhigh/xcrldist.c",
+      "nss/lib/cryptohi/cryptohi.h",
+      "nss/lib/cryptohi/cryptoht.h",
+      "nss/lib/cryptohi/dsautil.c",
+      "nss/lib/cryptohi/key.h",
+      "nss/lib/cryptohi/keyhi.h",
+      "nss/lib/cryptohi/keyi.h",
+      "nss/lib/cryptohi/keyt.h",
+      "nss/lib/cryptohi/keythi.h",
+      "nss/lib/cryptohi/sechash.c",
+      "nss/lib/cryptohi/sechash.h",
+      "nss/lib/cryptohi/seckey.c",
+      "nss/lib/cryptohi/secsign.c",
+      "nss/lib/cryptohi/secvfy.c",
+      "nss/lib/dev/ckhelper.c",
+      "nss/lib/dev/ckhelper.h",
+      "nss/lib/dev/dev.h",
+      "nss/lib/dev/devm.h",
+      "nss/lib/dev/devslot.c",
+      "nss/lib/dev/devt.h",
+      "nss/lib/dev/devtm.h",
+      "nss/lib/dev/devtoken.c",
+      "nss/lib/dev/devutil.c",
+      "nss/lib/dev/nssdev.h",
+      "nss/lib/dev/nssdevt.h",
+      "nss/lib/freebl/aeskeywrap.c",
+      "nss/lib/freebl/alg2268.c",
+      "nss/lib/freebl/alghmac.c",
+      "nss/lib/freebl/alghmac.h",
+      "nss/lib/freebl/arcfive.c",
+      "nss/lib/freebl/arcfour.c",
+      "nss/lib/freebl/blapi.h",
+      "nss/lib/freebl/blapii.h",
+      "nss/lib/freebl/blapit.h",
+      "nss/lib/freebl/camellia.c",
+      "nss/lib/freebl/camellia.h",
+      "nss/lib/freebl/chacha20/chacha20.c",
+      "nss/lib/freebl/chacha20/chacha20.h",
+      "nss/lib/freebl/chacha20/chacha20_vec.c",
+      "nss/lib/freebl/chacha20poly1305.c",
+      "nss/lib/freebl/chacha20poly1305.h",
+      "nss/lib/freebl/ctr.c",
+      "nss/lib/freebl/ctr.h",
+      "nss/lib/freebl/cts.c",
+      "nss/lib/freebl/cts.h",
+      "nss/lib/freebl/des.c",
+      "nss/lib/freebl/des.h",
+      "nss/lib/freebl/desblapi.c",
+      "nss/lib/freebl/dh.c",
+      "nss/lib/freebl/drbg.c",
+      "nss/lib/freebl/dsa.c",
+      "nss/lib/freebl/ec.c",
+      "nss/lib/freebl/ec.h",
+      "nss/lib/freebl/ecdecode.c",
+      "nss/lib/freebl/ecl/ec2.h",
+      "nss/lib/freebl/ecl/ec_naf.c",
+      "nss/lib/freebl/ecl/ecl-curve.h",
+      "nss/lib/freebl/ecl/ecl-exp.h",
+      "nss/lib/freebl/ecl/ecl-priv.h",
+      "nss/lib/freebl/ecl/ecl.c",
+      "nss/lib/freebl/ecl/ecl.h",
+      "nss/lib/freebl/ecl/ecl_curve.c",
+      "nss/lib/freebl/ecl/ecl_gf.c",
+      "nss/lib/freebl/ecl/ecl_mult.c",
+      "nss/lib/freebl/ecl/ecp.h",
+      "nss/lib/freebl/ecl/ecp_256.c",
+      "nss/lib/freebl/ecl/ecp_256_32.c",
+      "nss/lib/freebl/ecl/ecp_384.c",
+      "nss/lib/freebl/ecl/ecp_521.c",
+      "nss/lib/freebl/ecl/ecp_aff.c",
+      "nss/lib/freebl/ecl/ecp_jac.c",
+      "nss/lib/freebl/ecl/ecp_jm.c",
+      "nss/lib/freebl/ecl/ecp_mont.c",
+      "nss/lib/freebl/gcm.c",
+      "nss/lib/freebl/gcm.h",
+      "nss/lib/freebl/hmacct.c",
+      "nss/lib/freebl/hmacct.h",
+      "nss/lib/freebl/intel-aes-x86-masm.asm",
+      "nss/lib/freebl/intel-aes.h",
+      "nss/lib/freebl/jpake.c",
+      "nss/lib/freebl/md2.c",
+      "nss/lib/freebl/md5.c",
+      "nss/lib/freebl/mpi/logtab.h",
+      "nss/lib/freebl/mpi/mp_gf2m-priv.h",
+      "nss/lib/freebl/mpi/mp_gf2m.c",
+      "nss/lib/freebl/mpi/mp_gf2m.h",
+      "nss/lib/freebl/mpi/mpcpucache.c",
+      "nss/lib/freebl/mpi/mpi-config.h",
+      "nss/lib/freebl/mpi/mpi-priv.h",
+      "nss/lib/freebl/mpi/mpi.c",
+      "nss/lib/freebl/mpi/mpi.h",
+      "nss/lib/freebl/mpi/mpi_amd64.c",
+      "nss/lib/freebl/mpi/mpi_arm.c",
+      "nss/lib/freebl/mpi/mpi_arm_mac.c",
+      "nss/lib/freebl/mpi/mpi_x86_asm.c",
+      "nss/lib/freebl/mpi/mplogic.c",
+      "nss/lib/freebl/mpi/mplogic.h",
+      "nss/lib/freebl/mpi/mpmontg.c",
+      "nss/lib/freebl/mpi/mpprime.c",
+      "nss/lib/freebl/mpi/mpprime.h",
+      "nss/lib/freebl/mpi/primes.c",
+      "nss/lib/freebl/nss_build_config_mac.h",
+      "nss/lib/freebl/poly1305/poly1305-donna-x64-sse2-incremental-source.c",
+      "nss/lib/freebl/poly1305/poly1305.c",
+      "nss/lib/freebl/poly1305/poly1305.h",
+      "nss/lib/freebl/pqg.c",
+      "nss/lib/freebl/pqg.h",
+      "nss/lib/freebl/rawhash.c",
+      "nss/lib/freebl/rijndael.c",
+      "nss/lib/freebl/rijndael.h",
+      "nss/lib/freebl/rijndael32.tab",
+      "nss/lib/freebl/rsa.c",
+      "nss/lib/freebl/rsapkcs.c",
+      "nss/lib/freebl/secmpi.h",
+      "nss/lib/freebl/secrng.h",
+      "nss/lib/freebl/seed.c",
+      "nss/lib/freebl/seed.h",
+      "nss/lib/freebl/sha256.h",
+      "nss/lib/freebl/sha512.c",
+      "nss/lib/freebl/sha_fast.c",
+      "nss/lib/freebl/sha_fast.h",
+      "nss/lib/freebl/shsign.h",
+      "nss/lib/freebl/shvfy.c",
+      "nss/lib/freebl/sysrand.c",
+      "nss/lib/freebl/tlsprfalg.c",
+      "nss/lib/freebl/unix_rand.c",
+      "nss/lib/freebl/win_rand.c",
+      "nss/lib/nss/nss.h",
+      "nss/lib/nss/nssinit.c",
+      "nss/lib/nss/nssrenam.h",
+      "nss/lib/nss/utilwrap.c",
+      "nss/lib/pk11wrap/debug_module.c",
+      "nss/lib/pk11wrap/dev3hack.c",
+      "nss/lib/pk11wrap/dev3hack.h",
+      "nss/lib/pk11wrap/pk11akey.c",
+      "nss/lib/pk11wrap/pk11auth.c",
+      "nss/lib/pk11wrap/pk11cert.c",
+      "nss/lib/pk11wrap/pk11cxt.c",
+      "nss/lib/pk11wrap/pk11err.c",
+      "nss/lib/pk11wrap/pk11func.h",
+      "nss/lib/pk11wrap/pk11kea.c",
+      "nss/lib/pk11wrap/pk11list.c",
+      "nss/lib/pk11wrap/pk11load.c",
+      "nss/lib/pk11wrap/pk11mech.c",
+      "nss/lib/pk11wrap/pk11merge.c",
+      "nss/lib/pk11wrap/pk11nobj.c",
+      "nss/lib/pk11wrap/pk11obj.c",
+      "nss/lib/pk11wrap/pk11pars.c",
+      "nss/lib/pk11wrap/pk11pbe.c",
+      "nss/lib/pk11wrap/pk11pk12.c",
+      "nss/lib/pk11wrap/pk11pqg.c",
+      "nss/lib/pk11wrap/pk11pqg.h",
+      "nss/lib/pk11wrap/pk11priv.h",
+      "nss/lib/pk11wrap/pk11pub.h",
+      "nss/lib/pk11wrap/pk11sdr.c",
+      "nss/lib/pk11wrap/pk11sdr.h",
+      "nss/lib/pk11wrap/pk11skey.c",
+      "nss/lib/pk11wrap/pk11slot.c",
+      "nss/lib/pk11wrap/pk11util.c",
+      "nss/lib/pk11wrap/secmod.h",
+      "nss/lib/pk11wrap/secmodi.h",
+      "nss/lib/pk11wrap/secmodt.h",
+      "nss/lib/pk11wrap/secmodti.h",
+      "nss/lib/pk11wrap/secpkcs5.h",
+      "nss/lib/pkcs7/certread.c",
+      "nss/lib/pkcs7/p7common.c",
+      "nss/lib/pkcs7/p7create.c",
+      "nss/lib/pkcs7/p7decode.c",
+      "nss/lib/pkcs7/p7encode.c",
+      "nss/lib/pkcs7/p7local.c",
+      "nss/lib/pkcs7/p7local.h",
+      "nss/lib/pkcs7/pkcs7t.h",
+      "nss/lib/pkcs7/secmime.c",
+      "nss/lib/pkcs7/secmime.h",
+      "nss/lib/pkcs7/secpkcs7.h",
+      "nss/lib/pki/asymmkey.c",
+      "nss/lib/pki/certdecode.c",
+      "nss/lib/pki/certificate.c",
+      "nss/lib/pki/cryptocontext.c",
+      "nss/lib/pki/nsspki.h",
+      "nss/lib/pki/nsspkit.h",
+      "nss/lib/pki/pki.h",
+      "nss/lib/pki/pki3hack.c",
+      "nss/lib/pki/pki3hack.h",
+      "nss/lib/pki/pkibase.c",
+      "nss/lib/pki/pkim.h",
+      "nss/lib/pki/pkistore.c",
+      "nss/lib/pki/pkistore.h",
+      "nss/lib/pki/pkit.h",
+      "nss/lib/pki/pkitm.h",
+      "nss/lib/pki/symmkey.c",
+      "nss/lib/pki/tdcache.c",
+      "nss/lib/pki/trustdomain.c",
+      "nss/lib/smime/cms.h",
+      "nss/lib/smime/cmslocal.h",
+      "nss/lib/smime/cmsreclist.h",
+      "nss/lib/smime/cmst.h",
+      "nss/lib/smime/smime.h",
+      "nss/lib/softoken/fipsaudt.c",
+      "nss/lib/softoken/fipstest.c",
+      "nss/lib/softoken/fipstokn.c",
+      "nss/lib/softoken/jpakesftk.c",
+      "nss/lib/softoken/lgglue.c",
+      "nss/lib/softoken/lgglue.h",
+      "nss/lib/softoken/lowkey.c",
+      "nss/lib/softoken/lowkeyi.h",
+      "nss/lib/softoken/lowkeyti.h",
+      "nss/lib/softoken/lowpbe.c",
+      "nss/lib/softoken/lowpbe.h",
+      "nss/lib/softoken/padbuf.c",
+      "nss/lib/softoken/pkcs11.c",
+      "nss/lib/softoken/pkcs11c.c",
+      "nss/lib/softoken/pkcs11i.h",
+      "nss/lib/softoken/pkcs11ni.h",
+      "nss/lib/softoken/pkcs11u.c",
+      "nss/lib/softoken/sdb.c",
+      "nss/lib/softoken/sdb.h",
+      "nss/lib/softoken/sftkdb.c",
+      "nss/lib/softoken/sftkdb.h",
+      "nss/lib/softoken/sftkdbt.h",
+      "nss/lib/softoken/sftkdbti.h",
+      "nss/lib/softoken/sftkhmac.c",
+      "nss/lib/softoken/sftkpars.c",
+      "nss/lib/softoken/sftkpars.h",
+      "nss/lib/softoken/sftkpwd.c",
+      "nss/lib/softoken/softkver.c",
+      "nss/lib/softoken/softkver.h",
+      "nss/lib/softoken/softoken.h",
+      "nss/lib/softoken/softoknt.h",
+      "nss/lib/softoken/tlsprf.c",
+      "nss/lib/ssl/sslerr.h",
+      "nss/lib/util/SECerrs.h",
+      "nss/lib/util/base64.h",
+      "nss/lib/util/ciferfam.h",
+      "nss/lib/util/derdec.c",
+      "nss/lib/util/derenc.c",
+      "nss/lib/util/dersubr.c",
+      "nss/lib/util/dertime.c",
+      "nss/lib/util/errstrs.c",
+      "nss/lib/util/hasht.h",
+      "nss/lib/util/nssb64.h",
+      "nss/lib/util/nssb64d.c",
+      "nss/lib/util/nssb64e.c",
+      "nss/lib/util/nssb64t.h",
+      "nss/lib/util/nssilckt.h",
+      "nss/lib/util/nssilock.c",
+      "nss/lib/util/nssilock.h",
+      "nss/lib/util/nsslocks.h",
+      "nss/lib/util/nssrwlk.c",
+      "nss/lib/util/nssrwlk.h",
+      "nss/lib/util/nssrwlkt.h",
+      "nss/lib/util/nssutil.h",
+      "nss/lib/util/oidstring.c",
+      "nss/lib/util/pkcs11.h",
+      "nss/lib/util/pkcs11f.h",
+      "nss/lib/util/pkcs11n.h",
+      "nss/lib/util/pkcs11p.h",
+      "nss/lib/util/pkcs11t.h",
+      "nss/lib/util/pkcs11u.h",
+      "nss/lib/util/pkcs1sig.c",
+      "nss/lib/util/pkcs1sig.h",
+      "nss/lib/util/portreg.c",
+      "nss/lib/util/portreg.h",
+      "nss/lib/util/quickder.c",
+      "nss/lib/util/secalgid.c",
+      "nss/lib/util/secasn1.h",
+      "nss/lib/util/secasn1d.c",
+      "nss/lib/util/secasn1e.c",
+      "nss/lib/util/secasn1t.h",
+      "nss/lib/util/secasn1u.c",
+      "nss/lib/util/seccomon.h",
+      "nss/lib/util/secder.h",
+      "nss/lib/util/secdert.h",
+      "nss/lib/util/secdig.c",
+      "nss/lib/util/secdig.h",
+      "nss/lib/util/secdigt.h",
+      "nss/lib/util/secerr.h",
+      "nss/lib/util/secitem.c",
+      "nss/lib/util/secitem.h",
+      "nss/lib/util/secoid.c",
+      "nss/lib/util/secoid.h",
+      "nss/lib/util/secoidt.h",
+      "nss/lib/util/secport.c",
+      "nss/lib/util/secport.h",
+      "nss/lib/util/sectime.c",
+      "nss/lib/util/templates.c",
+      "nss/lib/util/utf8.c",
+      "nss/lib/util/utilmod.c",
+      "nss/lib/util/utilmodt.h",
+      "nss/lib/util/utilpars.c",
+      "nss/lib/util/utilpars.h",
+      "nss/lib/util/utilparst.h",
+      "nss/lib/util/utilrename.h",
+    ]
+
+    sources -= [
+      # mpi_arm.c is included by mpi_arm_mac.c.
+      # NOTE: mpi_arm.c can be used directly on Linux, so this exclusion will
+      # need to be made conditional if we start to build NSS on Linux.
+      "nss/lib/freebl/mpi/mpi_arm.c",
+
+      # primes.c is included by mpprime.c.
+      "nss/lib/freebl/mpi/primes.c",
+
+      # unix_rand.c and win_rand.c are included by sysrand.c.
+      "nss/lib/freebl/unix_rand.c",
+      "nss/lib/freebl/win_rand.c",
+
+      # debug_module.c is included by pk11load.c.
+      "nss/lib/pk11wrap/debug_module.c",
+    ]
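+
+    # That is, each excluded file is compiled as part of the file that
+    # includes it; e.g. sysrand.c contains (a hedged sketch of the upstream
+    # pattern, not copied verbatim):
+    #
+    #   #if defined(XP_UNIX)
+    #   #include "unix_rand.c"
+    #   #endif
+    #   #ifdef XP_WIN
+    #   #include "win_rand.c"
+    #   #endif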
+
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    if (is_win) {
+      configs -= [ "//build/config/win:unicode" ]  # Requires 8-bit mode.
+    }
+    configs += [
+      "//build/config/compiler:no_chromium_code",
+      "//build/config/compiler:no_size_t_to_int_warning",
+    ]
+    public_configs = [ ":nss_static_config" ]
+
+    cflags = []
+
+    # Only need the defines and includes not in nss_static_config.
+    defines = [
+      "MP_API_COMPATIBLE",
+      "NSS_DISABLE_DBM",
+      "RIJNDAEL_INCLUDE_TABLES",
+      "SHLIB_VERSION=\"3\"",
+      "SOFTOKEN_SHLIB_VERSION=\"3\"",
+    ]
+    include_dirs = [
+      "nss/lib/freebl/mpi",
+      "nss/lib/ssl",
+    ]
+
+    if (is_win) {
+      cflags += [ "/wd4101" ]  # Unreferenced local variable.
+    }
+
+    if (include_nss_libpkix) {
+      sources += [
+        "nss/lib/certhigh/certvfypkix.c",
+        "nss/lib/certhigh/certvfypkixprint.c",
+        "nss/lib/libpkix/include/pkix.h",
+        "nss/lib/libpkix/include/pkix_certsel.h",
+        "nss/lib/libpkix/include/pkix_certstore.h",
+        "nss/lib/libpkix/include/pkix_checker.h",
+        "nss/lib/libpkix/include/pkix_crlsel.h",
+        "nss/lib/libpkix/include/pkix_errorstrings.h",
+        "nss/lib/libpkix/include/pkix_params.h",
+        "nss/lib/libpkix/include/pkix_pl_pki.h",
+        "nss/lib/libpkix/include/pkix_pl_system.h",
+        "nss/lib/libpkix/include/pkix_results.h",
+        "nss/lib/libpkix/include/pkix_revchecker.h",
+        "nss/lib/libpkix/include/pkix_sample_modules.h",
+        "nss/lib/libpkix/include/pkix_util.h",
+        "nss/lib/libpkix/include/pkixt.h",
+        "nss/lib/libpkix/pkix/certsel/pkix_certselector.c",
+        "nss/lib/libpkix/pkix/certsel/pkix_certselector.h",
+        "nss/lib/libpkix/pkix/certsel/pkix_comcertselparams.c",
+        "nss/lib/libpkix/pkix/certsel/pkix_comcertselparams.h",
+        "nss/lib/libpkix/pkix/checker/pkix_basicconstraintschecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_basicconstraintschecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_certchainchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_certchainchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_crlchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_crlchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_ekuchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_ekuchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_expirationchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_expirationchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_namechainingchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_namechainingchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_nameconstraintschecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_nameconstraintschecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_ocspchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_ocspchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_policychecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_policychecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationmethod.c",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationmethod.h",
+        "nss/lib/libpkix/pkix/checker/pkix_signaturechecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_signaturechecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_targetcertchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_targetcertchecker.h",
+        "nss/lib/libpkix/pkix/crlsel/pkix_comcrlselparams.c",
+        "nss/lib/libpkix/pkix/crlsel/pkix_comcrlselparams.h",
+        "nss/lib/libpkix/pkix/crlsel/pkix_crlselector.c",
+        "nss/lib/libpkix/pkix/crlsel/pkix_crlselector.h",
+        "nss/lib/libpkix/pkix/params/pkix_procparams.c",
+        "nss/lib/libpkix/pkix/params/pkix_procparams.h",
+        "nss/lib/libpkix/pkix/params/pkix_resourcelimits.c",
+        "nss/lib/libpkix/pkix/params/pkix_resourcelimits.h",
+        "nss/lib/libpkix/pkix/params/pkix_trustanchor.c",
+        "nss/lib/libpkix/pkix/params/pkix_trustanchor.h",
+        "nss/lib/libpkix/pkix/params/pkix_valparams.c",
+        "nss/lib/libpkix/pkix/params/pkix_valparams.h",
+        "nss/lib/libpkix/pkix/results/pkix_buildresult.c",
+        "nss/lib/libpkix/pkix/results/pkix_buildresult.h",
+        "nss/lib/libpkix/pkix/results/pkix_policynode.c",
+        "nss/lib/libpkix/pkix/results/pkix_policynode.h",
+        "nss/lib/libpkix/pkix/results/pkix_valresult.c",
+        "nss/lib/libpkix/pkix/results/pkix_valresult.h",
+        "nss/lib/libpkix/pkix/results/pkix_verifynode.c",
+        "nss/lib/libpkix/pkix/results/pkix_verifynode.h",
+        "nss/lib/libpkix/pkix/store/pkix_store.c",
+        "nss/lib/libpkix/pkix/store/pkix_store.h",
+        "nss/lib/libpkix/pkix/top/pkix_build.c",
+        "nss/lib/libpkix/pkix/top/pkix_build.h",
+        "nss/lib/libpkix/pkix/top/pkix_lifecycle.c",
+        "nss/lib/libpkix/pkix/top/pkix_lifecycle.h",
+        "nss/lib/libpkix/pkix/top/pkix_validate.c",
+        "nss/lib/libpkix/pkix/top/pkix_validate.h",
+        "nss/lib/libpkix/pkix/util/pkix_error.c",
+        "nss/lib/libpkix/pkix/util/pkix_error.h",
+        "nss/lib/libpkix/pkix/util/pkix_errpaths.c",
+        "nss/lib/libpkix/pkix/util/pkix_list.c",
+        "nss/lib/libpkix/pkix/util/pkix_list.h",
+        "nss/lib/libpkix/pkix/util/pkix_logger.c",
+        "nss/lib/libpkix/pkix/util/pkix_logger.h",
+        "nss/lib/libpkix/pkix/util/pkix_tools.c",
+        "nss/lib/libpkix/pkix/util/pkix_tools.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_aiamgr.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_aiamgr.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_colcertstore.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_colcertstore.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpcertstore.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpcertstore.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpdefaultclient.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpdefaultclient.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_nsscontext.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_nsscontext.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_pk11certstore.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_pk11certstore.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_socket.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_socket.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_basicconstraints.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_basicconstraints.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_cert.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_cert.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyinfo.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyinfo.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicymap.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicymap.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyqualifier.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyqualifier.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crl.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crl.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crldp.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crldp.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crlentry.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crlentry.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_date.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_date.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_generalname.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_generalname.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_infoaccess.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_infoaccess.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_nameconstraints.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_nameconstraints.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspcertid.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspcertid.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocsprequest.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocsprequest.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspresponse.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspresponse.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_publickey.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_publickey.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_x500name.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_x500name.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bigint.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bigint.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bytearray.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bytearray.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_common.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_common.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_error.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_hashtable.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_hashtable.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_lifecycle.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_lifecycle.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mem.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mem.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_monitorlock.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_monitorlock.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mutex.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mutex.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_object.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_object.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_oid.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_oid.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_primhash.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_primhash.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_rwlock.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_rwlock.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_string.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_string.h",
+      ]
+
+      # Disable the LDAP code in libpkix.
+      defines += [ "NSS_PKIX_NO_LDAP" ]
+
+      include_dirs += [
+        "nss/lib/libpkix/include",
+        "nss/lib/libpkix/pkix/certsel",
+        "nss/lib/libpkix/pkix/checker",
+        "nss/lib/libpkix/pkix/crlsel",
+        "nss/lib/libpkix/pkix/params",
+        "nss/lib/libpkix/pkix/results",
+        "nss/lib/libpkix/pkix/store",
+        "nss/lib/libpkix/pkix/top",
+        "nss/lib/libpkix/pkix/util",
+        "nss/lib/libpkix/pkix_pl_nss/module",
+        "nss/lib/libpkix/pkix_pl_nss/pki",
+        "nss/lib/libpkix/pkix_pl_nss/system",
+      ]
+    } else {
+      defines += [ "NSS_DISABLE_LIBPKIX" ]
+    }
+
+    if (!include_nss_root_certs) {
+      defines += [ "NSS_DISABLE_ROOT_CERTS" ]
+    }
+
+    if (current_cpu == "x64" && !is_win) {
+      sources -= [
+        "nss/lib/freebl/chacha20/chacha20.c",
+        "nss/lib/freebl/poly1305/poly1305.c",
+      ]
+    } else {
+      sources -= [
+        "nss/lib/freebl/chacha20/chacha20_vec.c",
+        "nss/lib/freebl/poly1305/poly1305-donna-x64-sse2-incremental-source.c",
+      ]
+    }
+
+    if (is_mac || is_ios) {
+      sources -= [ "nss/lib/freebl/mpi/mpi_amd64.c" ]
+      cflags += [
+        "-include",
+        rebase_path("//third_party/nss/nss/lib/freebl/nss_build_config_mac.h",
+                    root_build_dir),
+      ]
+      defines += [
+        "XP_UNIX",
+        "DARWIN",
+        "HAVE_STRERROR",
+        "HAVE_BSD_FLOCK",
+        "SHLIB_SUFFIX=\"dylib\"",
+        "SHLIB_PREFIX=\"lib\"",
+        "SOFTOKEN_LIB_NAME=\"libsoftokn3.dylib\"",
+      ]
+
+      configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]
+    } else {
+      # Not Mac/iOS.
+      sources -= [ "nss/lib/freebl/mpi/mpi_arm_mac.c" ]
+    }
+
+    if (is_win) {
+      defines += [
+        "SHLIB_SUFFIX=\"dll\"",
+        "SHLIB_PREFIX=\"\"",
+        "SOFTOKEN_LIB_NAME=\"softokn3.dll\"",
+        "XP_PC",
+        "WIN32",
+        "WIN95",
+      ]
+
+      if (current_cpu == "x86") {
+        defines += [
+          "NSS_X86_OR_X64",
+          "NSS_X86",
+          "_X86_",
+          "MP_ASSEMBLY_MULTIPLY",
+          "MP_ASSEMBLY_SQUARE",
+          "MP_ASSEMBLY_DIV_2DX1D",
+          "MP_USE_UINT_DIGIT",
+          "MP_NO_MP_WORD",
+          "USE_HW_AES",
+          "INTEL_GCM",
+        ]
+        sources -= [ "nss/lib/freebl/mpi/mpi_amd64.c" ]
+      } else if (current_cpu == "x64") {
+        sources -= [
+          "nss/lib/freebl/intel-aes-x86-masm.asm",
+          "nss/lib/freebl/mpi/mpi_amd64.c",
+          "nss/lib/freebl/mpi/mpi_x86_asm.c",
+        ]
+        defines += [
+          "NSS_USE_64",
+          "NSS_X86_OR_X64",
+          "NSS_X64",
+          "_AMD64_",
+          "MP_CHAR_STORE_SLOW",
+          "MP_IS_LITTLE_ENDIAN",
+          "WIN64",
+        ]
+      }
+    } else {
+      # Not Windows.
+      sources -= [
+        # mpi_x86_asm.c contains MSVC inline assembly code.
+        "nss/lib/freebl/mpi/mpi_x86_asm.c",
+      ]
+    }
+
+    if (is_clang) {
+      cflags += [
+        # nss doesn"t explicitly cast between different enum types.
+        "-Wno-conversion",
+
+        # nss passes "const char*" through "void*".
+        "-Wno-incompatible-pointer-types",
+
+        # nss prefers `a && b || c` over `(a && b) || c`.
+        "-Wno-logical-op-parentheses",
+
+        # nss doesn"t use exhaustive switches on enums
+        "-Wno-switch",
+
+        # nss has some `unsigned < 0` checks.
+        "-Wno-tautological-compare",
+      ]
+    }
+
+    public_deps = [
+      ":nspr",
+    ]
+    deps = [
+      ":nspr",
+      "//third_party/sqlite",
+    ]
+
+    if (is_win && current_cpu == "x86") {
+      deps += [ ":nss_static_avx" ]
+    }
+  }
+}  # Windows/Mac/iOS.
diff --git a/build/secondary/tools/grit/BUILD.gn b/build/secondary/tools/grit/BUILD.gn
new file mode 100644
index 0000000..660bf1b
--- /dev/null
+++ b/build/secondary/tools/grit/BUILD.gn
@@ -0,0 +1,27 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This target creates a stamp file that depends on all the sources in the grit
+# directory. By depending on this, a target can force itself to be rebuilt if
+# grit itself changes.
+action("grit_sources") {
+  depfile = "$target_out_dir/grit_sources.d"
+  script = "//build/secondary/tools/grit/stamp_grit_sources.py"
+
+  inputs = [
+    "grit.py",
+  ]
+
+  # Note that we can't call this "grit_sources.stamp" because that file is
+  # implicitly created by GN for script actions.
+  outputs = [
+    "$target_out_dir/grit_sources.script.stamp",
+  ]
+
+  args = [
+    rebase_path("//tools/grit", root_build_dir),
+    rebase_path(outputs[0], root_build_dir),
+    rebase_path(depfile, root_build_dir),
+  ]
+}
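
For illustration, a hypothetical generator action can opt into this rebuild-on-grit-change behavior by depending on the stamp target; the target and script names below are invented:

  action("my_generator") {
    script = "my_generator.py"
    outputs = [ "$target_gen_dir/my_generated.h" ]
    args = [ rebase_path("$target_gen_dir/my_generated.h", root_build_dir) ]

    # Force a rebuild of this action whenever any grit source changes.
    deps = [ "//tools/grit:grit_sources" ]
  }
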
diff --git a/build/secondary/tools/grit/grit_rule.gni b/build/secondary/tools/grit/grit_rule.gni
new file mode 100644
index 0000000..bdf812f
--- /dev/null
+++ b/build/secondary/tools/grit/grit_rule.gni
@@ -0,0 +1,483 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Instantiate grit. This will produce a script target to run grit, and a
+# static library that compiles the .cc files.
+#
+# Parameters
+#
+#   source (required)
+#       Path to .grd file.
+#
+#   outputs (required)
+#       List of outputs from grit, relative to the target_gen_dir. Grit will
+#       verify at build time that this list is correct and will fail if there
+#       is a mismatch between the outputs specified by the .grd file and the
+#       outputs list here.
+#
+#       To get this list, you can look in the .grd file for
+#       <output filename="..." and put those filename here. The base directory
+#       of the list in Grit and the output list specified in the GN grit target
+#       are the same (the target_gen_dir) so you can generally copy the names
+#       exactly.
+#
+#       To get the list of outputs programmatically, run:
+#           python tools/grit/grit_info.py --outputs . path/to/your.grd
+#       And strip the leading "./" from the output files.
+#
+#   defines (optional)
+#       Extra defines to pass to grit (on top of the global grit_defines list).
+#
+#   grit_flags (optional)
+#       List of strings containing extra command-line flags to pass to Grit.
+#
+#   resource_ids (optional)
+#       Path to a grit "firstidsfile". Default is
+#       //tools/gritsettings/resource_ids. Set to "" to use the value specified
+#       in the <grit> nodes of the processed files.
+#
+#   output_dir (optional)
+#       Directory for generated files. If you specify this, you will often
+#       want to specify output_name if the target name is not particularly
+#       unique, since this can cause files from multiple grit targets to
+#       overwrite each other.
+#
+#   output_name (optional)
+#       Provide an alternate base name for the generated files, like the .d
+#       files. Normally these are based on the target name and go in the
+#       output_dir, but if multiple targets with the same name end up in
+#       the same output_dir, they can collide.
+#
+#   depfile_dir (optional)
+#       If set, used to store the depfile and corresponding stamp file.
+#       Defaults to output_dir
+#
+#   use_qualified_include (optional)
+#       If set, output_dir is not added to include_dirs.
+#
+#   configs (optional)
+#       List of additional configs to be applied to the generated target.
+#   deps  (optional)
+#   inputs  (optional)
+#       List of additional files required for grit to process the source file.
+#   visibility  (optional)
+#       Normal meaning.
+#
+# Example
+#
+#   grit("my_resources") {
+#     # Source and outputs are required.
+#     source = "myfile.grd"
+#     outputs = [
+#       "foo_strings.h",
+#       "foo_strings.pak",
+#     ]
+#
+#     grit_flags = [ "-E", "foo=bar" ]  # Optional extra flags.
+#     # You can also put deps here if the grit source depends on generated
+#     # files.
+#   }
+import("//build/config/chrome_build.gni")
+import("//build/config/crypto.gni")
+import("//build/config/features.gni")
+import("//build/config/ui.gni")
+
+grit_defines = []
+
+# Mac and iOS want Title Case strings.
+use_titlecase_in_grd_files = is_mac || is_ios
+if (use_titlecase_in_grd_files) {
+  grit_defines += [
+    "-D",
+    "use_titlecase",
+  ]
+}
+
+if (is_chrome_branded) {
+  grit_defines += [
+    "-D",
+    "_google_chrome",
+    "-E",
+    "CHROMIUM_BUILD=google_chrome",
+  ]
+} else {
+  grit_defines += [
+    "-D",
+    "_chromium",
+    "-E",
+    "CHROMIUM_BUILD=chromium",
+  ]
+}
+
+if (is_chromeos) {
+  grit_defines += [
+    "-D",
+    "chromeos",
+    "-D",
+    "scale_factors=2x",
+  ]
+}
+
+if (is_desktop_linux) {
+  grit_defines += [
+    "-D",
+    "desktop_linux",
+  ]
+}
+
+if (toolkit_views) {
+  grit_defines += [
+    "-D",
+    "toolkit_views",
+  ]
+}
+
+if (use_aura) {
+  grit_defines += [
+    "-D",
+    "use_aura",
+  ]
+}
+
+if (use_ash) {
+  grit_defines += [
+    "-D",
+    "use_ash",
+  ]
+}
+
+if (use_nss_certs) {
+  grit_defines += [
+    "-D",
+    "use_nss_certs",
+  ]
+}
+
+if (use_ozone) {
+  grit_defines += [
+    "-D",
+    "use_ozone",
+  ]
+}
+
+if (enable_image_loader_extension) {
+  grit_defines += [
+    "-D",
+    "image_loader_extension",
+  ]
+}
+
+if (enable_remoting) {
+  grit_defines += [
+    "-D",
+    "remoting",
+  ]
+}
+
+if (is_android) {
+  grit_defines += [
+    "-t",
+    "android",
+    "-E",
+    "ANDROID_JAVA_TAGGED_ONLY=true",
+  ]
+}
+
+if (is_mac || is_ios) {
+  grit_defines += [
+    "-D",
+    "scale_factors=2x",
+  ]
+}
+
+if (is_ios) {
+  grit_defines += [
+    "-t",
+    "ios",
+
+    # iOS uses a whitelist to filter resources.
+    "-w",
+    rebase_path("//build/ios/grit_whitelist.txt", root_build_dir),
+  ]
+}
+
+if (enable_extensions) {
+  grit_defines += [
+    "-D",
+    "enable_extensions",
+  ]
+}
+if (enable_media_router) {
+  grit_defines += [
+    "-D",
+    "enable_media_router",
+  ]
+}
+if (enable_plugins) {
+  grit_defines += [
+    "-D",
+    "enable_plugins",
+  ]
+}
+if (enable_basic_printing || enable_print_preview) {
+  grit_defines += [
+    "-D",
+    "enable_printing",
+  ]
+  if (enable_print_preview) {
+    grit_defines += [
+      "-D",
+      "enable_print_preview",
+    ]
+  }
+}
+if (enable_themes) {
+  grit_defines += [
+    "-D",
+    "enable_themes",
+  ]
+}
+if (enable_app_list) {
+  grit_defines += [
+    "-D",
+    "enable_app_list",
+  ]
+}
+if (enable_settings_app) {
+  grit_defines += [
+    "-D",
+    "enable_settings_app",
+  ]
+}
+if (enable_google_now) {
+  grit_defines += [
+    "-D",
+    "enable_google_now",
+  ]
+}
+
+# Note: use_concatenated_impulse_responses is omitted. It is never used and
+# should probably be removed from the GYP build.
+if (enable_webrtc) {
+  grit_defines += [
+    "-D",
+    "enable_webrtc",
+  ]
+}
+if (enable_hangout_services_extension) {
+  grit_defines += [
+    "-D",
+    "enable_hangout_services_extension",
+  ]
+}
+if (enable_task_manager) {
+  grit_defines += [
+    "-D",
+    "enable_task_manager",
+  ]
+}
+if (enable_notifications) {
+  grit_defines += [
+    "-D",
+    "enable_notifications",
+  ]
+}
+if (enable_wifi_bootstrapping) {
+  grit_defines += [
+    "-D",
+    "enable_wifi_bootstrapping",
+  ]
+}
+if (enable_service_discovery) {
+  grit_defines += [
+    "-D",
+    "enable_service_discovery",
+  ]
+}
+if (mac_views_browser) {
+  grit_defines += [
+    "-D",
+    "mac_views_browser",
+  ]
+}
+
+grit_resource_id_file = "//tools/gritsettings/resource_ids"
+grit_info_script = "//tools/grit/grit_info.py"
+
+template("grit") {
+  assert(defined(invoker.source),
+         "\"source\" must be defined for the grit template $target_name")
+
+  grit_inputs = [ invoker.source ]
+
+  if (defined(invoker.resource_ids)) {
+    resource_ids = invoker.resource_ids
+  } else {
+    resource_ids = grit_resource_id_file
+  }
+  if (resource_ids != "") {
+    # The script depends on the ID file. Only add this dependency if the ID
+    # file is specified.
+    grit_inputs += [ resource_ids ]
+  }
+
+  if (defined(invoker.output_dir)) {
+    output_dir = invoker.output_dir
+  } else {
+    output_dir = target_gen_dir
+  }
+
+  if (defined(invoker.output_name)) {
+    grit_output_name = invoker.output_name
+  } else {
+    grit_output_name = target_name
+  }
+
+  if (defined(invoker.depfile_dir)) {
+    depfile_dir = invoker.depfile_dir
+  } else {
+    depfile_dir = output_dir
+  }
+
+  # These are all passed as arguments to the script so have to be relative to
+  # the build directory.
+  if (resource_ids != "") {
+    resource_ids = rebase_path(resource_ids, root_build_dir)
+  }
+  rebased_output_dir = rebase_path(output_dir, root_build_dir)
+  source_path = rebase_path(invoker.source, root_build_dir)
+
+  if (defined(invoker.grit_flags)) {
+    grit_flags = invoker.grit_flags
+  } else {
+    grit_flags = []  # These are optional so default to empty list.
+  }
+
+  assert_files_flags = []
+
+  # We want to make sure the declared outputs actually match what Grit is
+  # writing. We write the list to a file (some of the output lists are long
+  # enough to not fit on a Windows command line) and ask Grit to verify those
+  # are the actual outputs at runtime.
+  asserted_list_file =
+      "$target_out_dir/${grit_output_name}_expected_outputs.txt"
+  write_file(asserted_list_file,
+             rebase_path(invoker.outputs, root_build_dir, output_dir))
+  assert_files_flags += [ "--assert-file-list=" +
+                          rebase_path(asserted_list_file, root_build_dir) ]
+  grit_outputs =
+      get_path_info(rebase_path(invoker.outputs, ".", output_dir), "abspath")
+
+  # The config and the action below get this visibility so only the generated
+  # source set can depend on them. The variable "target_name" will get
+  # overwritten inside the inner scopes so we need to compute it here.
+  target_visibility = [ ":$target_name" ]
+
+  # The current grit setup makes a file in $output_dir/grit/foo.h that
+  # the source code expects to include via "grit/foo.h". It would be nice to
+  # change this to including absolute paths relative to the root gen directory
+  # (like "mycomponent/foo.h"). This config sets up the include path.
+  grit_config = target_name + "_grit_config"
+  config(grit_config) {
+    if (!defined(invoker.use_qualified_include) ||
+        !invoker.use_qualified_include) {
+      include_dirs = [ output_dir ]
+    }
+    visibility = target_visibility
+  }
+
+  grit_custom_target = target_name + "_grit"
+  action(grit_custom_target) {
+    script = "//tools/grit/grit.py"
+    inputs = grit_inputs
+
+    depfile = "$depfile_dir/${grit_output_name}_stamp.d"
+    outputs = [ "${depfile}.stamp" ] + grit_outputs
+
+    args = [
+      "-i",
+      source_path,
+      "build",
+    ]
+    if (resource_ids != "") {
+      args += [
+        "-f",
+        resource_ids,
+      ]
+    }
+    args += [
+              "-o",
+              rebased_output_dir,
+              "--depdir",
+              ".",
+              "--depfile",
+              rebase_path(depfile, root_build_dir),
+              "--write-only-new=1",
+              "--depend-on-stamp",
+            ] + grit_defines
+
+    # Add extra defines with -D flags.
+    if (defined(invoker.defines)) {
+      foreach(i, invoker.defines) {
+        args += [
+          "-D",
+          i,
+        ]
+      }
+    }
+
+    args += grit_flags + assert_files_flags
+
+    if (defined(invoker.visibility)) {
+      # This needs to include both what the invoker specified (since they
+      # probably include generated headers from this target), as well as the
+      # generated source set (since there's no guarantee that the visibility
+      # specified by the invoker includes our target).
+      #
+      # Only define visibility at all if the invoker specified it. Otherwise,
+      # we want to keep the public "no visibility specified" default.
+      visibility = target_visibility + invoker.visibility
+    }
+
+    deps = [
+      "//tools/grit:grit_sources",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+  }
+
+  # This is the thing that people actually link with, it must be named the
+  # same as the argument the template was invoked with.
+  source_set(target_name) {
+    # Since we generate a file, we need to be run before the targets that
+    # depend on us.
+    sources = grit_outputs
+
+    # Deps set on the template invocation will go on the action that runs
+    # grit above rather than this library. This target needs to depend on the
+    # action publicly so other scripts can take the outputs from the grit
+    # script as inputs.
+    public_deps = [
+      ":$grit_custom_target",
+    ]
+    public_configs = [ ":$grit_config" ]
+
+    if (defined(invoker.public_configs)) {
+      public_configs += invoker.public_configs
+    }
+
+    if (defined(invoker.configs)) {
+      configs += invoker.configs
+    }
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    output_name = grit_output_name
+  }
+}
diff --git a/build/secondary/tools/grit/repack.gni b/build/secondary/tools/grit/repack.gni
new file mode 100644
index 0000000..1030674
--- /dev/null
+++ b/build/secondary/tools/grit/repack.gni
@@ -0,0 +1,47 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file defines a template to invoke grit repack in a consistent manner.
+#
+# Parameters:
+#   sources  [required]
+#       List of pak files that need to be combined.
+#
+#   output  [required]
+#       File name (single string) of the output file.
+#
+#   repack_options  [optional]
+#       List of extra arguments to pass.
+#
+#   deps  [optional]
+#   visibility  [optional]
+#       Normal meaning.
+template("repack") {
+  action(target_name) {
+    assert(defined(invoker.sources), "Need sources for $target_name")
+    assert(defined(invoker.output), "Need output for $target_name")
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//tools/grit/grit/format/repack.py"
+
+    inputs = invoker.sources
+    outputs = [
+      invoker.output,
+    ]
+
+    args = []
+    if (defined(invoker.repack_options)) {
+      args += invoker.repack_options
+    }
+    args += [ rebase_path(invoker.output, root_build_dir) ]
+    args += rebase_path(invoker.sources, root_build_dir)
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
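
A minimal sketch of invoking this template, with hypothetical target name and pak paths:

  repack("packed_resources") {
    sources = [
      "$root_gen_dir/my_app/app_resources.pak",
      "$root_gen_dir/my_app/theme_resources.pak",
    ]
    output = "$root_out_dir/resources.pak"
  }
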
diff --git a/build/secondary/tools/grit/stamp_grit_sources.py b/build/secondary/tools/grit/stamp_grit_sources.py
new file mode 100644
index 0000000..d43d4b8
--- /dev/null
+++ b/build/secondary/tools/grit/stamp_grit_sources.py
@@ -0,0 +1,55 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script enumerates the files in the given directory, writing an empty
+# stamp file and a .d file listing the inputs required to make the stamp. This
+# allows us to dynamically depend on the grit sources without enumerating the
+# grit directory for every invocation of grit (which is what adding the source
+# files to every .grd file's .d file would entail) or shelling out to grit
+# synchronously during GN execution to get the list (which would be slow).
+#
+# Usage:
+#    stamp_grit_sources.py <directory> <stamp-file> <.d-file>
+
+import os
+import sys
+
+def GritSourceFiles(grit_root_dir):
+  files = []
+  for root, _, filenames in os.walk(grit_root_dir):
+    grit_src = [os.path.join(root, f) for f in filenames
+                if f.endswith('.py') and not f.endswith('_unittest.py')]
+    files.extend(grit_src)
+  files = [f.replace('\\', '/') for f in files]
+  return sorted(files)
+
+
+def WriteDepFile(dep_file, stamp_file, source_files):
+  with open(dep_file, "w") as f:
+    f.write(stamp_file)
+    f.write(": ")
+    f.write(' '.join(source_files))
+
+
+def WriteStampFile(stamp_file):
+  with open(stamp_file, "w"):
+    pass
+
+
+def main(argv):
+  if len(argv) != 4:
+    print "Error: expecting 3 args."
+    return 1
+
+  grit_root_dir = argv[1]
+  stamp_file = argv[2]
+  dep_file = argv[3]
+
+  WriteStampFile(stamp_file)
+  WriteDepFile(dep_file, stamp_file, GritSourceFiles(grit_root_dir))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
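
For reference, the .d file this writes is a single Make-style rule, with the stamp on the left and every discovered grit source on the right (paths hypothetical):

  gen/grit_sources.script.stamp: tools/grit/grit.py tools/grit/grit/grit_runner.py
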
diff --git a/build/set_clang_warning_flags.gypi b/build/set_clang_warning_flags.gypi
new file mode 100644
index 0000000..f6d7aea
--- /dev/null
+++ b/build/set_clang_warning_flags.gypi
@@ -0,0 +1,58 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included to set clang-specific compiler flags.
+# To use this, the following variables can be defined:
+#   clang_warning_flags:       list: Compiler flags to pass to clang.
+#   clang_warning_flags_unset: list: Compiler flags to not pass to clang.
+#
+# Only use this in third-party code. In chromium_code, fix your code to not
+# warn instead!
+#
+# Note that the gypi file is included in target_defaults, so it does not need
+# to be explicitly included.
+#
+# Warning flags set by this will be used on all platforms. If you want to set
+# warning flags on only some platforms, you have to do so manually.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_target',
+#   'variables': {
+#     'clang_warning_flags': ['-Wno-awesome-warning'],
+#     'clang_warning_flags_unset': ['-Wpreviously-set-flag'],
+#   }
+# }
+
+{
+  'variables': {
+    'clang_warning_flags_unset%': [],  # Provide a default value.
+  },
+  'conditions': [
+    ['clang==1', {
+      # This uses >@ instead of <@ to also see clang_warning_flags set in
+      # targets directly, not just the clang_warning_flags in target_defaults.
+      'cflags': [ '>@(clang_warning_flags)' ],
+      'cflags!': [ '>@(clang_warning_flags_unset)' ],
+      'xcode_settings': {
+        'WARNING_CFLAGS': ['>@(clang_warning_flags)'],
+        'WARNING_CFLAGS!': ['>@(clang_warning_flags_unset)'],
+      },
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'AdditionalOptions': [ '>@(clang_warning_flags)' ],
+          'AdditionalOptions!': [ '>@(clang_warning_flags_unset)' ],
+        },
+      },
+    }],
+    ['clang==0 and host_clang==1', {
+      'target_conditions': [
+        ['_toolset=="host"', {
+          'cflags': [ '>@(clang_warning_flags)' ],
+          'cflags!': [ '>@(clang_warning_flags_unset)' ],
+        }],
+      ],
+    }],
+  ],
+}
diff --git a/build/shim_headers.gypi b/build/shim_headers.gypi
new file mode 100644
index 0000000..56d8d3a
--- /dev/null
+++ b/build/shim_headers.gypi
@@ -0,0 +1,60 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to handle shim headers
+# in a consistent manner. To use this the following variables need to be
+# defined:
+#   headers_root_path: string: path to directory containing headers
+#   header_filenames: list: list of header file names
+
+{
+  'variables': {
+    'shim_headers_path': '<(SHARED_INTERMEDIATE_DIR)/shim_headers/<(_target_name)/<(_toolset)',
+    'shim_generator_additional_args%': [],
+  },
+  'include_dirs++': [
+    '<(shim_headers_path)',
+  ],
+  'all_dependent_settings': {
+    # Repeating this with different numbers of plusses is unfortunately required
+    # to make sure that even if this include is inside nested conditions/etc, it
+    # still gets inserted at the beginning of the include_dirs list. See
+    # http://crbug.com/263818 for details.
+    'include_dirs+++': [
+      '<(shim_headers_path)',
+    ],
+    'include_dirs++++': [
+      '<(shim_headers_path)',
+    ],
+    'include_dirs+++++': [
+      '<(shim_headers_path)',
+    ],
+  },
+  'actions': [
+    {
+      'variables': {
+        'generator_path': '<(DEPTH)/tools/generate_shim_headers/generate_shim_headers.py',
+        'generator_args': [
+          '--headers-root', '<(headers_root_path)',
+          '--output-directory', '<(shim_headers_path)',
+          '<@(shim_generator_additional_args)',
+          '<@(header_filenames)',
+        ],
+      },
+      'action_name': 'generate_<(_target_name)_shim_headers',
+      'inputs': [
+        '<(generator_path)',
+      ],
+      'outputs': [
+        '<!@pymod_do_main(generate_shim_headers <@(generator_args) --outputs)',
+      ],
+      'action': ['python',
+                 '<(generator_path)',
+                 '<@(generator_args)',
+                 '--generate',
+      ],
+      'message': 'Generating <(_target_name) shim headers',
+    },
+  ],
+}
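
As a sketch, a hypothetical shim target would set the two required variables and include this file; the target name and paths here are invented:

  {
    'target_name': 'zlib_shim',
    'type': 'none',
    'variables': {
      'headers_root_path': '../third_party/zlib',
      'header_filenames': [ 'zlib.h' ],
    },
    'includes': [ '../build/shim_headers.gypi' ],
  }
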
diff --git a/build/slave/OWNERS b/build/slave/OWNERS
new file mode 100644
index 0000000..f562c92
--- /dev/null
+++ b/build/slave/OWNERS
@@ -0,0 +1,20 @@
+set noparent
+agable@chromium.org
+agable@google.com
+cmp@chromium.org
+cmp@google.com
+dpranke@chromium.org
+iannucci@chromium.org
+iannucci@google.com
+johnw@chromium.org
+johnw@google.com
+maruel@chromium.org
+maruel@google.com
+mmoss@chromium.org
+mmoss@google.com
+pschmidt@chromium.org
+pschmidt@google.com
+stip@chromium.org
+stip@google.com
+szager@chromium.org
+szager@google.com
diff --git a/build/slave/README b/build/slave/README
new file mode 100644
index 0000000..e3718b2
--- /dev/null
+++ b/build/slave/README
@@ -0,0 +1,8 @@
+This directory contains configuration information for the
+build system.
+
+* Under recipes, the buildsystem should use only this directory as an
+  entry point into src/.
+
+* Scripts in this directory must not import from outside this directory or
+  shell out to scripts outside this directory.
diff --git a/build/some.gyp b/build/some.gyp
new file mode 100644
index 0000000..44a1dd5
--- /dev/null
+++ b/build/some.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      'target_name': 'some',
+      'type': 'none',
+      'dependencies': [
+        # This file is intended to be locally modified. List the targets you use
+        # regularly. The generated some.sln will contain projects for only
+        # those targets and the targets they are transitively dependent on. This
+        # can result in a solution that loads and unloads faster in Visual
+        # Studio.
+        #
+        # Tip: Create a dummy CL to hold your local edits to this file, so they
+        # don't accidentally get added to another CL that you are editing.
+        #
+        # Example:
+        # '../chrome/chrome.gyp:chrome',
+      ],
+    },
+  ],
+}
diff --git a/build/symlink.py b/build/symlink.py
new file mode 100755
index 0000000..1c5d3dd
--- /dev/null
+++ b/build/symlink.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Make a symlink and optionally touch a file (to handle dependencies)."""
+import errno
+import optparse
+import os.path
+import shutil
+import sys
+def Main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-f', '--force', action='store_true')
+  parser.add_option('--touch')
+  options, args = parser.parse_args(argv[1:])
+  if len(args) < 2:
+    parser.error('at least two arguments required.')
+  target = args[-1]
+  sources = args[:-1]
+  for s in sources:
+    t = os.path.join(target, os.path.basename(s))
+    if len(sources) == 1 and not os.path.isdir(target):
+      t = target
+    try:
+      os.symlink(s, t)
+    except OSError, e:
+      if e.errno == errno.EEXIST and options.force:
+        if os.path.isdir(t):
+          shutil.rmtree(t, ignore_errors=True)
+        else:
+          os.remove(t)
+        os.symlink(s, t)
+      else:
+        raise
+  if options.touch:
+    with open(options.touch, 'w') as f:
+      pass
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv))
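
Example invocation with hypothetical paths; the final argument is the link to create, -f replaces an existing file or directory, and --touch updates a stamp file for dependency tracking:

  build/symlink.py -f --touch out/tool.stamp third_party/tool/tool.py out/tool.py
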
diff --git a/build/temp_gyp/README.chromium b/build/temp_gyp/README.chromium
new file mode 100644
index 0000000..8045d61
--- /dev/null
+++ b/build/temp_gyp/README.chromium
@@ -0,0 +1,3 @@
+This directory will be removed once the files in it are committed upstream and
+Chromium imports an upstream revision with these files.  Contact mark for
+details.
diff --git a/build/temp_gyp/pdfsqueeze.gyp b/build/temp_gyp/pdfsqueeze.gyp
new file mode 100644
index 0000000..2b3b1ff
--- /dev/null
+++ b/build/temp_gyp/pdfsqueeze.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'pdfsqueeze',
+      'type': 'executable',
+      'sources': [
+        '../../third_party/pdfsqueeze/pdfsqueeze.m',
+      ],
+      'defines': [
+        # Use defines to map the full path names that will be used for
+        # the vars into the short forms expected by pdfsqueeze.m.
+        '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter=ApplyGenericRGB_qfilter',
+        '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter_len=ApplyGenericRGB_qfilter_len',
+      ],
+      'include_dirs': [
+        '<(INTERMEDIATE_DIR)',
+      ],
+      'libraries': [
+        '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+        '$(SDKROOT)/System/Library/Frameworks/Quartz.framework',
+      ],
+      'actions': [
+        {
+          'action_name': 'Generate inline filter data',
+          'inputs': [
+            '../../third_party/pdfsqueeze/ApplyGenericRGB.qfilter',
+          ],
+          'outputs': [
+            '<(INTERMEDIATE_DIR)/ApplyGenericRGB.h',
+          ],
+          'action': ['xxd', '-i', '<@(_inputs)', '<@(_outputs)'],
+        },
+      ],
+    },
+  ],
+}
diff --git a/build/toolchain/OWNERS b/build/toolchain/OWNERS
new file mode 100644
index 0000000..c6cda3f
--- /dev/null
+++ b/build/toolchain/OWNERS
@@ -0,0 +1,3 @@
+brettw@chromium.org
+dpranke@chromium.org
+scottmg@chromium.org
diff --git a/build/toolchain/android/BUILD.gn b/build/toolchain/android/BUILD.gn
new file mode 100644
index 0000000..e543fc6
--- /dev/null
+++ b/build/toolchain/android/BUILD.gn
@@ -0,0 +1,155 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")  # Imports android/config.gni.
+import("//build/toolchain/ccache.gni")
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+# The Android GCC toolchains share most of the same parameters, so we have this
+# wrapper around gcc_toolchain to avoid duplication of logic.
+#
+# Parameters:
+#  - android_ndk_sysroot
+#      Sysroot for this architecture.
+#  - android_ndk_lib_dir
+#      Subdirectory inside of android_ndk_sysroot where libs go.
+#  - tool_prefix
+#      Prefix to be added to the tool names.
+#  - toolchain_cpu
+#      Same as gcc_toolchain
+template("android_gcc_toolchain") {
+  gcc_toolchain(target_name) {
+    # Make our manually injected libs relative to the build dir.
+    android_ndk_lib = rebase_path(
+            invoker.android_ndk_sysroot + "/" + invoker.android_ndk_lib_dir,
+            root_build_dir)
+
+    libs_section_prefix = "$android_ndk_lib/crtbegin_dynamic.o"
+    libs_section_postfix = "$android_ndk_lib/crtend_android.o"
+
+    solink_libs_section_prefix = "$android_ndk_lib/crtbegin_so.o"
+    solink_libs_section_postfix = "$android_ndk_lib/crtend_so.o"
+
+    # The tools should be run relative to the build dir.
+    tool_prefix = rebase_path(invoker.tool_prefix, root_build_dir)
+
+    if (use_goma) {
+      assert(!use_ccache, "Goma and ccache can't be used together.")
+      compiler_prefix = "$goma_dir/gomacc "
+    } else if (use_ccache) {
+      compiler_prefix = "ccache "
+    } else {
+      compiler_prefix = ""
+    }
+
+    is_clang = invoker.is_clang
+    if (is_clang) {
+      prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                           root_build_dir)
+
+      cc = compiler_prefix + prefix + "/clang"
+      cxx = compiler_prefix + prefix + "/clang++"
+    } else {
+      cc = compiler_prefix + tool_prefix + "gcc"
+      cxx = compiler_prefix + tool_prefix + "g++"
+    }
+
+    ar = tool_prefix + "ar"
+    ld = cxx
+    readelf = compiler_prefix + tool_prefix + "readelf"
+    nm = compiler_prefix + tool_prefix + "nm"
+
+    toolchain_os = "android"
+    toolchain_cpu = invoker.toolchain_cpu
+
+    # We make the assumption that the gcc_toolchain will produce a soname with
+    # the following definition.
+    soname = "{{target_output_name}}{{output_extension}}"
+
+    stripped_soname = "lib.stripped/${soname}"
+    temp_stripped_soname = "${stripped_soname}.tmp"
+
+    android_strip = "${tool_prefix}strip"
+
+    strip_command =
+        "$android_strip --strip-unneeded -o $temp_stripped_soname $soname"
+    replace_command = "if ! cmp -s $temp_stripped_soname $stripped_soname; then mv $temp_stripped_soname $stripped_soname; fi"
+    postsolink = "$strip_command && $replace_command"
+    solink_outputs = [ stripped_soname ]
+    default_output_extension = android_product_extension
+
+    # We make the assumption that the gcc_toolchain will produce an exe with
+    # the following definition.
+    exe = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"
+    stripped_exe = "exe.stripped/$exe"
+    postlink = "$android_strip --strip-unneeded -o $stripped_exe $exe"
+    link_outputs = [ stripped_exe ]
+  }
+}
+
+template("android_gcc_toolchains_helper") {
+  android_gcc_toolchain(target_name) {
+    android_ndk_sysroot = invoker.android_ndk_sysroot
+    android_ndk_lib_dir = invoker.android_ndk_lib_dir
+    tool_prefix = invoker.tool_prefix
+    toolchain_cpu = invoker.toolchain_cpu
+  }
+  android_gcc_toolchain("clang_$target_name") {
+    android_ndk_sysroot = invoker.android_ndk_sysroot
+    android_ndk_lib_dir = invoker.android_ndk_lib_dir
+    tool_prefix = invoker.tool_prefix
+    toolchain_cpu = invoker.toolchain_cpu
+    is_clang = true
+  }
+}
+
+android_gcc_toolchains_helper("x86") {
+  android_ndk_sysroot = "$android_ndk_root/$x86_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib"
+
+  tool_prefix = "$x86_android_toolchain_root/bin/i686-linux-android-"
+  toolchain_cpu = "x86"
+}
+
+android_gcc_toolchains_helper("arm") {
+  android_ndk_sysroot = "$android_ndk_root/$arm_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib"
+
+  tool_prefix = "$arm_android_toolchain_root/bin/arm-linux-androideabi-"
+  toolchain_cpu = "arm"
+}
+
+android_gcc_toolchains_helper("mipsel") {
+  android_ndk_sysroot = "$android_ndk_root/$mips_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib"
+
+  tool_prefix = "$mips_android_toolchain_root/bin/mipsel-linux-android-"
+  toolchain_cpu = "mipsel"
+}
+
+android_gcc_toolchains_helper("x64") {
+  android_ndk_sysroot = "$android_ndk_root/$x86_64_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib64"
+
+  tool_prefix = "$x86_64_android_toolchain_root/bin/x86_64-linux-android-"
+  toolchain_cpu = "x86_64"
+}
+
+android_gcc_toolchains_helper("arm64") {
+  android_ndk_sysroot = "$android_ndk_root/$arm64_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib"
+
+  tool_prefix = "$arm64_android_toolchain_root/bin/arm-linux-androideabi-"
+  toolchain_cpu = "aarch64"
+}
+
+android_gcc_toolchains_helper("mips64el") {
+  android_ndk_sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib64"
+
+  tool_prefix = "$mips64_android_toolchain_root/bin/mipsel-linux-android-"
+  toolchain_cpu = "mipsel64el"
+}
diff --git a/build/toolchain/ccache.gni b/build/toolchain/ccache.gni
new file mode 100644
index 0000000..806e079
--- /dev/null
+++ b/build/toolchain/ccache.gni
@@ -0,0 +1,25 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of ccache - a c/c++ compiler cache which can
+# greatly reduce recompilation times.
+#
+# TIPS:
+#
+# Set clang_use_chrome_plugins=false if using ccache 3.1.9 or earlier, since
+# these versions don't support -Xclang.  (3.1.10 and later will silently
+# ignore -Xclang, so it doesn't matter if you disable clang_use_chrome_plugins
+# or not).
+#
+# Use ccache 3.2 or later to avoid clang unused argument warnings:
+# https://bugzilla.samba.org/show_bug.cgi?id=8118
+#
+# To avoid -Wparentheses-equality clang warnings, at some cost in terms of
+# speed, you can do:
+# export CCACHE_CPP2=yes
+
+declare_args() {
+  # Set to true to enable ccache.  Probably doesn't work on Windows.
+  use_ccache = false
+}
diff --git a/build/toolchain/clang.gni b/build/toolchain/clang.gni
new file mode 100644
index 0000000..c680384
--- /dev/null
+++ b/build/toolchain/clang.gni
@@ -0,0 +1,9 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Enable the optional type profiler in Clang, which will tag heap allocations
+  # with the allocation type.
+  use_clang_type_profiler = false
+}
diff --git a/build/toolchain/cros/BUILD.gn b/build/toolchain/cros/BUILD.gn
new file mode 100644
index 0000000..140958b
--- /dev/null
+++ b/build/toolchain/cros/BUILD.gn
@@ -0,0 +1,35 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+declare_args() {
+  # The CrOS build system supports many different kinds of targets across
+  # many different architectures. Bringing your own toolchain is also supported,
+  # so it's actually impossible to enumerate all toolchains for all targets
+  # as GN toolchain specifications.
+  # These arguments provide a mechanism for specifying your CC, CXX and AR at
+  # buildfile-generation time, allowing the CrOS build system to always use
+  # the right tools for the current target.
+  cros_target_cc = ""
+  cros_target_cxx = ""
+  cros_target_ar = ""
+}
+
+gcc_toolchain("target") {
+  assert(cros_target_cc != "", "Must provide target CC.")
+  assert(cros_target_cxx != "", "Must provide target CXX.")
+  assert(cros_target_ar != "", "Must provide target AR.")
+
+  cc = "${cros_target_cc}"
+  cxx = "${cros_target_cxx}"
+
+  ar = "${cros_target_ar}"
+  ld = cxx
+
+  toolchain_cpu = "${target_cpu}"
+  toolchain_os = "linux"
+  is_clang = is_clang
+}
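
For example, a hypothetical args.gn for an ARM board build could supply (tool names are illustrative):

  cros_target_cc = "armv7a-cros-linux-gnueabi-gcc"
  cros_target_cxx = "armv7a-cros-linux-gnueabi-g++"
  cros_target_ar = "armv7a-cros-linux-gnueabi-ar"
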
diff --git a/build/toolchain/fnl/BUILD.gn b/build/toolchain/fnl/BUILD.gn
new file mode 100644
index 0000000..1e4a075
--- /dev/null
+++ b/build/toolchain/fnl/BUILD.gn
@@ -0,0 +1,24 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/gcc_toolchain.gni")
+
+declare_args() {
+  toolchain_prefix = ""
+}
+
+gcc_toolchain("target") {
+  assert(toolchain_prefix != "", "Must provide toolchain_prefix")
+
+  cc = "${toolchain_prefix}gcc"
+  cxx = "${toolchain_prefix}g++"
+  ar = "${toolchain_prefix}ar"
+  ld = cxx
+  readelf = "${toolchain_prefix}readelf"
+  nm = "${toolchain_prefix}nm"
+
+  toolchain_cpu = "${target_cpu}"
+  toolchain_os = "linux"
+  is_clang = is_clang
+}
diff --git a/build/toolchain/gcc_toolchain.gni b/build/toolchain/gcc_toolchain.gni
new file mode 100644
index 0000000..bb1d791
--- /dev/null
+++ b/build/toolchain/gcc_toolchain.gni
@@ -0,0 +1,252 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This value will be inherited in the toolchain below.
+concurrent_links = exec_script("get_concurrent_links.py", [], "value")
+
+# This template defines a toolchain for something that works like gcc
+# (including clang).
+#
+# It requires the following variables specifying the executables to run:
+#  - cc
+#  - cxx
+#  - ar
+#  - ld
+#  - readelf
+#  - nm
+# and the following which is used in the toolchain_args
+#  - toolchain_cpu  (What "current_cpu" should be set to when invoking a
+#                    build using this toolchain.)
+#  - toolchain_os  (What "current_os" should be set to when invoking a
+#                   build using this toolchain.)
+#
+# Optional parameters:
+#  - libs_section_prefix
+#  - libs_section_postfix
+#      The contents of these strings, if specified, will be placed around
+#      the libs section of the linker line. It allows one to inject libraries
+#      at the beginning and end for all targets in a toolchain.
+#  - solink_libs_section_prefix
+#  - solink_libs_section_postfix
+#      Same as libs_section_{pre,post}fix except used for solink instead of link.
+#  - postsolink
+#      The content of this string, if specified, will be appended to the solink
+#      command.
+#  - deps
+#      Just forwarded to the toolchain definition.
+#  - is_clang
+#  - strip
+#      Location of the strip executable. When specified, strip will be run on
+#      all executables as they are built. The unstripped artifacts will be
+#      kept in exe.unstripped/.
+template("gcc_toolchain") {
+  toolchain(target_name) {
+    assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
+    assert(defined(invoker.cxx), "gcc_toolchain() must specify a \"cxx\" value")
+    assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
+    assert(defined(invoker.ld), "gcc_toolchain() must specify a \"ld\" value")
+    assert(defined(invoker.readelf),
+           "gcc_toolchain() must specify a \"readelf\" value")
+    assert(defined(invoker.nm), "gcc_toolchain() must specify a \"nm\" value")
+    assert(defined(invoker.toolchain_cpu),
+           "gcc_toolchain() must specify a \"toolchain_cpu\"")
+    assert(defined(invoker.toolchain_os),
+           "gcc_toolchain() must specify a \"toolchain_os\"")
+
+    # We can't do string interpolation ($ in strings) on things with dots in
+    # them. To allow us to use $cc below, for example, we create copies of
+    # these values in our scope.
+    cc = invoker.cc
+    cxx = invoker.cxx
+    ar = invoker.ar
+    ld = invoker.ld
+    readelf = invoker.readelf
+    nm = invoker.nm
+
+    # Bring these into our scope for string interpolation with default values.
+    if (defined(invoker.libs_section_prefix)) {
+      libs_section_prefix = invoker.libs_section_prefix
+    } else {
+      libs_section_prefix = ""
+    }
+
+    if (defined(invoker.libs_section_postfix)) {
+      libs_section_postfix = invoker.libs_section_postfix
+    } else {
+      libs_section_postfix = ""
+    }
+
+    if (defined(invoker.solink_libs_section_prefix)) {
+      solink_libs_section_prefix = invoker.solink_libs_section_prefix
+    } else {
+      solink_libs_section_prefix = ""
+    }
+
+    if (defined(invoker.solink_libs_section_postfix)) {
+      solink_libs_section_postfix = invoker.solink_libs_section_postfix
+    } else {
+      solink_libs_section_postfix = ""
+    }
+
+    # These library switches can apply to all tools below.
+    lib_switch = "-l"
+    lib_dir_switch = "-L"
+
+    tool("cc") {
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CC {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("cxx") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CXX {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("asm") {
+      # For GCC we can just use the C compiler to compile assembly.
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "ASM {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+      command = "rm -f {{output}} && $ar rcs {{output}} @$rspfile"
+      description = "AR {{output}}"
+      rspfile_content = "{{inputs}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}{{output_extension}}",
+      ]
+      default_output_extension = ".a"
+      output_prefix = "lib"
+    }
+
+    tool("solink") {
+      soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
+      sofile = "{{root_out_dir}}/$soname"  # Possibly including toolchain dir.
+      rspfile = sofile + ".rsp"
+
+      # These variables are not built into GN but are helpers that implement
+      # (1) linking to produce a .so, (2) extracting the symbols from that file
+      # to a temporary file, (3) if the temporary file has differences from the
+      # existing .TOC file, overwrite it, otherwise, don't change it.
+      tocfile = sofile + ".TOC"
+      temporary_tocname = sofile + ".tmp"
+      link_command =
+          "$ld -shared {{ldflags}} -o $sofile -Wl,-soname=$soname @$rspfile"
+      toc_command = "{ $readelf -d $sofile | grep SONAME ; $nm -gD -f p $sofile | cut -f1-2 -d' '; } > $temporary_tocname"
+      replace_command = "if ! cmp -s $temporary_tocname $tocfile; then mv $temporary_tocname $tocfile; fi"
+
+      command = "$link_command && $toc_command && $replace_command"
+      if (defined(invoker.postsolink)) {
+        command += " && " + invoker.postsolink
+      }
+      rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
+
+      description = "SOLINK $sofile"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_extension = ".so"
+      if (defined(invoker.default_output_extension)) {
+        default_output_extension = invoker.default_output_extension
+      }
+
+      output_prefix = "lib"
+
+      # Since the above commands only update the .TOC file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # Tell GN about the output files. It will link to the sofile but use the
+      # tocfile for dependency management.
+      outputs = [
+        sofile,
+        tocfile,
+      ]
+      if (defined(invoker.solink_outputs)) {
+        outputs += invoker.solink_outputs
+      }
+      link_output = sofile
+      depend_output = tocfile
+    }
+
+    tool("link") {
+      exename = "{{target_output_name}}{{output_extension}}"
+      outfile = "{{root_out_dir}}/$exename"
+      rspfile = "$outfile.rsp"
+      unstripped_outfile = outfile
+
+      if (defined(invoker.strip)) {
+        unstripped_outfile = "{{root_out_dir}}/exe.unstripped/$exename"
+      }
+
+      command = "$ld {{ldflags}} -o $unstripped_outfile -Wl,--start-group @$rspfile {{solibs}} -Wl,--end-group $libs_section_prefix {{libs}} $libs_section_postfix"
+      if (defined(invoker.strip)) {
+        strip = invoker.strip
+        strip_command = "${strip} --strip-unneeded -o $outfile $unstripped_outfile"
+        command += " && " + strip_command
+      }
+      if (defined(invoker.postlink)) {
+        command += " && " + invoker.postlink
+      }
+      description = "LINK $outfile"
+      rspfile_content = "{{inputs}}"
+      outputs = [
+        outfile,
+      ]
+      if (outfile != unstripped_outfile) {
+        outputs += [ unstripped_outfile ]
+      }
+      if (defined(invoker.link_outputs)) {
+        outputs += invoker.link_outputs
+      }
+    }
+
+    tool("stamp") {
+      command = "touch {{output}}"
+      description = "STAMP {{output}}"
+    }
+
+    tool("copy") {
+      command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+      description = "COPY {{source}} {{output}}"
+    }
+
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    toolchain_args() {
+      current_cpu = invoker.toolchain_cpu
+      current_os = invoker.toolchain_os
+
+      # These values need to be passed through unchanged.
+      target_os = target_os
+      target_cpu = target_cpu
+
+      if (defined(invoker.is_clang)) {
+        is_clang = invoker.is_clang
+      }
+    }
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
diff --git a/build/toolchain/get_concurrent_links.py b/build/toolchain/get_concurrent_links.py
new file mode 100644
index 0000000..6a40101
--- /dev/null
+++ b/build/toolchain/get_concurrent_links.py
@@ -0,0 +1,64 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script computes the number of concurrent links we want to run in the build
+# as a function of machine spec. It's based on GetDefaultConcurrentLinks in GYP.
+
+import os
+import re
+import subprocess
+import sys
+
+def GetDefaultConcurrentLinks():
+  # Inherit the legacy environment variable for people that have set it in GYP.
+  pool_size = int(os.getenv('GYP_LINK_CONCURRENCY', 0))
+  if pool_size:
+    return pool_size
+
+  if sys.platform in ('win32', 'cygwin'):
+    import ctypes
+
+    class MEMORYSTATUSEX(ctypes.Structure):
+      _fields_ = [
+        ("dwLength", ctypes.c_ulong),
+        ("dwMemoryLoad", ctypes.c_ulong),
+        ("ullTotalPhys", ctypes.c_ulonglong),
+        ("ullAvailPhys", ctypes.c_ulonglong),
+        ("ullTotalPageFile", ctypes.c_ulonglong),
+        ("ullAvailPageFile", ctypes.c_ulonglong),
+        ("ullTotalVirtual", ctypes.c_ulonglong),
+        ("ullAvailVirtual", ctypes.c_ulonglong),
+        ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+      ]
+
+    stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX))
+    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+
+    mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30)))  # total / 4GB
+    hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+    return min(mem_limit, hard_cap)
+  elif sys.platform.startswith('linux'):
+    if os.path.exists("/proc/meminfo"):
+      with open("/proc/meminfo") as meminfo:
+        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+        for line in meminfo:
+          match = memtotal_re.match(line)
+          if not match:
+            continue
+          # Allow 8 GB per link on Linux because Gold is quite memory hungry.
+          return max(1, int(match.group(1)) / (8 * (2 ** 20)))
+    return 1
+  elif sys.platform == 'darwin':
+    try:
+      avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+      # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
+      # 4GB per ld process allows for some more bloat.
+      return max(1, avail_bytes / (4 * (2 ** 30)))  # total / 4GB
+    except Exception:
+      return 1
+  else:
+    # TODO(scottmg): Implement this for other platforms.
+    return 1
+
+print GetDefaultConcurrentLinks()
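+
+# Worked example (illustrative numbers, not part of the build): on a Linux
+# host with 32GB of RAM, /proc/meminfo reports MemTotal of roughly
+# 33554432 kB, so the pool size above is 33554432 / (8 * 2**20) = 4 links.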
diff --git a/build/toolchain/goma.gni b/build/toolchain/goma.gni
new file mode 100644
index 0000000..c0f4cf2
--- /dev/null
+++ b/build/toolchain/goma.gni
@@ -0,0 +1,22 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of Goma.
+#
+# This is currently designed to match the GYP build exactly, so as not to break
+# people during the transition.
+
+declare_args() {
+  # Set to true to enable distributed compilation using Goma.
+  use_goma = false
+
+  # Set the default value based on the platform.
+  if (is_win) {
+    # Absolute directory containing the Goma source code.
+    goma_dir = "C:\goma\goma-win"
+  } else {
+    # Absolute directory containing the Goma source code.
+    goma_dir = getenv("HOME") + "/goma"
+  }
+}
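+
+# Example (hypothetical paths): enable Goma from the command line with
+#   gn gen out/Release --args='use_goma=true goma_dir="/home/me/goma"'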
diff --git a/build/toolchain/linux/BUILD.gn b/build/toolchain/linux/BUILD.gn
new file mode 100644
index 0000000..c16e31c
--- /dev/null
+++ b/build/toolchain/linux/BUILD.gn
@@ -0,0 +1,115 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/toolchain/ccache.gni")
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+import("//build/toolchain/goma.gni")
+
+if (use_goma) {
+  assert(!use_ccache, "Goma and ccache can't be used together.")
+  compiler_prefix = "$goma_dir/gomacc "
+} else if (use_ccache) {
+  compiler_prefix = "ccache "
+} else {
+  compiler_prefix = ""
+}
+
+gcc_toolchain("arm") {
+  cc = "${compiler_prefix}arm-linux-gnueabi-gcc"
+  cxx = "${compiler_prefix}arm-linux-gnueabi-g++"
+
+  ar = "arm-linux-gnueabi-ar"
+  ld = cxx
+  readelf = "arm-linux-gnueabi-readelf"
+  nm = "arm-linux-gnueabi-nm"
+
+  toolchain_cpu = "arm"
+  toolchain_os = "linux"
+  is_clang = false
+}
+
+gcc_toolchain("clang_x86") {
+  if (use_clang_type_profiler) {
+    prefix = rebase_path("//third_party/llvm-allocated-type/Linux_ia32/bin",
+                         root_build_dir)
+  } else {
+    prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                         root_build_dir)
+  }
+  cc = "${compiler_prefix}$prefix/clang"
+  cxx = "${compiler_prefix}$prefix/clang++"
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_cpu = "x86"
+  toolchain_os = "linux"
+  is_clang = true
+}
+
+gcc_toolchain("x86") {
+  cc = "${compiler_prefix}gcc"
+  cxx = "$compiler_prefix}g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_cpu = "x86"
+  toolchain_os = "linux"
+  is_clang = false
+}
+
+gcc_toolchain("clang_x64") {
+  if (use_clang_type_profiler) {
+    prefix = rebase_path("//third_party/llvm-allocated-type/Linux_x64/bin",
+                         root_build_dir)
+  } else {
+    prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                         root_build_dir)
+  }
+  cc = "${compiler_prefix}$prefix/clang"
+  cxx = "${compiler_prefix}$prefix/clang++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+  strip = "strip"
+
+  toolchain_cpu = "x64"
+  toolchain_os = "linux"
+  is_clang = true
+}
+
+gcc_toolchain("x64") {
+  cc = "${compiler_prefix}gcc"
+  cxx = "${compiler_prefix}g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_cpu = "x64"
+  toolchain_os = "linux"
+  is_clang = false
+}
+
+gcc_toolchain("mipsel") {
+  cc = "mipsel-linux-gnu-gcc"
+  cxx = "mipsel-linux-gnu-g++"
+  ar = "mipsel-linux-gnu-ar"
+  ld = cxx
+  readelf = "mipsel-linux-gnu-readelf"
+  nm = "mipsel-linux-gnu-nm"
+
+  toolchain_cpu = "mipsel"
+  toolchain_os = "linux"
+  is_clang = false
+}
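+
+# Example (illustrative; assumes the default BUILDCONFIG maps target_cpu to
+# these toolchains): cross-compile for ARM with
+#   gn gen out/arm --args='target_cpu="arm"'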
diff --git a/build/toolchain/mac/BUILD.gn b/build/toolchain/mac/BUILD.gn
new file mode 100644
index 0000000..8efdd5c
--- /dev/null
+++ b/build/toolchain/mac/BUILD.gn
@@ -0,0 +1,261 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(brettw) Use "gcc_toolchain.gni" like the Linux toolchains. This requires
+# some enhancements since the commands on Mac are slightly different than on
+# Linux.
+
+import("../goma.gni")
+import("//build/config/ios/ios_sdk.gni")
+import("//build/config/mac/mac_sdk.gni")
+
+assert(host_os == "mac")
+
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/goma.gni")
+import("//build/config/sysroot.gni")
+
+if (use_goma) {
+  goma_prefix = "$goma_dir/gomacc "
+} else {
+  goma_prefix = ""
+}
+
+# This will copy the gyp-mac-tool to the build directory. We pass in the source
+# file of the mac tool.
+gyp_mac_tool_source =
+    rebase_path("//tools/gyp/pylib/gyp/mac_tool.py", root_build_dir)
+exec_script("setup_toolchain.py", [ gyp_mac_tool_source ])
+
+# Shared toolchain definition. Invocations should set toolchain_os to set the
+# build args in this definition.
+template("mac_toolchain") {
+  toolchain(target_name) {
+    assert(defined(invoker.cc), "mac_toolchain() must specify a \"cc\" value")
+    assert(defined(invoker.cxx), "mac_toolchain() must specify a \"cxx\" value")
+    assert(defined(invoker.ld), "mac_toolchain() must specify a \"ld\" value")
+    assert(defined(invoker.toolchain_cpu),
+           "mac_toolchain() must specify a \"toolchain_cpu\"")
+    assert(defined(invoker.toolchain_os),
+           "mac_toolchain() must specify a \"toolchain_os\"")
+
+    # We can't do string interpolation ($ in strings) on things with dots in
+    # them. To allow us to use $cc below, for example, we create copies of
+    # these values in our scope.
+    cc = invoker.cc
+    cxx = invoker.cxx
+    ld = invoker.ld
+
+    # Make these apply to all tools below.
+    lib_switch = "-l"
+    lib_dir_switch = "-L"
+
+    sysroot_flags = ""
+
+    if (defined(invoker.sysroot_flags)) {
+      sysroot_flags = invoker.sysroot_flags
+    }
+
+    toolchain_flags = ""
+    if (invoker.toolchain_cpu == "i386") {
+      toolchain_flags = "-m32"
+    }
+
+    tool("cc") {
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} $sysroot_flags $toolchain_flags {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CC {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("cxx") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} $sysroot_flags $toolchain_flags {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CXX {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("asm") {
+      # For GCC we can just use the C compiler to compile assembly.
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} $sysroot_flags $toolchain_flags {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "ASM {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("objc") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} $sysroot_flags $toolchain_flags {{cflags}} {{cflags_c}} {{cflags_objc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "OBJC {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("objcxx") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} $sysroot_flags $toolchain_flags {{cflags}} {{cflags_cc}} {{cflags_objcc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "OBJCXX {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("alink") {
+      command = "rm -f {{output}} && ./gyp-mac-tool filter-libtool libtool -static -o {{output}} {{inputs}}"
+      description = "LIBTOOL-STATIC {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}{{output_extension}}",
+      ]
+      default_output_extension = ".a"
+      output_prefix = "lib"
+    }
+
+    tool("solink") {
+      dylib = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"  # eg "./libfoo.dylib"
+      rspfile = dylib + ".rsp"
+
+      # These variables are not built into GN but are helpers that implement
+      # (1) linking to produce a .so, (2) extracting the symbols from that file
+      # to a temporary file, and (3) if the temporary file has differences from
+      # the existing .TOC file, overwriting it; otherwise, leaving it unchanged.
+      #
+      # As a special case, if the library reexports symbols from other dynamic
+      # libraries, we always update the .TOC and skip the temporary file and
+      # diffing steps, since that library always needs to be re-linked.
+      tocname = dylib + ".TOC"
+      temporary_tocname = dylib + ".tmp"
+
+      does_reexport_command = "[ ! -e $dylib -o ! -e $tocname ] || otool -l $dylib | grep -q LC_REEXPORT_DYLIB"
+      link_command = "$ld -shared $sysroot_flags $toolchain_flags {{ldflags}} -o $dylib -Wl,-filelist,$rspfile {{solibs}} {{libs}}"
+      replace_command = "if ! cmp -s $temporary_tocname $tocname; then mv $temporary_tocname $tocname"
+      extract_toc_command = "{ otool -l $dylib | grep LC_ID_DYLIB -A 5; nm -gP $dylib | cut -f1-2 -d' ' | grep -v U\$\$; true; }"
+
+      command = "if $does_reexport_command ; then $link_command && $extract_toc_command > $tocname; else $link_command && $extract_toc_command > $temporary_tocname && $replace_command ; fi; fi"
+
+      rspfile_content = "{{inputs_newline}}"
+
+      description = "SOLINK {{output}}"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_extension = ".dylib"
+
+      output_prefix = "lib"
+
+      # Since the above commands only update the .TOC file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # Tell GN about the output files. It will link to the dylib but use the
+      # tocname for dependency management.
+      outputs = [
+        dylib,
+        tocname,
+      ]
+      link_output = dylib
+      depend_output = tocname
+    }
+
+    tool("link") {
+      outfile = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"
+      rspfile = "$outfile.rsp"
+
+      command = "$ld $sysroot_flags $toolchain_flags {{ldflags}} -Xlinker -rpath -Xlinker @executable_path/Frameworks -o $outfile -Wl,-filelist,$rspfile {{solibs}} {{libs}}"
+      description = "LINK $outfile"
+      rspfile_content = "{{inputs_newline}}"
+      outputs = [
+        outfile,
+      ]
+    }
+
+    tool("stamp") {
+      command = "touch {{output}}"
+      description = "STAMP {{output}}"
+    }
+
+    tool("copy") {
+      command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+      description = "COPY {{source}} {{output}}"
+    }
+
+    toolchain_args() {
+      current_cpu = invoker.toolchain_cpu
+      current_os = invoker.toolchain_os
+
+      # These values need to be passed through unchanged.
+      target_os = target_os
+      target_cpu = target_cpu
+
+      if (defined(invoker.is_clang)) {
+        is_clang = invoker.is_clang
+      }
+    }
+  }
+}
+
+# Toolchain used for iOS device targets.
+mac_toolchain("ios_clang_arm") {
+  toolchain_cpu = "arm"
+  toolchain_os = "mac"
+  prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                       root_build_dir)
+  cc = "${goma_prefix}$prefix/clang"
+  cxx = "${goma_prefix}$prefix/clang++"
+  ld = cxx
+  is_clang = true
+  sysroot_flags = "-isysroot $ios_device_sdk_path -miphoneos-version-min=$ios_deployment_target"
+}
+
+# Toolchain used for iOS simulator targets.
+mac_toolchain("ios_clang_x64") {
+  toolchain_cpu = "x64"
+  toolchain_os = "mac"
+  prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                       root_build_dir)
+  cc = "${goma_prefix}$prefix/clang"
+  cxx = "${goma_prefix}$prefix/clang++"
+  ld = cxx
+  is_clang = true
+  sysroot_flags = "-isysroot $ios_simulator_sdk_path -mios-simulator-version-min=$ios_deployment_target"
+}
+
+# Toolchain used for Mac host targets.
+mac_toolchain("clang_x64") {
+  toolchain_cpu = "x64"
+  toolchain_os = "mac"
+  prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                       root_build_dir)
+  cc = "${goma_prefix}$prefix/clang"
+  cxx = "${goma_prefix}$prefix/clang++"
+  ld = cxx
+  is_clang = true
+  sysroot_flags = "-isysroot $mac_sdk_path -mmacosx-version-min=$mac_sdk_min"
+}
+
+# Toolchain used for Mac host (i386) targets.
+mac_toolchain("clang_i386") {
+  toolchain_cpu = "i386"
+  toolchain_os = "mac"
+  prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                       root_build_dir)
+  cc = "${goma_prefix}$prefix/clang"
+  cxx = "${goma_prefix}$prefix/clang++"
+  ld = cxx
+  is_clang = true
+  sysroot_flags = "-isysroot $mac_sdk_path -mmacosx-version-min=$mac_sdk_min"
+}
diff --git a/build/toolchain/mac/setup_toolchain.py b/build/toolchain/mac/setup_toolchain.py
new file mode 100644
index 0000000..431078f
--- /dev/null
+++ b/build/toolchain/mac/setup_toolchain.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import stat
+import sys
+
+def CopyTool(source_path):
+  """Copies the given tool to the current directory, including a warning not
+  to edit it."""
+  with open(source_path) as source_file:
+    tool_source = source_file.readlines()
+
+  # Add header and write it out to the current directory (which should be the
+  # root build dir).
+  out_path = 'gyp-mac-tool'
+  with open(out_path, 'w') as tool_file:
+    tool_file.write(''.join([tool_source[0],
+                             '# Generated by setup_toolchain.py do not edit.\n']
+                            + tool_source[1:]))
+  st = os.stat(out_path)
+  os.chmod(out_path, st.st_mode | stat.S_IEXEC)
+
+# Find the tool source (the first argument) and copy it.
+if len(sys.argv) != 2:
+  print "Need one argument (mac_tool source path)."
+  sys.exit(1)
+CopyTool(sys.argv[1])
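+
+# Example (illustrative path, matching the exec_script() call in
+# build/toolchain/mac/BUILD.gn):
+#   python setup_toolchain.py ../../tools/gyp/pylib/gyp/mac_tool.py
+# This writes an executable gyp-mac-tool into the current build directory.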
diff --git a/build/toolchain/nacl/BUILD.gn b/build/toolchain/nacl/BUILD.gn
new file mode 100644
index 0000000..5fa637c
--- /dev/null
+++ b/build/toolchain/nacl/BUILD.gn
@@ -0,0 +1,63 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+toolchain("x86_newlib") {
+  toolprefix = "gen/sdk/toolchain/linux_x86_newlib/bin/x86_64-nacl-"
+  cc = toolprefix + "gcc"
+  cxx = toolprefix + "g++"
+  ld = toolprefix + "g++"
+
+  tool("cc") {
+    command = "$cc -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_c -c \$in -o \$out"
+    description = "CC(NaCl x86 Newlib) \$out"
+    depfile = "\$out.d"
+    depsformat = "gcc"
+  }
+  tool("cxx") {
+    # cflags_pch_cc
+    command = "$cxx -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_cc -c \$in -o \$out"
+    description = "CXX(NaCl x86 Newlib) \$out"
+    depfile = "\$out.d"
+    depsformat = "gcc"
+  }
+  tool("alink") {
+    command = "rm -f \$out && ${toolprefix}ar rcs \$out \$in"
+    description = "AR(NaCl x86 Newlib) \$out"
+  }
+  tool("solink") {
+    command = "if [ ! -e \$lib -o ! -e \${lib}.TOC ]; then $ld -shared \$ldflags -o \$lib -Wl,-soname=\$soname -Wl,--whole-archive \$in \$solibs -Wl,--no-whole-archive \$libs && { readelf -d \${lib} | grep SONAME ; nm -gD -f p \${lib} | cut -f1-2 -d' '; } > \${lib}.TOC; else $ld -shared \$ldflags -o \$lib -Wl,-soname=\$soname -Wl,--whole-archive \$in \$solibs -Wl,--no-whole-archive \$libs && { readelf -d \${lib} | grep SONAME ; nm -gD -f p \${lib} | cut -f1-2 -d' '; } > \${lib}.tmp && if ! cmp -s \${lib}.tmp \${lib}.TOC; then mv \${lib}.tmp \${lib}.TOC ; fi; fi"
+    description = "SOLINK(NaCl x86 Newlib) \$lib"
+
+    #pool = "link_pool"
+    restat = "1"
+  }
+  tool("link") {
+    command = "$ld \$ldflags -o \$out -Wl,--start-group \$in \$solibs -Wl,--end-group \$libs"
+    description = "LINK(NaCl x86 Newlib) \$out"
+
+    #pool = "link_pool"
+  }
+
+  if (is_win) {
+    tool("stamp") {
+      command = "$python_path gyp-win-tool stamp \$out"
+      description = "STAMP \$out"
+    }
+  } else {
+    tool("stamp") {
+      command = "touch \$out"
+      description = "STAMP \$out"
+    }
+  }
+
+  toolchain_args() {
+    # Override the default OS detection. The build config will set the is_*
+    # flags accordingly.
+    current_os = "nacl"
+
+    # Component build not supported in NaCl, since it does not support shared
+    # libraries.
+    is_component_build = false
+  }
+}
diff --git a/build/toolchain/win/BUILD.gn b/build/toolchain/win/BUILD.gn
new file mode 100644
index 0000000..454cdde
--- /dev/null
+++ b/build/toolchain/win/BUILD.gn
@@ -0,0 +1,253 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Path to the directory containing the VC binaries for the right
+  # combination of host and target architectures. Currently only the
+  # 64-bit host toolchain is supported, with either 32-bit or 64-bit targets.
+  # If vc_bin_dir is not specified on the command line (and it normally
+  # isn't), we will dynamically determine the right value to use at runtime.
+  vc_bin_dir = ""
+}
+
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/goma.gni")
+
+# Should only be running on Windows.
+assert(is_win)
+
+# Set up the Visual Studio state.
+#
+# Its arguments are the VS path and the compiler wrapper tool. It will write
+# "environment.x86" and "environment.x64" to the build directory and return a
+# list to us.
+gyp_win_tool_path =
+    rebase_path("//tools/gyp/pylib/gyp/win_tool.py", root_build_dir)
+
+toolchain_data = exec_script("setup_toolchain.py",
+                             [
+                               visual_studio_path,
+                               gyp_win_tool_path,
+                               windows_sdk_path,
+                               visual_studio_runtime_dirs,
+                               current_cpu,
+                             ],
+                             "scope")
+
+if (vc_bin_dir == "") {
+  vc_bin_dir = toolchain_data.vc_bin_dir
+}
+
+if (use_goma) {
+  goma_prefix = "$goma_dir/gomacc.exe "
+} else {
+  goma_prefix = ""
+}
+
+# This value will be inherited in the toolchain below.
+concurrent_links = exec_script("../get_concurrent_links.py", [], "value")
+
+# Parameters:
+#  current_cpu: current_cpu to pass as a build arg
+#  environment: File name of environment file.
+template("msvc_toolchain") {
+  if (defined(invoker.concurrent_links)) {
+    concurrent_links = invoker.concurrent_links
+  }
+
+  env = invoker.environment
+
+  if (is_debug) {
+    configuration = "Debug"
+  } else {
+    configuration = "Release"
+  }
+  exec_script("../../vs_toolchain.py",
+              [
+                "copy_dlls",
+                rebase_path(root_build_dir),
+                configuration,
+                invoker.current_cpu,
+              ])
+
+  cl = invoker.cl
+
+  toolchain(target_name) {
+    # Make these apply to all tools below.
+    lib_switch = ""
+    lib_dir_switch = "/LIBPATH:"
+
+    tool("cc") {
+      rspfile = "{{output}}.rsp"
+
+      # TODO(brettw) enable this when GN support in the binary has been rolled.
+      #precompiled_header_type = "msvc"
+      pdbname = "{{target_out_dir}}/{{target_output_name}}_c.pdb"
+      command = "ninja -t msvc -e $env -- $cl /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd$pdbname"
+      depsformat = "msvc"
+      description = "CC {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.obj",
+      ]
+      rspfile_content = "{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}"
+    }
+
+    tool("cxx") {
+      rspfile = "{{output}}.rsp"
+
+      # TODO(brettw) enable this when GN support in the binary has been rolled.
+      #precompiled_header_type = "msvc"
+
+      # The PDB name needs to be different between C and C++ compiled files.
+      pdbname = "{{target_out_dir}}/{{target_output_name}}_cc.pdb"
+      command = "ninja -t msvc -e $env -- $cl /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd$pdbname"
+      depsformat = "msvc"
+      description = "CXX {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.obj",
+      ]
+      rspfile_content = "{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}"
+    }
+
+    tool("rc") {
+      command = "$python_path gyp-win-tool rc-wrapper $env rc.exe {{defines}} {{include_dirs}} /fo{{output}} {{source}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.res",
+      ]
+      description = "RC {{output}}"
+    }
+
+    tool("asm") {
+      # TODO(brettw): "/safeseh" assembler argument is hardcoded here. Extract
+      # assembler flags to a variable like cflags. crbug.com/418613
+      command = "$python_path gyp-win-tool asm-wrapper $env ml.exe {{defines}} {{include_dirs}} /safeseh /c /Fo {{output}} {{source}}"
+      description = "ASM {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.obj",
+      ]
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+      command = "$python_path gyp-win-tool link-wrapper $env False lib.exe /nologo /ignore:4221 /OUT:{{output}} @$rspfile"
+      description = "LIB {{output}}"
+      outputs = [
+        # Ignore {{output_extension}} and always use .lib, there's no reason to
+        # allow targets to override this extension on Windows.
+        "{{target_out_dir}}/{{target_output_name}}.lib",
+      ]
+      default_output_extension = ".lib"
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}}"
+    }
+
+    tool("solink") {
+      dllname = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"  # e.g. foo.dll
+      libname =
+          "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.lib"  # e.g. foo.dll.lib
+      rspfile = "${dllname}.rsp"
+
+      link_command = "$python_path gyp-win-tool link-wrapper $env False link.exe /nologo /IMPLIB:$libname /DLL /OUT:$dllname /PDB:${dllname}.pdb @$rspfile"
+
+      # TODO(brettw) support manifests
+      #manifest_command = "$python_path gyp-win-tool manifest-wrapper $env mt.exe -nologo -manifest $manifests -out:${dllname}.manifest"
+      #command = "cmd /c $link_command && $manifest_command"
+      command = link_command
+
+      default_output_extension = ".dll"
+      description = "LINK(DLL) {{output}}"
+      outputs = [
+        dllname,
+        libname,
+      ]
+      link_output = libname
+      depend_output = libname
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+    }
+
+    tool("link") {
+      rspfile = "{{output}}.rsp"
+
+      link_command = "$python_path gyp-win-tool link-wrapper $env False link.exe /nologo /OUT:{{output}} /PDB:{{output}}.pdb @$rspfile"
+
+      # TODO(brettw) support manifests
+      #manifest_command = "$python_path gyp-win-tool manifest-wrapper $env mt.exe -nologo -manifest $manifests -out:{{output}}.manifest"
+      #command = "cmd /c $link_command && $manifest_command"
+      command = link_command
+
+      default_output_extension = ".exe"
+      description = "LINK {{output}}"
+      outputs = [
+        "{{root_out_dir}}/{{target_output_name}}{{output_extension}}",
+      ]
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}}"
+    }
+
+    tool("stamp") {
+      command = "$python_path gyp-win-tool stamp {{output}}"
+      description = "STAMP {{output}}"
+    }
+
+    tool("copy") {
+      command =
+          "$python_path gyp-win-tool recursive-mirror {{source}} {{output}}"
+      description = "COPY {{source}} {{output}}"
+    }
+
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    toolchain_args() {
+      current_cpu = invoker.current_cpu
+      if (defined(invoker.is_clang)) {
+        is_clang = invoker.is_clang
+      }
+    }
+  }
+}
+
+# TODO(dpranke): Declare both toolchains all of the time when we
+# get it sorted out how we want to support them both in a single build.
+# Right now only one of these can be enabled at a time because the
+# runtime libraries get copied to root_build_dir and would collide.
+if (current_cpu == "x86") {
+  msvc_toolchain("x86") {
+    environment = "environment.x86"
+    current_cpu = "x86"
+    cl = "${goma_prefix}\"${vc_bin_dir}/cl.exe\""
+    is_clang = false
+  }
+  msvc_toolchain("clang_x86") {
+    environment = "environment.x86"
+    current_cpu = "x86"
+    prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                         root_build_dir)
+    cl = "${goma_prefix}$prefix/clang-cl.exe"
+    is_clang = true
+  }
+}
+
+if (current_cpu == "x64") {
+  msvc_toolchain("x64") {
+    environment = "environment.x64"
+    current_cpu = "x64"
+    cl = "${goma_prefix}\"${vc_bin_dir}/cl.exe\""
+    is_clang = false
+  }
+  msvc_toolchain("clang_x64") {
+    environment = "environment.x64"
+    current_cpu = "x64"
+    prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                         root_build_dir)
+    cl = "${goma_prefix}$prefix/clang-cl.exe"
+    is_clang = true
+  }
+}
diff --git a/build/toolchain/win/midl.gni b/build/toolchain/win/midl.gni
new file mode 100644
index 0000000..3e7fbec
--- /dev/null
+++ b/build/toolchain/win/midl.gni
@@ -0,0 +1,113 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win)
+
+import("//build/config/win/visual_studio_version.gni")
+
+# This template defines a rule to invoke the MS IDL compiler.
+#
+# Parameters
+#
+#   sources
+#      List of .idl files to process.
+#
+#   out_dir (optional)
+#       Directory to write the generated files to. Defaults to target_gen_dir.
+#
+#   deps (optional)
+#   visibility (optional)
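+#
+# Example (hypothetical target and file names):
+#   midl("chrome_tab_idl") {
+#     sources = [ "chrome_tab.idl" ]
+#   }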
+
+template("midl") {
+  action_name = "${target_name}_idl_action"
+  source_set_name = target_name
+
+  assert(defined(invoker.sources), "sources must be defined for $target_name")
+
+  if (defined(invoker.out_dir)) {
+    out_dir = invoker.out_dir
+  } else {
+    out_dir = target_gen_dir
+  }
+
+  header_file = "{{source_name_part}}.h"
+  dlldata_file = "{{source_name_part}}.dlldata.c"
+  interface_identifier_file = "{{source_name_part}}_i.c"
+  proxy_file = "{{source_name_part}}_p.c"
+  type_library_file = "{{source_name_part}}.tlb"
+
+  action_foreach(action_name) {
+    visibility = [ ":$source_set_name" ]
+
+    # This functionality is handled by the win-tool because the GYP build has
+    # MIDL support built-in.
+    # TODO(brettw) move this to a separate MIDL wrapper script for better
+    # clarity once GYP support is not needed.
+    script = "$root_build_dir/gyp-win-tool"
+
+    sources = invoker.sources
+
+    # Note that .tlb is not included in the outputs as it is not always
+    # generated depending on the content of the input idl file.
+    outputs = [
+      "$out_dir/$header_file",
+      "$out_dir/$dlldata_file",
+      "$out_dir/$interface_identifier_file",
+      "$out_dir/$proxy_file",
+    ]
+
+    if (current_cpu == "x86") {
+      win_tool_arch = "environment.x86"
+      idl_target_platform = "win32"
+    } else if (current_cpu == "x64") {
+      win_tool_arch = "environment.x64"
+      idl_target_platform = "x64"
+    } else {
+      assert(false, "Need environment for this arch")
+    }
+
+    args = [
+      "midl-wrapper",
+      win_tool_arch,
+      rebase_path(out_dir, root_build_dir),
+      type_library_file,
+      header_file,
+      dlldata_file,
+      interface_identifier_file,
+      proxy_file,
+      "{{source}}",
+      "/char",
+      "signed",
+      "/env",
+      idl_target_platform,
+      "/Oicf",
+    ]
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+
+  source_set(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    # We only compile the IID files from the IDL tool rather than all outputs.
+    sources = process_file_template(invoker.sources,
+                                    [ "$out_dir/$interface_identifier_file" ])
+
+    public_deps = [
+      ":$action_name",
+    ]
+
+    config("midl_warnings") {
+      if (is_clang) {
+        # MIDL generates code like "#endif !_MIDL_USE_GUIDDEF_"
+        cflags = [ "-Wno-extra-tokens" ]
+      }
+    }
+    configs += [ ":midl_warnings" ]
+  }
+}
diff --git a/build/toolchain/win/setup_toolchain.py b/build/toolchain/win/setup_toolchain.py
new file mode 100644
index 0000000..bc9bd1e
--- /dev/null
+++ b/build/toolchain/win/setup_toolchain.py
@@ -0,0 +1,154 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Copies the given "win tool" (which the toolchain uses to wrap compiler
+# invocations) and the environment blocks for the 32-bit and 64-bit builds on
+# Windows to the build directory.
+#
+# The arguments are the Visual Studio install location, the location of the win
+# tool, the Windows SDK path, the runtime DLL directories and the target CPU.
+# The script assumes that the root build directory is the current dir and the
+# files will be written to the current directory.
+
+import errno
+import os
+import re
+import subprocess
+import sys
+
+
+def _ExtractImportantEnvironment(output_of_set):
+  """Extracts environment variables required for the toolchain to run from
+  a textual dump output by the cmd.exe 'set' command."""
+  envvars_to_save = (
+      'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
+      'include',
+      'lib',
+      'libpath',
+      'path',
+      'pathext',
+      'systemroot',
+      'temp',
+      'tmp',
+      )
+  env = {}
+  for line in output_of_set.splitlines():
+    for envvar in envvars_to_save:
+      if re.match(envvar + '=', line.lower()):
+        var, setting = line.split('=', 1)
+        if envvar == 'path':
+          # Our own rules (for running gyp-win-tool) and other actions in
+          # Chromium rely on python being in the path. Add the path to this
+          # python here so that if it's not in the path when ninja is run
+          # later, python will still be found.
+          setting = os.path.dirname(sys.executable) + os.pathsep + setting
+        env[var.upper()] = setting
+        break
+  for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
+    if required not in env:
+      raise Exception('Environment variable "%s" '
+                      'required to be set to valid path' % required)
+  return env
+
+
+def _SetupScript(target_cpu, sdk_dir):
+  """Returns a command (with arguments) to be used to set up the
+  environment."""
+  # Check if we are running in the SDK command line environment and use
+  # the setup script from the SDK if so. |target_cpu| should be either
+  # 'x86' or 'x64'.
+  assert target_cpu in ('x86', 'x64')
+  if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and sdk_dir:
+    return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
+            '/' + target_cpu]
+  else:
+    # We only support x64-hosted tools.
+    # TODO(scottmg|dpranke): Non-depot_tools toolchain: need to get Visual
+    # Studio install location from registry.
+    return [os.path.normpath(os.path.join(os.environ['GYP_MSVS_OVERRIDE_PATH'],
+                                          'VC/vcvarsall.bat')),
+            'amd64_x86' if target_cpu == 'x86' else 'amd64']
+
+
+def _FormatAsEnvironmentBlock(envvar_dict):
+  """Format as an 'environment block' directly suitable for CreateProcess.
+  Briefly this is a list of key=value\0, terminated by an additional \0. See
+  CreateProcess documentation for more details."""
+  block = ''
+  nul = '\0'
+  for key, value in envvar_dict.iteritems():
+    block += key + '=' + value + nul
+  block += nul
+  return block
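+
+# For example (illustrative), {'PATH': 'C:\\tools'} is formatted as
+# 'PATH=C:\\tools\0\0' -- each entry is NUL-terminated, with a final extra NUL.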
+
+
+def _CopyTool(source_path):
+  """Copies the given tool to the current directory, including a warning not
+  to edit it."""
+  with open(source_path) as source_file:
+    tool_source = source_file.readlines()
+
+  # Add header and write it out to the current directory (which should be the
+  # root build dir).
+  with open("gyp-win-tool", 'w') as tool_file:
+    tool_file.write(''.join([tool_source[0],
+                             '# Generated by setup_toolchain.py do not edit.\n']
+                            + tool_source[1:]))
+
+
+def main():
+  if len(sys.argv) != 6:
+    print('Usage: setup_toolchain.py '
+          '<visual studio path> <win tool path> <win sdk path> '
+          '<runtime dirs> <target_cpu>')
+    sys.exit(2)
+  tool_source = sys.argv[2]
+  win_sdk_path = sys.argv[3]
+  runtime_dirs = sys.argv[4]
+  target_cpu = sys.argv[5]
+
+  _CopyTool(tool_source)
+
+  cpus = ('x86', 'x64')
+  assert target_cpu in cpus
+  vc_bin_dir = ''
+
+  # TODO(scottmg|goma): Do we need an equivalent of
+  # ninja_use_custom_environment_files?
+
+  for cpu in cpus:
+    # Extract environment variables for subprocesses.
+    args = _SetupScript(cpu, win_sdk_path)
+    args.extend(('&&', 'set'))
+    popen = subprocess.Popen(
+        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    variables, _ = popen.communicate()
+    env = _ExtractImportantEnvironment(variables)
+    env['PATH'] = runtime_dirs + ';' + env['PATH']
+
+    if cpu == target_cpu:
+      for path in env['PATH'].split(os.pathsep):
+        if os.path.exists(os.path.join(path, 'cl.exe')):
+          vc_bin_dir = os.path.realpath(path)
+          break
+
+    # The Windows SDK include directories must be first. Both the SDK and
+    # Visual Studio ship a sal.h; the SDK copy is newer, and the SDK uses some
+    # newer features from it that are not present in the Visual Studio copy.
+
+    if win_sdk_path:
+      additional_includes = ('{sdk_dir}\\Include\\shared;' +
+                             '{sdk_dir}\\Include\\um;' +
+                             '{sdk_dir}\\Include\\winrt;').format(
+                                  sdk_dir=win_sdk_path)
+      env['INCLUDE'] = additional_includes + env['INCLUDE']
+    env_block = _FormatAsEnvironmentBlock(env)
+    with open('environment.' + cpu, 'wb') as f:
+      f.write(env_block)
+
+  assert vc_bin_dir
+  print 'vc_bin_dir = "%s"' % vc_bin_dir
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/tree_truth.sh b/build/tree_truth.sh
new file mode 100755
index 0000000..617092d
--- /dev/null
+++ b/build/tree_truth.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Script for printing recent commits in a buildbot run.
+
+# Return the sha1 of the given tag.  If not present, return "".
+# $1: path to repo
+# $2: tag name
+tt_sha1_for_tag() {
+  oneline=$(cd $1 && git log -1 $2 --format='%H' 2>/dev/null)
+  if [ $? -eq 0 ] ; then
+    echo $oneline
+  fi
+}
+
+# Return the sha1 of HEAD, or ""
+# $1: path to repo
+tt_sha1_for_head() {
+  ( cd $1 && git log HEAD -n1 --format='%H' | cat )
+}
+
+# For the given repo, set tag to HEAD.
+# $1: path to repo
+# $2: tag name
+tt_tag_head() {
+  ( cd $1 && git tag -f $2 )
+}
+
+# For the given repo, delete the tag.
+# $1: path to repo
+# $2: tag name
+tt_delete_tag() {
+  ( cd $1 && git tag -d $2 )
+}
+
+# For the given repo, set tag to "three commits ago" (for testing).
+# $1: path to repo
+# $2: tag name
+tt_tag_three_ago() {
+  local sh=$(cd $1 && git log --pretty=oneline -n 3 | tail -1 | awk '{print $1}')
+  ( cd $1 && git tag -f $2 $sh )
+}
+
+# List the commits between the given tag and HEAD.
+# If the tag does not exist, only list the last few.
+# If the tag is at HEAD, list nothing.
+# Output format has distinct build steps for repos with changes.
+# $1: path to repo
+# $2: tag name
+# $3: simple/short repo name to use for display
+tt_list_commits() {
+  local tag_sha1=$(tt_sha1_for_tag $1 $2)
+  local head_sha1=$(tt_sha1_for_head $1)
+  local display_name=$(echo $3 | sed 's#/#_#g')
+  if [ "${tag_sha1}" = "${head_sha1}" ] ; then
+    return
+  fi
+  if [ "${tag_sha1}" = "" ] ; then
+    echo "@@@BUILD_STEP Recent commits in repo $display_name@@@"
+    echo "NOTE: git tag was not found so we have no baseline."
+    echo "Here are some recent commits, but they may not be new for this build."
+    ( cd $1 && git log -n 10 --stat | cat)
+  else
+    echo "@@@BUILD_STEP New commits in repo $display_name@@@"
+    ( cd $1 && git log -n 500 $2..HEAD --stat | cat)
+  fi
+}
+
+# Clean out the tree truth tags in all repos.  For testing.
+tt_clean_all() {
+  for project in $@; do
+    tt_delete_tag $CHROME_SRC/../$project tree_truth
+  done
+}
+
+# Print tree truth for all clank repos.
+tt_print_all() {
+  for project in $@; do
+    local full_path=$CHROME_SRC/../$project
+    tt_list_commits $full_path tree_truth $project
+    tt_tag_head $full_path tree_truth
+  done
+}
+
+# Print a summary of the last 10 commits for each repo.
+tt_brief_summary() {
+  echo "@@@BUILD_STEP Brief summary of recent CLs in every branch@@@"
+  for project in $@; do
+    echo $project:
+    local full_path=$CHROME_SRC/../$project
+    (cd $full_path && git log -n 10 --format="   %H %s   %an, %ad" | cat)
+    echo "================================================================="
+  done
+}
+
+CHROME_SRC=$1
+shift
+PROJECT_LIST=$@
+tt_brief_summary $PROJECT_LIST
+tt_print_all $PROJECT_LIST
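+
+# Example (illustrative paths and project names):
+#   ./tree_truth.sh /b/build/src clank third_party/WebKit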
diff --git a/build/uiautomator_test.gypi b/build/uiautomator_test.gypi
new file mode 100644
index 0000000..e9bd0bf
--- /dev/null
+++ b/build/uiautomator_test.gypi
@@ -0,0 +1,37 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build uiautomator dexed tests jar.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'test_suite_name',
+#   'type': 'none',
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands',
+    '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+  ],
+  'variables': {
+    'output_dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
+  },
+  'actions': [
+    {
+      'action_name': 'dex_<(_target_name)',
+      'message': 'Dexing <(_target_name) jar',
+      'variables': {
+        'dex_input_paths': [
+          '>@(library_dexed_jars_paths)',
+        ],
+        'output_path': '<(output_dex_path)',
+      },
+      'includes': [ 'android/dex_action.gypi' ],
+    },
+  ],
+}
diff --git a/build/update-linux-sandbox.sh b/build/update-linux-sandbox.sh
new file mode 100755
index 0000000..735733a
--- /dev/null
+++ b/build/update-linux-sandbox.sh
@@ -0,0 +1,75 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+BUILDTYPE="${BUILDTYPE:-Debug}"
+CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}"
+CHROME_OUT_DIR="${CHROME_SRC_DIR}/${CHROMIUM_OUT_DIR:-out}/${BUILDTYPE}"
+CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox"
+CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox"
+CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH")
+
+TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null)
+if [ $? -ne 0 ]; then
+  echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}"
+  exit 1
+fi
+
+# Make sure the path is not on NFS.
+if [ "${TARGET_DIR_TYPE}" = "6969" ]; then
+  echo "Please make sure ${CHROME_SANDBOX_INST_PATH} is not on NFS!"
+  exit 1
+fi
+
+installsandbox() {
+  echo "(using sudo so you may be asked for your password)"
+  sudo -- cp "${CHROME_SANDBOX_BUILD_PATH}" \
+    "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chown root:root "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chmod 4755 "${CHROME_SANDBOX_INST_PATH}"
+  return $?
+}
+
+if [ ! -d "${CHROME_OUT_DIR}" ]; then
+  echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" "
+  echo "If you are building in Release mode"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then
+  echo -n "Could not find ${CHROME_SANDBOX_BUILD_PATH}, "
+  echo "please make sure you build the chrome_sandbox target"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, "
+  echo "installing it now."
+  installsandbox
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+  exit 1
+fi
+
+CURRENT_API=$("${CHROME_SANDBOX_BUILD_PATH}" --get-api)
+INSTALLED_API=$("${CHROME_SANDBOX_INST_PATH}" --get-api)
+
+if [ "${CURRENT_API}" != "${INSTALLED_API}" ]; then
+  echo "Your installed setuid sandbox is too old, installing it now."
+  if ! installsandbox; then
+    echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+    exit 1
+  fi
+else
+  echo "Your setuid sandbox is up to date"
+  if [ "${CHROME_DEVEL_SANDBOX}" != "${CHROME_SANDBOX_INST_PATH}" ]; then
+    echo -n "Make sure you have \"export "
+    echo -n "CHROME_DEVEL_SANDBOX=${CHROME_SANDBOX_INST_PATH}\" "
+    echo "somewhere in your .bashrc"
+    echo "This variable is currently: ${CHROME_DEVEL_SANDBOX:-empty}"
+  fi
+fi
diff --git a/build/util/BUILD.gn b/build/util/BUILD.gn
new file mode 100644
index 0000000..29dd943
--- /dev/null
+++ b/build/util/BUILD.gn
@@ -0,0 +1,48 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+action("webkit_version") {
+  script = "version.py"
+
+  lastchange_file = "LASTCHANGE.blink"
+
+  # TODO(brettw) move from content to this directory.
+  template_file = "//content/webkit_version.h.in"
+  inputs = [
+    lastchange_file,
+    template_file,
+  ]
+
+  output_file = "$root_gen_dir/webkit_version.h"
+  outputs = [
+    output_file,
+  ]
+
+  args = [
+    "-f",
+    rebase_path(lastchange_file, root_build_dir),
+    rebase_path(template_file, root_build_dir),
+    rebase_path(output_file, root_build_dir),
+  ]
+}
+
+action("chrome_version_json") {
+  script = "version.py"
+  _chrome_version_path = "//chrome/VERSION"
+  inputs = [
+    _chrome_version_path,
+  ]
+  _output_file = "$root_gen_dir/CHROME_VERSION.json"
+  outputs = [
+    _output_file,
+  ]
+  args = [
+    "--file",
+    rebase_path(_chrome_version_path, root_build_dir),
+    "--template",
+    "{\"full-quoted\": \"\\\"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\\\"\"}",
+    "--output",
+    rebase_path(_output_file, root_build_dir),
+  ]
+}
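+
+# With an illustrative VERSION file of MAJOR=53, MINOR=0, BUILD=2785, PATCH=89,
+# the action above writes: {"full-quoted": "\"53.0.2785.89\""}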
diff --git a/build/util/LASTCHANGE b/build/util/LASTCHANGE
new file mode 100644
index 0000000..438a0fe
--- /dev/null
+++ b/build/util/LASTCHANGE
@@ -0,0 +1 @@
+LASTCHANGE=a757125bae5bce3daacf60f00502f7dd6490b875
diff --git a/build/util/lastchange.py b/build/util/lastchange.py
new file mode 100755
index 0000000..3f3ee4a
--- /dev/null
+++ b/build/util/lastchange.py
@@ -0,0 +1,309 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+lastchange.py -- Chromium revision fetching utility.
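+
+Example invocations (illustrative):
+  lastchange.py -o LASTCHANGE      # write "LASTCHANGE=<revision>" to a file
+  lastchange.py --revision-only    # print just the revision to stdout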
+"""
+
+import re
+import optparse
+import os
+import subprocess
+import sys
+
+_GIT_SVN_ID_REGEX = re.compile(r'.*git-svn-id:\s*([^@]*)@([0-9]+)', re.DOTALL)
+
+class VersionInfo(object):
+  def __init__(self, url, revision):
+    self.url = url
+    self.revision = revision
+
+
+def FetchSVNRevision(directory, svn_url_regex):
+  """
+  Fetch the Subversion branch and revision for a given directory.
+
+  Errors are swallowed.
+
+  Returns:
+    A VersionInfo object or None on error.
+  """
+  try:
+    proc = subprocess.Popen(['svn', 'info'],
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            cwd=directory,
+                            shell=(sys.platform=='win32'))
+  except OSError:
+    # command is apparently either not installed or not executable.
+    return None
+  if not proc:
+    return None
+
+  attrs = {}
+  for line in proc.stdout:
+    line = line.strip()
+    if not line:
+      continue
+    key, val = line.split(': ', 1)
+    attrs[key] = val
+
+  try:
+    match = svn_url_regex.search(attrs['URL'])
+    if match:
+      url = match.group(2)
+    else:
+      url = ''
+    revision = attrs['Revision']
+  except KeyError:
+    return None
+
+  return VersionInfo(url, revision)
+
+
+def RunGitCommand(directory, command):
+  """
+  Launches git subcommand.
+
+  Errors are swallowed.
+
+  Returns:
+    A process object or None.
+  """
+  command = ['git'] + command
+  # Force shell usage under cygwin. This is a workaround for
+  # mysterious loss of cwd while invoking cygwin's git.
+  # We can't just pass shell=True to Popen, as under win32 this will
+  # cause CMD to be used, while we explicitly want a cygwin shell.
+  if sys.platform == 'cygwin':
+    command = ['sh', '-c', ' '.join(command)]
+  try:
+    proc = subprocess.Popen(command,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            cwd=directory,
+                            shell=(sys.platform=='win32'))
+    return proc
+  except OSError:
+    return None
+
+
+def FetchGitRevision(directory):
+  """
+  Fetch the Git hash for a given directory.
+
+  Errors are swallowed.
+
+  Returns:
+    A VersionInfo object or None on error.
+  """
+  hsh = ''
+  proc = RunGitCommand(directory, ['rev-parse', 'HEAD'])
+  if proc:
+    output = proc.communicate()[0].strip()
+    if proc.returncode == 0 and output:
+      hsh = output
+  if not hsh:
+    return None
+  pos = ''
+  proc = RunGitCommand(directory, ['cat-file', 'commit', 'HEAD'])
+  if proc:
+    output = proc.communicate()[0]
+    if proc.returncode == 0 and output:
+      for line in reversed(output.splitlines()):
+        if line.startswith('Cr-Commit-Position:'):
+          pos = line.rsplit()[-1].strip()
+          break
+  if not pos:
+    return VersionInfo('git', hsh)
+  return VersionInfo('git', '%s-%s' % (hsh, pos))
+
+
+def FetchGitSVNURLAndRevision(directory, svn_url_regex, go_deeper):
+  """
+  Fetch the Subversion URL and revision through Git.
+
+  Errors are swallowed.
+
+  Returns:
+    A tuple containing the Subversion URL and revision.
+  """
+  git_args = ['log', '-1', '--format=%b']
+  if go_deeper:
+    git_args.append('--grep=git-svn-id')
+  proc = RunGitCommand(directory, git_args)
+  if proc:
+    output = proc.communicate()[0].strip()
+    if proc.returncode == 0 and output:
+      # Extract the latest SVN revision and the SVN URL.
+      # The target line is the last "git-svn-id: ..." line like this:
+      # git-svn-id: svn://svn.chromium.org/chrome/trunk/src@85528 0039d316....
+      match = _GIT_SVN_ID_REGEX.search(output)
+      if match:
+        revision = match.group(2)
+        url_match = svn_url_regex.search(match.group(1))
+        if url_match:
+          url = url_match.group(2)
+        else:
+          url = ''
+        return url, revision
+  return None, None
+
+
+def FetchGitSVNRevision(directory, svn_url_regex, go_deeper):
+  """
+  Fetch the Git-SVN identifier for the local tree.
+
+  Errors are swallowed.
+  """
+  url, revision = FetchGitSVNURLAndRevision(directory, svn_url_regex, go_deeper)
+  if url and revision:
+    return VersionInfo(url, revision)
+  return None
+
+
+def FetchVersionInfo(default_lastchange, directory=None,
+                     directory_regex_prior_to_src_url='chrome|blink|svn',
+                     go_deeper=False):
+  """
+  Returns the last change (in the form of a branch, revision tuple),
+  from some appropriate revision control system.
+  """
+  svn_url_regex = re.compile(
+      r'.*/(' + directory_regex_prior_to_src_url + r')(/.*)')
+
+  version_info = (FetchSVNRevision(directory, svn_url_regex) or
+                  FetchGitSVNRevision(directory, svn_url_regex, go_deeper) or
+                  FetchGitRevision(directory))
+  if not version_info:
+    if default_lastchange and os.path.exists(default_lastchange):
+      revision = open(default_lastchange, 'r').read().strip()
+      version_info = VersionInfo(None, revision)
+    else:
+      version_info = VersionInfo(None, None)
+  return version_info
+
+def GetHeaderGuard(path):
+  """
+  Returns the header #define guard for the given file path.
+  This treats everything after the last instance of "src/" as being a
+  relevant part of the guard. If there is no "src/", then the entire path
+  is used.
+  """
+  src_index = path.rfind('src/')
+  if src_index != -1:
+    guard = path[src_index + 4:]
+  else:
+    guard = path
+  guard = guard.upper()
+  return guard.replace('/', '_').replace('.', '_').replace('\\', '_') + '_'
+
+def GetHeaderContents(path, define, version):
+  """
+  Returns the contents that the header file should have to indicate the given
+  revision. Note that the #define is specified as a string, even though it's
+  currently always an SVN revision number, in case we need to move to git hashes.
+  """
+  header_guard = GetHeaderGuard(path)
+
+  header_contents = """/* Generated by lastchange.py, do not edit.*/
+
+#ifndef %(header_guard)s
+#define %(header_guard)s
+
+#define %(define)s "%(version)s"
+
+#endif  // %(header_guard)s
+"""
+  header_contents = header_contents % { 'header_guard': header_guard,
+                                        'define': define,
+                                        'version': version }
+  return header_contents
+
+def WriteIfChanged(file_name, contents):
+  """
+  Writes the specified contents to the specified file_name
+  iff the contents are different from the current contents.
+  """
+  try:
+    old_contents = open(file_name, 'r').read()
+  except EnvironmentError:
+    pass
+  else:
+    if contents == old_contents:
+      return
+    os.unlink(file_name)
+  open(file_name, 'w').write(contents)
+
+
+def main(argv=None):
+  if argv is None:
+    argv = sys.argv
+
+  parser = optparse.OptionParser(usage="lastchange.py [options]")
+  parser.add_option("-d", "--default-lastchange", metavar="FILE",
+                    help="Default last change input FILE.")
+  parser.add_option("-m", "--version-macro",
+                    help="Name of C #define when using --header. Defaults to " +
+                    "LAST_CHANGE.",
+                    default="LAST_CHANGE")
+  parser.add_option("-o", "--output", metavar="FILE",
+                    help="Write last change to FILE. " +
+                    "Can be combined with --header to write both files.")
+  parser.add_option("", "--header", metavar="FILE",
+                    help="Write last change to FILE as a C/C++ header. " +
+                    "Can be combined with --output to write both files.")
+  parser.add_option("--revision-only", action='store_true',
+                    help="Just print the SVN revision number. Overrides any " +
+                    "file-output-related options.")
+  parser.add_option("-s", "--source-dir", metavar="DIR",
+                    help="Use repository in the given directory.")
+  parser.add_option("--git-svn-go-deeper", action='store_true',
+                    help="In a Git-SVN repo, dig down to the last committed " +
+                    "SVN change (historic behaviour).")
+  opts, args = parser.parse_args(argv[1:])
+
+  out_file = opts.output
+  header = opts.header
+
+  if args and out_file is None:
+    out_file = args.pop(0)
+  if args:
+    sys.stderr.write('Unexpected arguments: %r\n\n' % args)
+    parser.print_help()
+    sys.exit(2)
+
+  if opts.source_dir:
+    src_dir = opts.source_dir
+  else:
+    src_dir = os.path.dirname(os.path.abspath(__file__))
+
+  version_info = FetchVersionInfo(opts.default_lastchange,
+                                  directory=src_dir,
+                                  go_deeper=opts.git_svn_go_deeper)
+
+  if version_info.revision is None:
+    version_info.revision = '0'
+
+  if opts.revision_only:
+    print version_info.revision
+  else:
+    contents = "LASTCHANGE=%s\n" % version_info.revision
+    if not out_file and not opts.header:
+      sys.stdout.write(contents)
+    else:
+      if out_file:
+        WriteIfChanged(out_file, contents)
+      if header:
+        WriteIfChanged(header,
+                       GetHeaderContents(header, opts.version_macro,
+                                         version_info.revision))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/util/lib/common/__init__.py b/build/util/lib/common/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/util/lib/common/__init__.py
diff --git a/build/util/lib/common/perf_result_data_type.py b/build/util/lib/common/perf_result_data_type.py
new file mode 100644
index 0000000..67b550a
--- /dev/null
+++ b/build/util/lib/common/perf_result_data_type.py
@@ -0,0 +1,20 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+DEFAULT = 'default'
+UNIMPORTANT = 'unimportant'
+HISTOGRAM = 'histogram'
+UNIMPORTANT_HISTOGRAM = 'unimportant-histogram'
+INFORMATIONAL = 'informational'
+
+ALL_TYPES = [DEFAULT, UNIMPORTANT, HISTOGRAM, UNIMPORTANT_HISTOGRAM,
+             INFORMATIONAL]
+
+
+def IsValidType(datatype):
+  return datatype in ALL_TYPES
+
+
+def IsHistogram(datatype):
+  return (datatype == HISTOGRAM or datatype == UNIMPORTANT_HISTOGRAM)
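+
+# Example: IsHistogram('histogram') and IsHistogram('unimportant-histogram')
+# are True; IsHistogram('default') is False.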
diff --git a/build/util/lib/common/perf_tests_results_helper.py b/build/util/lib/common/perf_tests_results_helper.py
new file mode 100644
index 0000000..6cb058b
--- /dev/null
+++ b/build/util/lib/common/perf_tests_results_helper.py
@@ -0,0 +1,166 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+import sys
+
+import json
+import logging
+import math
+
+import perf_result_data_type
+
+
+# Mapping from result type to test output
+RESULT_TYPES = {perf_result_data_type.UNIMPORTANT: 'RESULT ',
+                perf_result_data_type.DEFAULT: '*RESULT ',
+                perf_result_data_type.INFORMATIONAL: '',
+                perf_result_data_type.UNIMPORTANT_HISTOGRAM: 'HISTOGRAM ',
+                perf_result_data_type.HISTOGRAM: '*HISTOGRAM '}
+
+
+def _EscapePerfResult(s):
+  """Escapes |s| for use in a perf result."""
+  return re.sub('[\:|=/#&,]', '_', s)
+
+
+def FlattenList(values):
+  """Returns a simple list without sub-lists."""
+  ret = []
+  for entry in values:
+    if isinstance(entry, list):
+      ret.extend(FlattenList(entry))
+    else:
+      ret.append(entry)
+  return ret
+
+
+def GeomMeanAndStdDevFromHistogram(histogram_json):
+  histogram = json.loads(histogram_json)
+  # Handle empty histograms gracefully.
+  if 'buckets' not in histogram:
+    return 0.0, 0.0
+  count = 0
+  sum_of_logs = 0
+  for bucket in histogram['buckets']:
+    if 'high' in bucket:
+      bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0
+    else:
+      bucket['mean'] = bucket['low']
+    if bucket['mean'] > 0:
+      sum_of_logs += math.log(bucket['mean']) * bucket['count']
+      count += bucket['count']
+
+  if count == 0:
+    return 0.0, 0.0
+
+  sum_of_squares = 0
+  geom_mean = math.exp(sum_of_logs / count)
+  for bucket in histogram['buckets']:
+    if bucket['mean'] > 0:
+      sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count']
+  return geom_mean, math.sqrt(sum_of_squares / count)
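+
+# For example (hypothetical histogram JSON), two buckets with means 2 and 8,
+# count 1 each:
+#
+#   GeomMeanAndStdDevFromHistogram(
+#       '{"buckets": [{"low": 2, "count": 1}, {"low": 8, "count": 1}]}')
+#
+# yields geometric mean exp((ln 2 + ln 8) / 2) = 4.0 and deviation
+# sqrt(((2 - 4)**2 + (8 - 4)**2) / 2) = sqrt(10) ~= 3.16.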
+
+
+def _ValueToString(v):
+  # Special case for floats so we don't print using scientific notation.
+  if isinstance(v, float):
+    return '%f' % v
+  else:
+    return str(v)
+
+
+def _MeanAndStdDevFromList(values):
+  avg = None
+  sd = None
+  if len(values) > 1:
+    try:
+      value = '[%s]' % ','.join([_ValueToString(v) for v in values])
+      avg = sum([float(v) for v in values]) / len(values)
+      sqdiffs = [(float(v) - avg) ** 2 for v in values]
+      variance = sum(sqdiffs) / (len(values) - 1)
+      sd = math.sqrt(variance)
+    except ValueError:
+      value = ', '.join(values)
+  else:
+    value = values[0]
+  return value, avg, sd
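+
+# For example, _MeanAndStdDevFromList([1.0, 3.0]) returns
+# ('[1.000000,3.000000]', 2.0, 1.414...); note the sample (n - 1) variance.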
+
+
+def PrintPages(page_list):
+  """Prints list of pages to stdout in the format required by perf tests."""
+  print 'Pages: [%s]' % ','.join([_EscapePerfResult(p) for p in page_list])
+
+
+def PrintPerfResult(measurement, trace, values, units,
+                    result_type=perf_result_data_type.DEFAULT,
+                    print_to_stdout=True):
+  """Prints numerical data to stdout in the format required by perf tests.
+
+  The string args may be empty but they must not contain any colons (:) or
+  equals signs (=).
+  This is parsed by the buildbot using:
+  http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/process_log_utils.py
+
+  Args:
+    measurement: A description of the quantity being measured, e.g. "vm_peak".
+        On the dashboard, this maps to a particular graph. Mandatory.
+    trace: A description of the particular data point, e.g. "reference".
+        On the dashboard, this maps to a particular "line" in the graph.
+        Mandatory.
+    values: A list of numeric measured values. An N-dimensional list will be
+        flattened and treated as a simple list.
+    units: A description of the units of measure, e.g. "bytes".
+    result_type: Accepts values of perf_result_data_type.ALL_TYPES.
+    print_to_stdout: If True, also prints the output to stdout (the
+        formatted result is returned to the caller in either case).
+
+  Returns:
+    String of the formatted perf result.
+  """
+  assert perf_result_data_type.IsValidType(result_type), \
+         'result type: %s is invalid' % result_type
+
+  trace_name = _EscapePerfResult(trace)
+
+  if (result_type == perf_result_data_type.UNIMPORTANT or
+      result_type == perf_result_data_type.DEFAULT or
+      result_type == perf_result_data_type.INFORMATIONAL):
+    assert isinstance(values, list)
+    assert '/' not in measurement
+    flattened_values = FlattenList(values)
+    assert len(flattened_values)
+    value, avg, sd = _MeanAndStdDevFromList(flattened_values)
+    output = '%s%s: %s%s%s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        # Omit the equals sign when the trace is empty; this usually happens
+        # when the measurement name alone describes the result.
+        '= ' if trace_name else '',
+        value,
+        units)
+  else:
+    assert perf_result_data_type.IsHistogram(result_type)
+    assert isinstance(values, list)
+    # The histograms can only be printed individually, there's no computation
+    # across different histograms.
+    assert len(values) == 1
+    value = values[0]
+    output = '%s%s: %s= %s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        value,
+        units)
+    avg, sd = GeomMeanAndStdDevFromHistogram(value)
+
+  if avg:
+    output += '\nAvg %s: %f%s' % (measurement, avg, units)
+  if sd:
+    output += '\nSd  %s: %f%s' % (measurement, sd, units)
+  if print_to_stdout:
+    print output
+    sys.stdout.flush()
+  return output
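+
+# For example (hypothetical measurement), the call
+#
+#   PrintPerfResult('vm_peak', 'reference', [1024], 'bytes')
+#
+# prints and returns the single line:
+#
+#   *RESULT vm_peak: reference= 1024 bytes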
diff --git a/build/util/lib/common/unittest_util.py b/build/util/lib/common/unittest_util.py
new file mode 100644
index 0000000..189f587
--- /dev/null
+++ b/build/util/lib/common/unittest_util.py
@@ -0,0 +1,153 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for dealing with the python unittest module."""
+
+import fnmatch
+import sys
+import unittest
+
+
+class _TextTestResult(unittest._TextTestResult):
+  """A test result class that can print formatted text results to a stream.
+
+  Results are printed in conformance with the gtest output format, e.g.:
+  [ RUN        ] autofill.AutofillTest.testAutofillInvalid: "test desc."
+  [         OK ] autofill.AutofillTest.testAutofillInvalid
+  [ RUN        ] autofill.AutofillTest.testFillProfile: "test desc."
+  [         OK ] autofill.AutofillTest.testFillProfile
+  [ RUN        ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test."
+  [         OK ] autofill.AutofillTest.testFillProfileCrazyCharacters
+  """
+  def __init__(self, stream, descriptions, verbosity):
+    unittest._TextTestResult.__init__(self, stream, descriptions, verbosity)
+    self._fails = set()
+
+  def _GetTestURI(self, test):
+    return '%s.%s.%s' % (test.__class__.__module__,
+                         test.__class__.__name__,
+                         test._testMethodName)
+
+  def getDescription(self, test):
+    return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())
+
+  def startTest(self, test):
+    unittest.TestResult.startTest(self, test)
+    self.stream.writeln('[ RUN        ] %s' % self.getDescription(test))
+
+  def addSuccess(self, test):
+    unittest.TestResult.addSuccess(self, test)
+    self.stream.writeln('[         OK ] %s' % self._GetTestURI(test))
+
+  def addError(self, test, err):
+    unittest.TestResult.addError(self, test, err)
+    self.stream.writeln('[      ERROR ] %s' % self._GetTestURI(test))
+    self._fails.add(self._GetTestURI(test))
+
+  def addFailure(self, test, err):
+    unittest.TestResult.addFailure(self, test, err)
+    self.stream.writeln('[     FAILED ] %s' % self._GetTestURI(test))
+    self._fails.add(self._GetTestURI(test))
+
+  def getRetestFilter(self):
+    return ':'.join(self._fails)
+
+
+class TextTestRunner(unittest.TextTestRunner):
+  """Test Runner for displaying test results in textual format.
+
+  Results are displayed in conformance with google test output.
+  """
+
+  def __init__(self, verbosity=1):
+    unittest.TextTestRunner.__init__(self, stream=sys.stderr,
+                                     verbosity=verbosity)
+
+  def _makeResult(self):
+    return _TextTestResult(self.stream, self.descriptions, self.verbosity)
+
+
+def GetTestsFromSuite(suite):
+  """Returns all the tests from a given test suite."""
+  tests = []
+  for x in suite:
+    if isinstance(x, unittest.TestSuite):
+      tests += GetTestsFromSuite(x)
+    else:
+      tests += [x]
+  return tests
+
+
+def GetTestNamesFromSuite(suite):
+  """Returns a list of every test name in the given suite."""
+  return [GetTestName(x) for x in GetTestsFromSuite(suite)]
+
+
+def GetTestName(test):
+  """Gets the test name of the given unittest test."""
+  return '.'.join([test.__class__.__module__,
+                   test.__class__.__name__,
+                   test._testMethodName])
+
+
+def FilterTestSuite(suite, gtest_filter):
+  """Returns a new filtered tests suite based on the given gtest filter.
+
+  See http://code.google.com/p/googletest/wiki/AdvancedGuide
+  for gtest_filter specification.
+  """
+  return unittest.TestSuite(FilterTests(GetTestsFromSuite(suite), gtest_filter))
+
+
+def FilterTests(all_tests, gtest_filter):
+  """Filter a list of tests based on the given gtest filter.
+
+  Args:
+    all_tests: List of tests (unittest.TestSuite)
+    gtest_filter: Filter to apply.
+
+  Returns:
+    Filtered subset of the given list of tests.
+  """
+  test_names = [GetTestName(test) for test in all_tests]
+  filtered_names = FilterTestNames(test_names, gtest_filter)
+  return [test for test in all_tests if GetTestName(test) in filtered_names]
+
+
+def FilterTestNames(all_tests, gtest_filter):
+  """Filter a list of test names based on the given gtest filter.
+
+  See http://code.google.com/p/googletest/wiki/AdvancedGuide
+  for gtest_filter specification.
+
+  Args:
+    all_tests: List of test names.
+    gtest_filter: Filter to apply.
+
+  Returns:
+    Filtered subset of the given list of test names.
+  """
+  pattern_groups = gtest_filter.split('-')
+  positive_patterns = ['*']
+  if pattern_groups[0]:
+    positive_patterns = pattern_groups[0].split(':')
+  negative_patterns = None
+  if len(pattern_groups) > 1:
+    negative_patterns = pattern_groups[1].split(':')
+
+  tests = []
+  for test in all_tests:
+    # A test name must be matched by at least one positive pattern.
+    for pattern in positive_patterns:
+      if fnmatch.fnmatch(test, pattern):
+        break
+    else:
+      continue
+    # Test name must not be matched by any negative patterns.
+    for pattern in negative_patterns or []:
+      if fnmatch.fnmatch(test, pattern):
+        break
+    else:
+      tests += [test]
+  return tests
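+
+# For example:
+#
+#   >>> FilterTestNames(['a.B.one', 'a.B.two', 'c.D.three'], 'a.*-*.two')
+#   ['a.B.one']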
diff --git a/build/util/lib/common/util.py b/build/util/lib/common/util.py
new file mode 100644
index 0000000..a415b1f
--- /dev/null
+++ b/build/util/lib/common/util.py
@@ -0,0 +1,151 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generic utilities for all python scripts."""
+
+import atexit
+import httplib
+import os
+import signal
+import socket
+import stat
+import subprocess
+import sys
+import tempfile
+import urlparse
+
+
+def GetPlatformName():
+  """Return a string to be used in paths for the platform."""
+  if IsWindows():
+    return 'win'
+  if IsMac():
+    return 'mac'
+  if IsLinux():
+    return 'linux'
+  raise NotImplementedError('Unknown platform "%s".' % sys.platform)
+
+
+def IsWindows():
+  return sys.platform == 'cygwin' or sys.platform.startswith('win')
+
+
+def IsLinux():
+  return sys.platform.startswith('linux')
+
+
+def IsMac():
+  return sys.platform.startswith('darwin')
+
+
+def _DeleteDir(path):
+  """Deletes a directory recursively, which must exist."""
+  # Don't use shutil.rmtree because it can't delete read-only files on Win.
+  for root, dirs, files in os.walk(path, topdown=False):
+    for name in files:
+      filename = os.path.join(root, name)
+      os.chmod(filename, stat.S_IWRITE)
+      os.remove(filename)
+    for name in dirs:
+      os.rmdir(os.path.join(root, name))
+  os.rmdir(path)
+
+
+def Delete(path):
+  """Deletes the given file or directory (recursively), which must exist."""
+  if os.path.isdir(path):
+    _DeleteDir(path)
+  else:
+    os.remove(path)
+
+
+def MaybeDelete(path):
+  """Deletes the given file or directory (recurisvely), if it exists."""
+  if os.path.exists(path):
+    Delete(path)
+
+
+def MakeTempDir(parent_dir=None):
+  """Creates a temporary directory and returns an absolute path to it.
+
+  The temporary directory is automatically deleted when the python interpreter
+  exits normally.
+
+  Args:
+    parent_dir: the directory to create the temp dir in. If None, the system
+                temp dir is used.
+
+  Returns:
+    The absolute path to the temporary directory.
+  """
+  path = tempfile.mkdtemp(dir=parent_dir)
+  atexit.register(MaybeDelete, path)
+  return path
+
+
+def Unzip(zip_path, output_dir):
+  """Unzips the given zip file using a system installed unzip tool.
+
+  Args:
+    zip_path: zip file to unzip.
+    output_dir: directory to unzip the contents of the zip file. The directory
+                must exist.
+
+  Raises:
+    RuntimeError if the unzip operation fails.
+  """
+  if IsWindows():
+    unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y']
+  else:
+    unzip_cmd = ['unzip', '-o']
+  unzip_cmd += [zip_path]
+  if RunCommand(unzip_cmd, output_dir) != 0:
+    raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir))
+
+
+def Kill(pid):
+  """Terminate the given pid."""
+  if IsWindows():
+    subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
+  else:
+    os.kill(pid, signal.SIGTERM)
+
+
+def RunCommand(cmd, cwd=None):
+  """Runs the given command and returns the exit code.
+
+  Args:
+    cmd: list of command arguments.
+    cwd: working directory to execute the command, or None if the current
+         working directory should be used.
+
+  Returns:
+    The exit code of the command.
+  """
+  process = subprocess.Popen(cmd, cwd=cwd)
+  process.wait()
+  return process.returncode
+
+
+def DoesUrlExist(url):
+  """Determines whether a resource exists at the given URL.
+
+  Args:
+    url: URL to be verified.
+
+  Returns:
+    True if url exists, otherwise False.
+  """
+  parsed = urlparse.urlparse(url)
+  try:
+    conn = httplib.HTTPConnection(parsed.netloc)
+    conn.request('HEAD', parsed.path)
+    response = conn.getresponse()
+  except (socket.gaierror, socket.error):
+    return False
+  finally:
+    conn.close()
+  # Follow both permanent (301) and temporary (302) redirects.
+  if response.status == 302 or response.status == 301:
+    return DoesUrlExist(response.getheader('location'))
+  return response.status == 200
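+
+# Example (hypothetical URL): DoesUrlExist('http://example.com/file.zip')
+# issues a HEAD request, follows 301/302 redirects, and returns True only if
+# the final response is a 200.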
diff --git a/build/util/version.gypi b/build/util/version.gypi
new file mode 100644
index 0000000..327a5c2
--- /dev/null
+++ b/build/util/version.gypi
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    'variables': {
+      'version_py_path': '<(DEPTH)/build/util/version.py',
+      'version_path': '<(DEPTH)/chrome/VERSION',
+      'lastchange_path': '<(DEPTH)/build/util/LASTCHANGE',
+    },
+    'version_py_path': '<(version_py_path)',
+    'version_path': '<(version_path)',
+    'lastchange_path': '<(lastchange_path)',
+    'version_full':
+        '<!(python <(version_py_path) -f <(version_path) -t "@MAJOR@.@MINOR@.@BUILD@.@PATCH@")',
+    'version_mac_dylib':
+        '<!(python <(version_py_path) -f <(version_path) -t "@BUILD@.@PATCH_HI@.@PATCH_LO@" -e "PATCH_HI=int(PATCH)/256" -e "PATCH_LO=int(PATCH)%256")',
+  },  # variables
+}
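+# For example, with a VERSION file containing (hypothetical values):
+#
+#   MAJOR=1
+#   MINOR=18
+#   BUILD=0
+#   PATCH=4
+#
+# 'version_full' expands to "1.18.0.4" and 'version_mac_dylib' to "0.0.4"
+# (PATCH_HI = 4 / 256 = 0, PATCH_LO = 4 % 256 = 4).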
diff --git a/build/util/version.py b/build/util/version.py
new file mode 100755
index 0000000..4d3691a
--- /dev/null
+++ b/build/util/version.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+version.py -- Chromium version string substitution utility.
+"""
+
+import argparse
+import os
+import sys
+
+
+def fetch_values_from_file(values_dict, file_name):
+  """
+  Fetches KEYWORD=VALUE settings from the specified file.
+
+  Everything to the left of the first '=' is the keyword,
+  everything to the right is the value.  No stripping of
+  white space, so beware.
+
+  The file must exist, otherwise you get the Python exception from open().
+  """
+  for line in open(file_name, 'r').readlines():
+    key, val = line.rstrip('\r\n').split('=', 1)
+    values_dict[key] = val
+
+
+def fetch_values(file_list):
+  """
+  Returns a dictionary of values to be used for substitution, populating
+  the dictionary with KEYWORD=VALUE settings from the files in 'file_list'.
+
+  Explicitly adds the following value from internal calculations:
+
+    OFFICIAL_BUILD
+  """
+  CHROME_BUILD_TYPE = os.environ.get('CHROME_BUILD_TYPE')
+  if CHROME_BUILD_TYPE == '_official':
+    official_build = '1'
+  else:
+    official_build = '0'
+
+  values = dict(
+    OFFICIAL_BUILD = official_build,
+  )
+
+  for file_name in file_list:
+    fetch_values_from_file(values, file_name)
+
+  return values
+
+
+def subst_template(contents, values):
+  """
+  Returns the template with substituted values from the specified dictionary.
+
+  Keywords to be substituted are surrounded by '@':  @KEYWORD@.
+
+  No attempt is made to avoid recursive substitution.  The order of
+  evaluation is arbitrary, following the iteration order of the Python
+  dictionary, so do NOT substitute a value that contains any @KEYWORD@
+  strings and expect it to be recursively substituted.
+  """
+  for key, val in values.iteritems():
+    try:
+      contents = contents.replace('@' + key + '@', val)
+    except TypeError:
+      print repr(key), repr(val)
+  return contents
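+
+# For example:
+#
+#   >>> subst_template('@MAJOR@.@MINOR@', {'MAJOR': '1', 'MINOR': '18'})
+#   '1.18'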
+
+
+def subst_file(file_name, values):
+  """
+  Returns the contents of the specified file_name with substituted
+  values from the specified dictionary.
+
+  This is like subst_template, except it operates on a file.
+  """
+  template = open(file_name, 'r').read()
+  return subst_template(template, values)
+
+
+def write_if_changed(file_name, contents):
+  """
+  Writes the specified contents to the specified file_name
+  iff the contents are different than the current contents.
+  """
+  try:
+    old_contents = open(file_name, 'r').read()
+  except EnvironmentError:
+    pass
+  else:
+    if contents == old_contents:
+      return
+    os.unlink(file_name)
+  open(file_name, 'w').write(contents)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-f', '--file', action='append', default=[],
+                      help='Read variables from FILE.')
+  parser.add_argument('-i', '--input', default=None,
+                      help='Read strings to substitute from FILE.')
+  parser.add_argument('-o', '--output', default=None,
+                      help='Write substituted strings to FILE.')
+  parser.add_argument('-t', '--template', default=None,
+                      help='Use TEMPLATE as the strings to substitute.')
+  parser.add_argument('-e', '--eval', action='append', default=[],
+                      help='Evaluate VAL after reading variables. Can be used '
+                           'to synthesize variables, e.g. '
+                           '-e \'PATCH_HI=int(PATCH)/256\'.')
+  parser.add_argument('args', nargs=argparse.REMAINDER,
+                      help='For compatibility: INPUT and OUTPUT can be '
+                           'passed as positional arguments.')
+  options = parser.parse_args()
+
+  evals = {}
+  for expression in options.eval:
+    try:
+      evals.update(dict([expression.split('=', 1)]))
+    except ValueError:
+      parser.error('-e requires VAR=VAL')
+
+  # Compatibility with old versions that considered the first two positional
+  # arguments shorthands for --input and --output.
+  while len(options.args) and (options.input is None or
+                               options.output is None):
+    if options.input is None:
+      options.input = options.args.pop(0)
+    elif options.output is None:
+      options.output = options.args.pop(0)
+  if options.args:
+    parser.error('Unexpected arguments: %r' % options.args)
+
+  values = fetch_values(options.file)
+  for key, val in evals.iteritems():
+    values[key] = str(eval(val, globals(), values))
+
+  if options.template is not None:
+    contents = subst_template(options.template, values)
+  elif options.input:
+    contents = subst_file(options.input, values)
+  else:
+    # Generate a default set of version information.
+    contents = """MAJOR=%(MAJOR)s
+MINOR=%(MINOR)s
+BUILD=%(BUILD)s
+PATCH=%(PATCH)s
+LASTCHANGE=%(LASTCHANGE)s
+OFFICIAL_BUILD=%(OFFICIAL_BUILD)s
+""" % values
+
+  if options.output is not None:
+    write_if_changed(options.output, contents)
+  else:
+    print contents
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
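+
+# Example command line (hypothetical paths), mirroring version.gypi:
+#
+#   python build/util/version.py -f chrome/VERSION \
+#       -t "@MAJOR@.@MINOR@.@BUILD@.@PATCH@"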
diff --git a/build/vs_toolchain.py b/build/vs_toolchain.py
new file mode 100644
index 0000000..16f4477
--- /dev/null
+++ b/build/vs_toolchain.py
@@ -0,0 +1,259 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import pipes
+import shutil
+import subprocess
+import sys
+
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.insert(1, os.path.join(chrome_src, 'tools'))
+sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+json_data_file = os.path.join(script_dir, 'win_toolchain.json')
+
+
+import gyp
+
+
+def SetEnvironmentAndGetRuntimeDllDirs():
+  """Sets up os.environ to use the depot_tools VS toolchain with gyp, and
+  returns the location of the VS runtime DLLs so they can be copied into
+  the output directory after gyp generation.
+  """
+  vs2013_runtime_dll_dirs = None
+  depot_tools_win_toolchain = \
+      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+  if sys.platform in ('win32', 'cygwin') and depot_tools_win_toolchain:
+    if not os.path.exists(json_data_file):
+      Update()
+    with open(json_data_file, 'r') as tempf:
+      toolchain_data = json.load(tempf)
+
+    toolchain = toolchain_data['path']
+    version = toolchain_data['version']
+    win_sdk = toolchain_data.get('win_sdk')
+    if not win_sdk:
+      win_sdk = toolchain_data['win8sdk']
+    wdk = toolchain_data['wdk']
+    # TODO(scottmg): The order unfortunately matters in these. They should be
+    # split into separate keys for x86 and x64. (See CopyVsRuntimeDlls call
+    # below). http://crbug.com/345992
+    vs2013_runtime_dll_dirs = toolchain_data['runtime_dirs']
+
+    os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
+    os.environ['GYP_MSVS_VERSION'] = version
+    # We need to make sure windows_sdk_path is set to the automated
+    # toolchain values in GYP_DEFINES, but don't want to override any
+    # other values there.
+    gyp_defines_dict = gyp.NameValueListToDict(gyp.ShlexEnv('GYP_DEFINES'))
+    gyp_defines_dict['windows_sdk_path'] = win_sdk
+    os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v)))
+        for k, v in gyp_defines_dict.iteritems())
+    os.environ['WINDOWSSDKDIR'] = win_sdk
+    os.environ['WDK_DIR'] = wdk
+    # Include the VS runtime in the PATH in case it's not machine-installed.
+    runtime_path = ';'.join(vs2013_runtime_dll_dirs)
+    os.environ['PATH'] = runtime_path + ';' + os.environ['PATH']
+  return vs2013_runtime_dll_dirs
+
+
+def _VersionNumber():
+  """Gets the standard version number ('120', '140', etc.) based on
+  GYP_MSVS_VERSION."""
+  if os.environ['GYP_MSVS_VERSION'] == '2013':
+    return '120'
+  elif os.environ['GYP_MSVS_VERSION'] == '2015':
+    return '140'
+  else:
+    raise ValueError('Unexpected GYP_MSVS_VERSION')
+
+
+def _CopyRuntimeImpl(target, source):
+  """Copy |source| to |target| if it doesn't already exist or if it
+  needs to be updated.
+  """
+  if (os.path.isdir(os.path.dirname(target)) and
+      (not os.path.isfile(target) or
+      os.stat(target).st_mtime != os.stat(source).st_mtime)):
+    print 'Copying %s to %s...' % (source, target)
+    if os.path.exists(target):
+      os.unlink(target)
+    shutil.copy2(source, target)
+
+
+def _CopyRuntime2013(target_dir, source_dir, dll_pattern):
+  """Copy both the msvcr and msvcp runtime DLLs, only if the target doesn't
+  exist, but the target directory does exist."""
+  for file_part in ('p', 'r'):
+    dll = dll_pattern % file_part
+    target = os.path.join(target_dir, dll)
+    source = os.path.join(source_dir, dll)
+    _CopyRuntimeImpl(target, source)
+
+
+def _CopyRuntime2015(target_dir, source_dir, dll_pattern):
+  """Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't
+  exist, but the target directory does exist."""
+  for file_part in ('msvcp', 'vccorlib'):
+    dll = dll_pattern % file_part
+    target = os.path.join(target_dir, dll)
+    source = os.path.join(source_dir, dll)
+    _CopyRuntimeImpl(target, source)
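+
+# For example, with dll_pattern 'msvc%s120.dll' the 2013 copier produces
+# 'msvcp120.dll' and 'msvcr120.dll'; with '%s140.dll' the 2015 copier
+# produces 'msvcp140.dll' and 'vccorlib140.dll'.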
+
+
+def CopyVsRuntimeDlls(output_dir, runtime_dirs):
+  """Copies the VS runtime DLLs from the given |runtime_dirs| to the output
+  directory so that even if not system-installed, built binaries are likely to
+  be able to run.
+
+  This needs to be run after gyp has been run so that the expected target
+  output directories are already created.
+  """
+  assert sys.platform.startswith(('win32', 'cygwin'))
+
+  x86, x64 = runtime_dirs
+  out_debug = os.path.join(output_dir, 'Debug')
+  out_debug_nacl64 = os.path.join(output_dir, 'Debug', 'x64')
+  out_release = os.path.join(output_dir, 'Release')
+  out_release_nacl64 = os.path.join(output_dir, 'Release', 'x64')
+  out_debug_x64 = os.path.join(output_dir, 'Debug_x64')
+  out_release_x64 = os.path.join(output_dir, 'Release_x64')
+
+  if os.path.exists(out_debug) and not os.path.exists(out_debug_nacl64):
+    os.makedirs(out_debug_nacl64)
+  if os.path.exists(out_release) and not os.path.exists(out_release_nacl64):
+    os.makedirs(out_release_nacl64)
+  if os.environ.get('GYP_MSVS_VERSION') == '2015':
+    _CopyRuntime2015(out_debug,          x86, '%s140d.dll')
+    _CopyRuntime2015(out_release,        x86, '%s140.dll')
+    _CopyRuntime2015(out_debug_x64,      x64, '%s140d.dll')
+    _CopyRuntime2015(out_release_x64,    x64, '%s140.dll')
+    _CopyRuntime2015(out_debug_nacl64,   x64, '%s140d.dll')
+    _CopyRuntime2015(out_release_nacl64, x64, '%s140.dll')
+  else:
+    # VS2013 is the default.
+    _CopyRuntime2013(out_debug,          x86, 'msvc%s120d.dll')
+    _CopyRuntime2013(out_release,        x86, 'msvc%s120.dll')
+    _CopyRuntime2013(out_debug_x64,      x64, 'msvc%s120d.dll')
+    _CopyRuntime2013(out_release_x64,    x64, 'msvc%s120.dll')
+    _CopyRuntime2013(out_debug_nacl64,   x64, 'msvc%s120d.dll')
+    _CopyRuntime2013(out_release_nacl64, x64, 'msvc%s120.dll')
+
+  # Copy the PGO runtime library to the release directories.
+  if os.environ.get('GYP_MSVS_OVERRIDE_PATH'):
+    pgo_x86_runtime_dir = os.path.join(os.environ.get('GYP_MSVS_OVERRIDE_PATH'),
+                                       'VC', 'bin')
+    pgo_x64_runtime_dir = os.path.join(pgo_x86_runtime_dir, 'amd64')
+    pgo_runtime_dll = 'pgort' + _VersionNumber() + '.dll'
+    source_x86 = os.path.join(pgo_x86_runtime_dir, pgo_runtime_dll)
+    if os.path.exists(source_x86):
+      _CopyRuntimeImpl(os.path.join(out_release, pgo_runtime_dll), source_x86)
+    source_x64 = os.path.join(pgo_x64_runtime_dir, pgo_runtime_dll)
+    if os.path.exists(source_x64):
+      _CopyRuntimeImpl(os.path.join(out_release_x64, pgo_runtime_dll),
+                       source_x64)
+
+
+def CopyDlls(target_dir, configuration, target_cpu):
+  """Copy the VS runtime DLLs into the requested directory as needed.
+
+  configuration is one of 'Debug' or 'Release'.
+  target_cpu is one of 'x86' or 'x64'.
+
+  The debug configuration gets both the debug and release DLLs; the
+  release config only the latter.
+  """
+  vs2013_runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
+  if not vs2013_runtime_dll_dirs:
+    return
+
+  x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
+  runtime_dir = x64_runtime if target_cpu == 'x64' else x86_runtime
+  _CopyRuntime2013(
+      target_dir, runtime_dir, 'msvc%s' + _VersionNumber() + '.dll')
+  if configuration == 'Debug':
+    _CopyRuntime2013(
+        target_dir, runtime_dir, 'msvc%s' + _VersionNumber() + 'd.dll')
+
+
+def _GetDesiredVsToolchainHashes():
+  """Load a list of SHA1s corresponding to the toolchains that we want installed
+  to build with."""
+  # TODO(scottmg): If explicitly set to VS2015 override hashes to the VS2015 RC
+  # toolchain. http://crbug.com/492774.
+  if os.environ.get('GYP_MSVS_VERSION') == '2015':
+    return ['40721575c85171cea5d7afe5ec17bd108a94796e']
+  else:
+    # Default to VS2013.
+    return ['ee7d718ec60c2dc5d255bbe325909c2021a7efef']
+
+
+def Update():
+  """Requests an update of the toolchain to the specific hashes we have at
+  this revision. The update outputs a .json of the various configuration
+  information required to pass to gyp which we use in |GetToolchainDir()|.
+  """
+  depot_tools_win_toolchain = \
+      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+  if sys.platform in ('win32', 'cygwin') and depot_tools_win_toolchain:
+    import find_depot_tools
+    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
+    get_toolchain_args = [
+        sys.executable,
+        os.path.join(depot_tools_path,
+                    'win_toolchain',
+                    'get_toolchain_if_necessary.py'),
+        '--output-json', json_data_file,
+      ] + _GetDesiredVsToolchainHashes()
+    subprocess.check_call(get_toolchain_args)
+
+  return 0
+
+
+def GetToolchainDir():
+  """Gets location information about the current toolchain (must have been
+  previously updated by 'update'). This is used for the GN build."""
+  runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
+
+  # If WINDOWSSDKDIR is not set, search the default SDK path and set it.
+  if 'WINDOWSSDKDIR' not in os.environ:
+    default_sdk_path = 'C:\\Program Files (x86)\\Windows Kits\\8.1'
+    if os.path.isdir(default_sdk_path):
+      os.environ['WINDOWSSDKDIR'] = default_sdk_path
+
+  print '''vs_path = "%s"
+sdk_path = "%s"
+vs_version = "%s"
+wdk_dir = "%s"
+runtime_dirs = "%s"
+''' % (
+      os.environ['GYP_MSVS_OVERRIDE_PATH'],
+      os.environ['WINDOWSSDKDIR'],
+      os.environ['GYP_MSVS_VERSION'],
+      os.environ.get('WDK_DIR', ''),
+      ';'.join(runtime_dll_dirs or ['None']))
+
+
+def main():
+  if not sys.platform.startswith(('win32', 'cygwin')):
+    return 0
+  commands = {
+      'update': Update,
+      'get_toolchain_dir': GetToolchainDir,
+      'copy_dlls': CopyDlls,
+  }
+  if len(sys.argv) < 2 or sys.argv[1] not in commands:
+    print >>sys.stderr, 'Expected one of: %s' % ', '.join(commands)
+    return 1
+  return commands[sys.argv[1]](*sys.argv[2:])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
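+
+# Typical invocations (a no-op on non-Windows hosts):
+#
+#   python build/vs_toolchain.py update
+#   python build/vs_toolchain.py get_toolchain_dir
+#   python build/vs_toolchain.py copy_dlls <target-dir> <Debug|Release> <x86|x64>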
diff --git a/build/whitespace_file.txt b/build/whitespace_file.txt
new file mode 100644
index 0000000..ea82f4e
--- /dev/null
+++ b/build/whitespace_file.txt
@@ -0,0 +1,156 @@
+Copyright 2014 The Chromium Authors. All rights reserved.
+Use of this useless file is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+
+This file is used for making non-code changes to trigger buildbot cycles. Make
+any modification below this line.
+
+======================================================================
+
+Let's make a story. Add zero+ sentences for every commit:
+
+CHÄPTER 1:
+It was a dark and blinky night; the rain fell in torrents -- except at
+occasional intervals, when it was checked by a violent gust of wind which
+swept up the streets (for it is in London that our scene lies), rattling along
+the housetops, and fiercely agitating the scanty flame of the lamps that
+struggled against the elements. A hooded figure emerged.
+
+It was a Domo-Kun.
+
+"What took you so long?", inquired his wife.
+
+Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the
+waffles you brought him?" "You know him, he's not one to forego a waffle,
+no matter how burnt," he snickered.
+
+The pause was filled with the sound of compile errors.
+
+CHAPTER 2:
+The jelly was as dark as night, and just as runny.
+The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles
+with his fork, watching the runny jelly spread and pool across his plate,
+like the blood of a dying fawn. "It reminds me of that time --" he started, as
+his wife cut in quickly: "-- please. I can't bear to hear it.". A flurry of
+images coming from the past flowed through his mind.
+
+"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly
+overhead, barely disturbing the thick cigarette smoke. No doubt was left about
+when the fan was last cleaned.
+
+There was a poignant pause.
+
+CHAPTER 3:
+Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he
+began feeling sick. He thought out loud to himself, "No, he wouldn't have done
+that to me." He considered that perhaps he shouldn't have pushed so hard.
+Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable
+horror that had occurred just the week before.
+
+Next time, there won't be any sushi. Why sushi with waffles anyway?  It's like
+adorning breakfast cereal with halibut -- shameful.
+
+CHAPTER 4:
+The taste of stale sushi in his mouth the next morning was unbearable. He
+wondered where the sushi came from as he attempted to wash the taste away with
+a bottle of 3000¥ sake. He tries to recall the cook's face.  Purple? Probably.
+
+CHAPTER 5:
+Many tears later, Mr. Usagi would laugh at the memory of the earnest,
+well-intentioned Domo-Kun. Another day in the life. That is when he realized that
+life goes on.
+
+TRUISMS (1978-1983)
+JENNY HOLZER
+A LITTLE KNOWLEDGE CAN GO A LONG WAY
+A LOT OF PROFESSIONALS ARE CRACKPOTS
+A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER
+A NAME MEANS A LOT JUST BY ITSELF
+A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD
+A RELAXED MAN IS NOT NECESSARILY A BETTER MAN
+NO ONE SHOULD EVER USE SVN
+AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS
+IT IS MANS FATE TO OUTSMART HIMSELF
+BEING SURE OF YOURSELF MEANS YOU'RE A FOOL
+AM NOT
+ARE TOO
+IF AT FIRST YOU DON'T SUCCEED: TRY, EXCEPT, FINALLY
+AND THEN, TIME LEAPT BACKWARDS
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAaaaaaaaaaaaaaaaaaaaaaaaaaaaahhhh LOT
+I'm really tempted to change something above the line.
+Reeccciiiipppppeeeeeesssssss!!!!!!!!!
+PEOPLE SAY "FAILURE IS NOT AN OPTION", BUT FAILURE IS ALWAYS AN OPTION.
+WHAT GOES UP MUST HAVE A NON-ZERO VELOCITY
+
+I can feel the heat closing in, feel them out there making their moves...
+What could possibly go wrong? We've already ate our cake.
+
+Stand Still. Pause Clocks. We can make the World Stop.
+WUBWUBWUBWUBWUB
+
+I want a 1917 build and you will give me what I want.
+
+This sentence is false.
+
+Beauty is in the eyes of a Beholder.
+
+I'm the best at space.
+
+The first time Yossarian saw the chaplain, he fell madly in love with him.
+*
+*
+*
+Give not thyself up, then, to fire, lest it invert thee, deaden thee; as for
+the time it did me. There is a wisdom that is woe; but there is a woe that is
+madness. And there is a Catskill eagle in some souls that can alike dive down
+into the blackest gorges, and soar out of them again and become invisible in
+the sunny spaces. And even if he for ever flies within the gorge, that gorge
+is in the mountains; so that even in his lowest swoop the mountain eagle is
+still higher than other birds upon the plain, even though they soar.
+*
+*
+*
+
+I'm here to commit lines and drop rhymes
+*
+This is a line to test and try uploading a cl.
+
+And lo, in the year 2014, there was verily an attempt to upgrade to GCC 4.8 on
+the Android bots, and it was good. Except on one bot, where it was bad. And
+lo, the change was reverted, and GCC went back to 4.6, where code is slower
+and less optimized. And verily did it break the build, because artifacts had
+been created with 4.8, and alignment was no longer the same, and a great
+sadness descended upon the Android GN buildbot, and it did refuseth to build
+any more. But the sheriffs thought to themselves: Placebo! Let us clobber the
+bot, and perhaps it will rebuild with GCC 4.6, which hath worked for many many
+seasons. And so they modified the whitespace file with these immortal lines,
+and visited it upon the bots, that great destruction might be wrought upon
+their outdated binaries. In clobberus, veritas.
+
+As the git approaches, light begins to shine through the SCM thrice again...
+However, the git, is, after all, quite stupid.
+
+Suddenly Domo-Kun found itself in a room filled with dazzling mirrors.
+
+A herd of wild gits appears!  Time for CQ :D
+And one more for sizes.py...
+
+Sigh.
+
+It was love at first sight.  The moment Yossarian first laid eyes on the chaplain, he fell madly in love with him.
+
+Cool whitespace change for git-cl land
+
+Oh god the bots are red! I'm blind! Mmmm, cronuts.
+
+If you stand on your head, you will get footprints in your hair.
+
+sigh
+sigher
+pick up cls
+
+In the BUILD we trust.
+^_^
+
+In the masters we don't.
diff --git a/build/win/asan.gyp b/build/win/asan.gyp
new file mode 100644
index 0000000..d938426
--- /dev/null
+++ b/build/win/asan.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+   'targets': [
+     {
+       'target_name': 'asan_dynamic_runtime',
+       'type': 'none',
+       'variables': {
+         # Every target is going to depend on asan_dynamic_runtime, so allow
+         # this one to depend on itself.
+         'prune_self_dependency': 1,
+       },
+       'conditions': [
+         ['OS=="win"', {
+           'copies': [
+             {
+               'destination': '<(PRODUCT_DIR)',
+               'files': [
+                 # Path is relative to this GYP file.
+                 '<(DEPTH)/<(make_clang_dir)/lib/clang/<!(python <(DEPTH)/tools/clang/scripts/update.py --print-clang-version)/lib/windows/clang_rt.asan_dynamic-i386.dll',
+               ],
+             },
+           ],
+         }],
+       ],
+     },
+   ],
+}
diff --git a/build/win/chrome_win.croc b/build/win/chrome_win.croc
new file mode 100644
index 0000000..e1e3bb7
--- /dev/null
+++ b/build/win/chrome_win.croc
@@ -0,0 +1,26 @@
+# -*- python -*-
+# Crocodile config file for Chromium windows
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include chromeos, posix, or linux specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|linux|posix)(\\.|_)',
+      'include' : 0,
+    },
+    # Don't include ChromeOS dirs
+    {
+      'regexp' : '.*/chromeos/',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_win\\.',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/win/compatibility.manifest b/build/win/compatibility.manifest
new file mode 100644
index 0000000..10d10da
--- /dev/null
+++ b/build/win/compatibility.manifest
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+    <application>
+      <!--The ID below indicates application support for Windows Vista -->
+      <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
+      <!--The ID below indicates application support for Windows 7 -->
+      <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+      <!--The ID below indicates application support for Windows 8 -->
+      <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
+      <!--The ID below indicates application support for Windows 8.1 -->
+      <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
+      <!--The ID below indicates application support for Windows 10 -->
+      <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
+    </application>
+  </compatibility>
+</assembly>
diff --git a/build/win/dbghelp_xp/README.chromium b/build/win/dbghelp_xp/README.chromium
new file mode 100644
index 0000000..a52cfad
--- /dev/null
+++ b/build/win/dbghelp_xp/README.chromium
@@ -0,0 +1,2 @@
+This dbghelp.dll is the redistributable version from the Windows 7 SDK, the
+last one to work on Windows XP.
diff --git a/build/win/dbghelp_xp/dbghelp.dll b/build/win/dbghelp_xp/dbghelp.dll
new file mode 100755
index 0000000..9f52a5d
--- /dev/null
+++ b/build/win/dbghelp_xp/dbghelp.dll
Binary files differ
diff --git a/build/win/importlibs/create_import_lib.gypi b/build/win/importlibs/create_import_lib.gypi
new file mode 100644
index 0000000..9cb0d345
--- /dev/null
+++ b/build/win/importlibs/create_import_lib.gypi
@@ -0,0 +1,53 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to create import libraries from an import description file in a consistent
+# manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_import_lib',
+#   'type': 'none',
+#   'sources': [
+#     'foo.imports',
+#     'bar.imports',
+#   ],
+#   'variables': {
+#     # Both are optional; defaults are defined below in this file.
+#     'create_importlib': 'path-to-script',
+#     'lib_dir': 'path-to-output-directory',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# This will generate import libraries named 'foo.lib' and 'bar.lib' in the
+# specified lib directory.
+
+{
+  'variables': {
+    'create_importlib': '<(DEPTH)/build/win/importlibs/create_importlib_win.py',
+    'lib_dir': '<(PRODUCT_DIR)/lib',
+  },
+  'rules': [
+    {
+      'rule_name': 'create_import_lib',
+      'extension': 'imports',
+      'inputs': [
+        '<(create_importlib)',
+      ],
+      'outputs': [
+        '<(lib_dir)/<(RULE_INPUT_ROOT).lib',
+      ],
+      'action': [
+        'python',
+        '<(create_importlib)',
+        '--output-file', '<@(_outputs)',
+        '<(RULE_INPUT_PATH)',
+      ],
+      'message': 'Generating import library from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 0,
+    },
+  ],
+}
diff --git a/build/win/importlibs/create_importlib_win.py b/build/win/importlibs/create_importlib_win.py
new file mode 100755
index 0000000..bb6a2f0
--- /dev/null
+++ b/build/win/importlibs/create_importlib_win.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+"""Creates an import library from an import description file."""
+import ast
+import logging
+import optparse
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+_USAGE = """\
+Usage: %prog [options] [imports-file]
+
+Creates an import library from imports-file.
+
+Note: this script uses the Microsoft assembler (ml.exe) and the library tool
+    (lib.exe), both of which must be on the PATH.
+"""
+
+
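+# An imports file is a Python literal dict (read with ast.literal_eval);
+# a minimal example (hypothetical DLL) looks like:
+#
+#   {
+#     'architecture': 'x86',
+#     'dll_name': 'foo.dll',
+#     'imports': ['DoSomething@4'],
+#   }
+
+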
+_ASM_STUB_HEADER = """\
+; This file is autogenerated by create_importlib_win.py, do not edit.
+.386
+.MODEL FLAT, C
+.CODE
+
+; Stubs to provide mangled names to lib.exe for the
+; correct generation of import libs.
+"""
+
+
+_DEF_STUB_HEADER = """\
+; This file is autogenerated by create_importlib_win.py, do not edit.
+
+; Export declarations for generating import libs.
+"""
+
+
+_LOGGER = logging.getLogger()
+
+
+class _Error(Exception):
+  pass
+
+
+class _ImportLibraryGenerator(object):
+  def __init__(self, temp_dir):
+    self._temp_dir = temp_dir
+
+  def _Shell(self, cmd, **kw):
+    ret = subprocess.call(cmd, **kw)
+    _LOGGER.info('Running "%s" returned %d.', cmd, ret)
+    if ret != 0:
+      raise _Error('Command "%s" returned %d.' % (cmd, ret))
+
+  def _ReadImportsFile(self, imports_file):
+    # Slurp the imports file.
+    return ast.literal_eval(open(imports_file).read())
+
+  def _WriteStubsFile(self, import_names, output_file):
+    output_file.write(_ASM_STUB_HEADER)
+
+    for name in import_names:
+      output_file.write('%s PROC\n' % name)
+      output_file.write('%s ENDP\n' % name)
+
+    output_file.write('END\n')
+
+  def _WriteDefFile(self, dll_name, import_names, output_file):
+    output_file.write(_DEF_STUB_HEADER)
+    output_file.write('NAME %s\n' % dll_name)
+    output_file.write('EXPORTS\n')
+    for name in import_names:
+      name = name.split('@')[0]
+      output_file.write('  %s\n' % name)
+
+  def _CreateObj(self, dll_name, imports):
+    """Writes an assembly file containing empty declarations.
+
+    For each imported function of the form:
+
+    AddClipboardFormatListener@4 PROC
+    AddClipboardFormatListener@4 ENDP
+
+    The resulting object file is then supplied to lib.exe with a .def file
+    declaring the corresponding non-adorned exports as they appear on the
+    exporting DLL, e.g.
+
+    EXPORTS
+      AddClipboardFormatListener
+
+    In combination, the .def file and the .obj file cause lib.exe to generate
+    an x86 import lib with public symbols named like
+    "__imp__AddClipboardFormatListener@4", binding to exports named like
+    "AddClipboardFormatListener".
+
+    All of this is perpetrated in a temporary directory, as the intermediate
+    artifacts are quick and easy to produce, and of no interest to anyone
+    after the fact."""
+
+    # Create an .asm file to provide stdcall-like stub names to lib.exe.
+    asm_name = dll_name + '.asm'
+    _LOGGER.info('Writing asm file "%s".', asm_name)
+    with open(os.path.join(self._temp_dir, asm_name), 'wb') as stubs_file:
+      self._WriteStubsFile(imports, stubs_file)
+
+    # Invoke the assembler to compile it to .obj.
+    obj_name = dll_name + '.obj'
+    cmdline = ['ml.exe', '/nologo', '/c', asm_name, '/Fo', obj_name]
+    self._Shell(cmdline, cwd=self._temp_dir, stdout=open(os.devnull))
+
+    return obj_name
+
+  def _CreateImportLib(self, dll_name, imports, architecture, output_file):
+    """Creates an import lib binding imports to dll_name for architecture.
+
+    On success, writes the import library to output file.
+    """
+    obj_file = None
+
+    # For x86 architecture we have to provide an object file for correct
+    # name mangling between the import stubs and the exported functions.
+    if architecture == 'x86':
+      obj_file = self._CreateObj(dll_name, imports)
+
+    # Create the corresponding .def file. This file has the non stdcall-adorned
+    # names, as exported by the destination DLL.
+    def_name = dll_name + '.def'
+    _LOGGER.info('Writing def file "%s".', def_name)
+    with open(os.path.join(self._temp_dir, def_name), 'wb') as def_file:
+      self._WriteDefFile(dll_name, imports, def_file)
+
+    # Invoke lib.exe to create the import library.
+    # We generate everything into the temporary directory, as the .exp export
+    # files will be generated at the same path as the import library, and we
+    # don't want those files potentially gunking the works.
+    dll_base_name, _ = os.path.splitext(dll_name)
+    lib_name = dll_base_name + '.lib'
+    cmdline = ['lib.exe',
+               '/machine:%s' % architecture,
+               '/def:%s' % def_name,
+               '/out:%s' % lib_name]
+    if obj_file:
+      cmdline.append(obj_file)
+
+    self._Shell(cmdline, cwd=self._temp_dir, stdout=open(os.devnull))
+
+    # Copy the .lib file to the output directory.
+    shutil.copyfile(os.path.join(self._temp_dir, lib_name), output_file)
+    _LOGGER.info('Created "%s".', output_file)
+
+  def CreateImportLib(self, imports_file, output_file):
+    # Read the imports file.
+    imports = self._ReadImportsFile(imports_file)
+
+    # Create the requested import library at the output file path.
+    self._CreateImportLib(imports['dll_name'],
+                          imports['imports'],
+                          imports.get('architecture', 'x86'),
+                          output_file)
+
+
+def main():
+  parser = optparse.OptionParser(usage=_USAGE)
+  parser.add_option('-o', '--output-file',
+                    help='Specifies the output file path.')
+  parser.add_option('-k', '--keep-temp-dir',
+                    action='store_true',
+                    help='Keep the temporary directory.')
+  parser.add_option('-v', '--verbose',
+                    action='store_true',
+                    help='Verbose logging.')
+
+  options, args = parser.parse_args()
+
+  if len(args) != 1:
+    parser.error('You must provide an imports file.')
+
+  if not options.output_file:
+    parser.error('You must provide an output file.')
+
+  options.output_file = os.path.abspath(options.output_file)
+
+  if options.verbose:
+    logging.basicConfig(level=logging.INFO)
+  else:
+    logging.basicConfig(level=logging.WARN)
+
+  temp_dir = tempfile.mkdtemp()
+  _LOGGER.info('Created temporary directory "%s."', temp_dir)
+  try:
+    # Create a generator and create the import lib.
+    generator = _ImportLibraryGenerator(temp_dir)
+    generator.CreateImportLib(args[0], options.output_file)
+    ret = 0
+  except Exception:
+    _LOGGER.exception('Failed to create import lib.')
+    ret = 1
+  finally:
+    if not options.keep_temp_dir:
+      shutil.rmtree(temp_dir)
+      _LOGGER.info('Deleted temporary directory "%s."', temp_dir)
+
+  return ret
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/win/importlibs/filter_export_list.py b/build/win/importlibs/filter_export_list.py
new file mode 100755
index 0000000..c2489a9d
--- /dev/null
+++ b/build/win/importlibs/filter_export_list.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+"""Help maintaining DLL import lists."""
+import ast
+import optparse
+import re
+import sys
+
+
+_EXPORT_RE = re.compile(r"""
+  ^\s*(?P<ordinal>[0-9]+)  # The ordinal field.
+  \s+(?P<hint>[0-9A-F]+)   # The hint field.
+  \s(?P<rva>........)      # The RVA field.
+  \s+(?P<name>[^ ]+)       # And finally the name we're really after.
+""", re.VERBOSE)
+
+
+_USAGE = r"""\
+Usage: %prog [options] [master-file]
+
+This script filters a list of exports from a DLL, generated from something
+like the following command line:
+
+C:\> dumpbin /exports user32.dll
+
+against a master list of imports built from e.g.
+
+C:\> dumpbin /exports user32.lib
+
+The point of this is to trim non-public exports from the list, and to
+normalize the names to their stdcall-mangled form for the generation of
+import libraries.
+Note that the export names from the latter incantation are stdcall-mangled,
+e.g. they are suffixed with "@" and the number of argument bytes to the
+function.
+"""
+
+def _ReadMasterFile(master_file):
+  # Slurp the master file.
+  with open(master_file) as f:
+    master_exports = ast.literal_eval(f.read())
+
+  master_mapping = {}
+  for export in master_exports:
+    name = export.split('@')[0]
+    master_mapping[name] = export
+
+  return master_mapping
+
+
+def main():
+  parser = optparse.OptionParser(usage=_USAGE)
+  parser.add_option('-r', '--reverse',
+                    action='store_true',
+                    help='Reverse the matching, e.g. return the functions '
+                         'in the master list that aren\'t in the input.')
+
+  options, args = parser.parse_args()
+  if len(args) != 1:
+    parser.error('Must provide a master file.')
+
+  master_mapping = _ReadMasterFile(args[0])
+
+  found_exports = []
+  for line in sys.stdin:
+    match = _EXPORT_RE.match(line)
+    if match:
+      export_name = master_mapping.get(match.group('name'), None)
+      if export_name:
+        found_exports.append(export_name)
+
+  if options.reverse:
+    # Invert the found_exports list.
+    found_exports = set(master_mapping.values()) - set(found_exports)
+
+  # Sort the found exports for tidy output.
+  print '\n'.join(sorted(found_exports))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/win/importlibs/x86/user32.winxp.imports b/build/win/importlibs/x86/user32.winxp.imports
new file mode 100644
index 0000000..24403a8
--- /dev/null
+++ b/build/win/importlibs/x86/user32.winxp.imports
@@ -0,0 +1,670 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This file is used to create a custom import library for Chrome's use of
+# user32.dll exports. The set of exports defined below is restricted to
+# those available on Windows XP SP2 and later.
+{
+  'architecture': 'x86',
+
+  # The DLL to bind to.
+  'dll_name': 'user32.dll',
+
+  # Name of the generated import library.
+  'importlib_name': 'user32.winxp.lib',
+
+  # This is the set of exports observed on a user32.dll from Windows XP SP2.
+  # The version of the DLL where these were observed is 5.1.2600.2180.
+  # Incidentally this set of exports also coincides with Windows XP SP3, where
+  # the version of the DLL is 5.1.2600.5512.
+  # Don't add new imports here unless and until the minimal supported
+  # Windows version has been bumped past Windows XP SP2+.
+  'imports': [
+    'ActivateKeyboardLayout@8',
+    'AdjustWindowRect@12',
+    'AdjustWindowRectEx@16',
+    'AllowSetForegroundWindow@4',
+    'AnimateWindow@12',
+    'AnyPopup@0',
+    'AppendMenuA@16',
+    'AppendMenuW@16',
+    'ArrangeIconicWindows@4',
+    'AttachThreadInput@12',
+    'BeginDeferWindowPos@4',
+    'BeginPaint@8',
+    'BlockInput@4',
+    'BringWindowToTop@4',
+    'BroadcastSystemMessage@20',
+    'BroadcastSystemMessageA@20',
+    'BroadcastSystemMessageExA@24',
+    'BroadcastSystemMessageExW@24',
+    'BroadcastSystemMessageW@20',
+    'CallMsgFilter@8',
+    'CallMsgFilterA@8',
+    'CallMsgFilterW@8',
+    'CallNextHookEx@16',
+    'CallWindowProcA@20',
+    'CallWindowProcW@20',
+    'CascadeChildWindows@8',
+    'CascadeWindows@20',
+    'ChangeClipboardChain@8',
+    'ChangeDisplaySettingsA@8',
+    'ChangeDisplaySettingsExA@20',
+    'ChangeDisplaySettingsExW@20',
+    'ChangeDisplaySettingsW@8',
+    'ChangeMenuA@20',
+    'ChangeMenuW@20',
+    'CharLowerA@4',
+    'CharLowerBuffA@8',
+    'CharLowerBuffW@8',
+    'CharLowerW@4',
+    'CharNextA@4',
+    'CharNextExA@12',
+    'CharNextW@4',
+    'CharPrevA@8',
+    'CharPrevExA@16',
+    'CharPrevW@8',
+    'CharToOemA@8',
+    'CharToOemBuffA@12',
+    'CharToOemBuffW@12',
+    'CharToOemW@8',
+    'CharUpperA@4',
+    'CharUpperBuffA@8',
+    'CharUpperBuffW@8',
+    'CharUpperW@4',
+    'CheckDlgButton@12',
+    'CheckMenuItem@12',
+    'CheckMenuRadioItem@20',
+    'CheckRadioButton@16',
+    'ChildWindowFromPoint@12',
+    'ChildWindowFromPointEx@16',
+    'ClientToScreen@8',
+    'ClipCursor@4',
+    'CloseClipboard@0',
+    'CloseDesktop@4',
+    'CloseWindow@4',
+    'CloseWindowStation@4',
+    'CopyAcceleratorTableA@12',
+    'CopyAcceleratorTableW@12',
+    'CopyIcon@4',
+    'CopyImage@20',
+    'CopyRect@8',
+    'CountClipboardFormats@0',
+    'CreateAcceleratorTableA@8',
+    'CreateAcceleratorTableW@8',
+    'CreateCaret@16',
+    'CreateCursor@28',
+    'CreateDesktopA@24',
+    'CreateDesktopW@24',
+    'CreateDialogIndirectParamA@20',
+    'CreateDialogIndirectParamW@20',
+    'CreateDialogParamA@20',
+    'CreateDialogParamW@20',
+    'CreateIcon@28',
+    'CreateIconFromResource@16',
+    'CreateIconFromResourceEx@28',
+    'CreateIconIndirect@4',
+    'CreateMDIWindowA@40',
+    'CreateMDIWindowW@40',
+    'CreateMenu@0',
+    'CreatePopupMenu@0',
+    'CreateWindowExA@48',
+    'CreateWindowExW@48',
+    'CreateWindowStationA@16',
+    'CreateWindowStationW@16',
+    'DdeAbandonTransaction@12',
+    'DdeAccessData@8',
+    'DdeAddData@16',
+    'DdeClientTransaction@32',
+    'DdeCmpStringHandles@8',
+    'DdeConnect@16',
+    'DdeConnectList@20',
+    'DdeCreateDataHandle@28',
+    'DdeCreateStringHandleA@12',
+    'DdeCreateStringHandleW@12',
+    'DdeDisconnect@4',
+    'DdeDisconnectList@4',
+    'DdeEnableCallback@12',
+    'DdeFreeDataHandle@4',
+    'DdeFreeStringHandle@8',
+    'DdeGetData@16',
+    'DdeGetLastError@4',
+    'DdeImpersonateClient@4',
+    'DdeInitializeA@16',
+    'DdeInitializeW@16',
+    'DdeKeepStringHandle@8',
+    'DdeNameService@16',
+    'DdePostAdvise@12',
+    'DdeQueryConvInfo@12',
+    'DdeQueryNextServer@8',
+    'DdeQueryStringA@20',
+    'DdeQueryStringW@20',
+    'DdeReconnect@4',
+    'DdeSetQualityOfService@12',
+    'DdeSetUserHandle@12',
+    'DdeUnaccessData@4',
+    'DdeUninitialize@4',
+    'DefDlgProcA@16',
+    'DefDlgProcW@16',
+    'DefFrameProcA@20',
+    'DefFrameProcW@20',
+    'DefMDIChildProcA@16',
+    'DefMDIChildProcW@16',
+    'DefRawInputProc@12',
+    'DefWindowProcA@16',
+    'DefWindowProcW@16',
+    'DeferWindowPos@32',
+    'DeleteMenu@12',
+    'DeregisterShellHookWindow@4',
+    'DestroyAcceleratorTable@4',
+    'DestroyCaret@0',
+    'DestroyCursor@4',
+    'DestroyIcon@4',
+    'DestroyMenu@4',
+    'DestroyWindow@4',
+    'DialogBoxIndirectParamA@20',
+    'DialogBoxIndirectParamW@20',
+    'DialogBoxParamA@20',
+    'DialogBoxParamW@20',
+    'DisableProcessWindowsGhosting@0',
+    'DispatchMessageA@4',
+    'DispatchMessageW@4',
+    'DlgDirListA@20',
+    'DlgDirListComboBoxA@20',
+    'DlgDirListComboBoxW@20',
+    'DlgDirListW@20',
+    'DlgDirSelectComboBoxExA@16',
+    'DlgDirSelectComboBoxExW@16',
+    'DlgDirSelectExA@16',
+    'DlgDirSelectExW@16',
+    'DragDetect@12',
+    'DragObject@20',
+    'DrawAnimatedRects@16',
+    'DrawCaption@16',
+    'DrawEdge@16',
+    'DrawFocusRect@8',
+    'DrawFrame@16',
+    'DrawFrameControl@16',
+    'DrawIcon@16',
+    'DrawIconEx@36',
+    'DrawMenuBar@4',
+    'DrawStateA@40',
+    'DrawStateW@40',
+    'DrawTextA@20',
+    'DrawTextExA@24',
+    'DrawTextExW@24',
+    'DrawTextW@20',
+    'EditWndProc@16',
+    'EmptyClipboard@0',
+    'EnableMenuItem@12',
+    'EnableScrollBar@12',
+    'EnableWindow@8',
+    'EndDeferWindowPos@4',
+    'EndDialog@8',
+    'EndMenu@0',
+    'EndPaint@8',
+    'EndTask@12',
+    'EnumChildWindows@12',
+    'EnumClipboardFormats@4',
+    'EnumDesktopWindows@12',
+    'EnumDesktopsA@12',
+    'EnumDesktopsW@12',
+    'EnumDisplayDevicesA@16',
+    'EnumDisplayDevicesW@16',
+    'EnumDisplayMonitors@16',
+    'EnumDisplaySettingsA@12',
+    'EnumDisplaySettingsExA@16',
+    'EnumDisplaySettingsExW@16',
+    'EnumDisplaySettingsW@12',
+    'EnumPropsA@8',
+    'EnumPropsExA@12',
+    'EnumPropsExW@12',
+    'EnumPropsW@8',
+    'EnumThreadWindows@12',
+    'EnumWindowStationsA@8',
+    'EnumWindowStationsW@8',
+    'EnumWindows@8',
+    'EqualRect@8',
+    'ExcludeUpdateRgn@8',
+    'ExitWindowsEx@8',
+    'FillRect@12',
+    'FindWindowA@8',
+    'FindWindowExA@16',
+    'FindWindowExW@16',
+    'FindWindowW@8',
+    'FlashWindow@8',
+    'FlashWindowEx@4',
+    'FrameRect@12',
+    'FreeDDElParam@8',
+    'GetActiveWindow@0',
+    'GetAltTabInfo@20',
+    'GetAltTabInfoA@20',
+    'GetAltTabInfoW@20',
+    'GetAncestor@8',
+    'GetAsyncKeyState@4',
+    'GetCapture@0',
+    'GetCaretBlinkTime@0',
+    'GetCaretPos@4',
+    'GetClassInfoA@12',
+    'GetClassInfoExA@12',
+    'GetClassInfoExW@12',
+    'GetClassInfoW@12',
+    'GetClassLongA@8',
+    'GetClassLongW@8',
+    'GetClassNameA@12',
+    'GetClassNameW@12',
+    'GetClassWord@8',
+    'GetClientRect@8',
+    'GetClipCursor@4',
+    'GetClipboardData@4',
+    'GetClipboardFormatNameA@12',
+    'GetClipboardFormatNameW@12',
+    'GetClipboardOwner@0',
+    'GetClipboardSequenceNumber@0',
+    'GetClipboardViewer@0',
+    'GetComboBoxInfo@8',
+    'GetCursor@0',
+    'GetCursorInfo@4',
+    'GetCursorPos@4',
+    'GetDC@4',
+    'GetDCEx@12',
+    'GetDesktopWindow@0',
+    'GetDialogBaseUnits@0',
+    'GetDlgCtrlID@4',
+    'GetDlgItem@8',
+    'GetDlgItemInt@16',
+    'GetDlgItemTextA@16',
+    'GetDlgItemTextW@16',
+    'GetDoubleClickTime@0',
+    'GetFocus@0',
+    'GetForegroundWindow@0',
+    'GetGUIThreadInfo@8',
+    'GetGuiResources@8',
+    'GetIconInfo@8',
+    'GetInputDesktop@0',
+    'GetInputState@0',
+    'GetKBCodePage@0',
+    'GetKeyNameTextA@12',
+    'GetKeyNameTextW@12',
+    'GetKeyState@4',
+    'GetKeyboardLayout@4',
+    'GetKeyboardLayoutList@8',
+    'GetKeyboardLayoutNameA@4',
+    'GetKeyboardLayoutNameW@4',
+    'GetKeyboardState@4',
+    'GetKeyboardType@4',
+    'GetLastActivePopup@4',
+    'GetLastInputInfo@4',
+    'GetLayeredWindowAttributes@16',
+    'GetListBoxInfo@4',
+    'GetMenu@4',
+    'GetMenuBarInfo@16',
+    'GetMenuCheckMarkDimensions@0',
+    'GetMenuContextHelpId@4',
+    'GetMenuDefaultItem@12',
+    'GetMenuInfo@8',
+    'GetMenuItemCount@4',
+    'GetMenuItemID@8',
+    'GetMenuItemInfoA@16',
+    'GetMenuItemInfoW@16',
+    'GetMenuItemRect@16',
+    'GetMenuState@12',
+    'GetMenuStringA@20',
+    'GetMenuStringW@20',
+    'GetMessageA@16',
+    'GetMessageExtraInfo@0',
+    'GetMessagePos@0',
+    'GetMessageTime@0',
+    'GetMessageW@16',
+    'GetMonitorInfoA@8',
+    'GetMonitorInfoW@8',
+    'GetMouseMovePointsEx@20',
+    'GetNextDlgGroupItem@12',
+    'GetNextDlgTabItem@12',
+    'GetOpenClipboardWindow@0',
+    'GetParent@4',
+    'GetPriorityClipboardFormat@8',
+    'GetProcessDefaultLayout@4',
+    'GetProcessWindowStation@0',
+    'GetPropA@8',
+    'GetPropW@8',
+    'GetQueueStatus@4',
+    'GetRawInputBuffer@12',
+    'GetRawInputData@20',
+    'GetRawInputDeviceInfoA@16',
+    'GetRawInputDeviceInfoW@16',
+    'GetRawInputDeviceList@12',
+    'GetRegisteredRawInputDevices@12',
+    'GetScrollBarInfo@12',
+    'GetScrollInfo@12',
+    'GetScrollPos@8',
+    'GetScrollRange@16',
+    'GetShellWindow@0',
+    'GetSubMenu@8',
+    'GetSysColor@4',
+    'GetSysColorBrush@4',
+    'GetSystemMenu@8',
+    'GetSystemMetrics@4',
+    'GetTabbedTextExtentA@20',
+    'GetTabbedTextExtentW@20',
+    'GetThreadDesktop@4',
+    'GetTitleBarInfo@8',
+    'GetTopWindow@4',
+    'GetUpdateRect@12',
+    'GetUpdateRgn@12',
+    'GetUserObjectInformationA@20',
+    'GetUserObjectInformationW@20',
+    'GetUserObjectSecurity@20',
+    'GetWindow@8',
+    'GetWindowContextHelpId@4',
+    'GetWindowDC@4',
+    'GetWindowInfo@8',
+    'GetWindowLongA@8',
+    'GetWindowLongW@8',
+    'GetWindowModuleFileName@12',
+    'GetWindowModuleFileNameA@12',
+    'GetWindowModuleFileNameW@12',
+    'GetWindowPlacement@8',
+    'GetWindowRect@8',
+    'GetWindowRgn@8',
+    'GetWindowRgnBox@8',
+    'GetWindowTextA@12',
+    'GetWindowTextLengthA@4',
+    'GetWindowTextLengthW@4',
+    'GetWindowTextW@12',
+    'GetWindowThreadProcessId@8',
+    'GetWindowWord@8',
+    'GrayStringA@36',
+    'GrayStringW@36',
+    'HideCaret@4',
+    'HiliteMenuItem@16',
+    'IMPGetIMEA@8',
+    'IMPGetIMEW@8',
+    'IMPQueryIMEA@4',
+    'IMPQueryIMEW@4',
+    'IMPSetIMEA@8',
+    'IMPSetIMEW@8',
+    'ImpersonateDdeClientWindow@8',
+    'InSendMessage@0',
+    'InSendMessageEx@4',
+    'InflateRect@12',
+    'InsertMenuA@20',
+    'InsertMenuItemA@16',
+    'InsertMenuItemW@16',
+    'InsertMenuW@20',
+    'InternalGetWindowText@12',
+    'IntersectRect@12',
+    'InvalidateRect@12',
+    'InvalidateRgn@12',
+    'InvertRect@8',
+    'IsCharAlphaA@4',
+    'IsCharAlphaNumericA@4',
+    'IsCharAlphaNumericW@4',
+    'IsCharAlphaW@4',
+    'IsCharLowerA@4',
+    'IsCharLowerW@4',
+    'IsCharUpperA@4',
+    'IsCharUpperW@4',
+    'IsChild@8',
+    'IsClipboardFormatAvailable@4',
+    'IsDialogMessage@8',
+    'IsDialogMessageA@8',
+    'IsDialogMessageW@8',
+    'IsDlgButtonChecked@8',
+    'IsGUIThread@4',
+    'IsHungAppWindow@4',
+    'IsIconic@4',
+    'IsMenu@4',
+    'IsRectEmpty@4',
+    'IsWinEventHookInstalled@4',
+    'IsWindow@4',
+    'IsWindowEnabled@4',
+    'IsWindowUnicode@4',
+    'IsWindowVisible@4',
+    'IsZoomed@4',
+    'KillTimer@8',
+    'LoadAcceleratorsA@8',
+    'LoadAcceleratorsW@8',
+    'LoadBitmapA@8',
+    'LoadBitmapW@8',
+    'LoadCursorA@8',
+    'LoadCursorFromFileA@4',
+    'LoadCursorFromFileW@4',
+    'LoadCursorW@8',
+    'LoadIconA@8',
+    'LoadIconW@8',
+    'LoadImageA@24',
+    'LoadImageW@24',
+    'LoadKeyboardLayoutA@8',
+    'LoadKeyboardLayoutW@8',
+    'LoadMenuA@8',
+    'LoadMenuIndirectA@4',
+    'LoadMenuIndirectW@4',
+    'LoadMenuW@8',
+    'LoadStringA@16',
+    'LoadStringW@16',
+    'LockSetForegroundWindow@4',
+    'LockWindowUpdate@4',
+    'LockWorkStation@0',
+    'LookupIconIdFromDirectory@8',
+    'LookupIconIdFromDirectoryEx@20',
+    'MapDialogRect@8',
+    'MapVirtualKeyA@8',
+    'MapVirtualKeyExA@12',
+    'MapVirtualKeyExW@12',
+    'MapVirtualKeyW@8',
+    'MapWindowPoints@16',
+    'MenuItemFromPoint@16',
+    'MessageBeep@4',
+    'MessageBoxA@16',
+    'MessageBoxExA@20',
+    'MessageBoxExW@20',
+    'MessageBoxIndirectA@4',
+    'MessageBoxIndirectW@4',
+    'MessageBoxTimeoutA@24',
+    'MessageBoxTimeoutW@24',
+    'MessageBoxW@16',
+    'ModifyMenuA@20',
+    'ModifyMenuW@20',
+    'MonitorFromPoint@12',
+    'MonitorFromRect@8',
+    'MonitorFromWindow@8',
+    'MoveWindow@24',
+    'MsgWaitForMultipleObjects@20',
+    'MsgWaitForMultipleObjectsEx@20',
+    'NotifyWinEvent@16',
+    'OemKeyScan@4',
+    'OemToCharA@8',
+    'OemToCharBuffA@12',
+    'OemToCharBuffW@12',
+    'OemToCharW@8',
+    'OffsetRect@12',
+    'OpenClipboard@4',
+    'OpenDesktopA@16',
+    'OpenDesktopW@16',
+    'OpenIcon@4',
+    'OpenInputDesktop@12',
+    'OpenWindowStationA@12',
+    'OpenWindowStationW@12',
+    'PackDDElParam@12',
+    'PaintDesktop@4',
+    'PeekMessageA@20',
+    'PeekMessageW@20',
+    'PostMessageA@16',
+    'PostMessageW@16',
+    'PostQuitMessage@4',
+    'PostThreadMessageA@16',
+    'PostThreadMessageW@16',
+    'PrintWindow@12',
+    'PrivateExtractIconsA@32',
+    'PrivateExtractIconsW@32',
+    'PtInRect@12',
+    'RealChildWindowFromPoint@12',
+    'RealGetWindowClass@12',
+    'RealGetWindowClassA@12',
+    'RealGetWindowClassW@12',
+    'RedrawWindow@16',
+    'RegisterClassA@4',
+    'RegisterClassExA@4',
+    'RegisterClassExW@4',
+    'RegisterClassW@4',
+    'RegisterClipboardFormatA@4',
+    'RegisterClipboardFormatW@4',
+    'RegisterDeviceNotificationA@12',
+    'RegisterDeviceNotificationW@12',
+    'RegisterHotKey@16',
+    'RegisterRawInputDevices@12',
+    'RegisterShellHookWindow@4',
+    'RegisterWindowMessageA@4',
+    'RegisterWindowMessageW@4',
+    'ReleaseCapture@0',
+    'ReleaseDC@8',
+    'RemoveMenu@12',
+    'RemovePropA@8',
+    'RemovePropW@8',
+    'ReplyMessage@4',
+    'ReuseDDElParam@20',
+    'ScreenToClient@8',
+    'ScrollDC@28',
+    'ScrollWindow@20',
+    'ScrollWindowEx@32',
+    'SendDlgItemMessageA@20',
+    'SendDlgItemMessageW@20',
+    'SendIMEMessageExA@8',
+    'SendIMEMessageExW@8',
+    'SendInput@12',
+    'SendMessageA@16',
+    'SendMessageCallbackA@24',
+    'SendMessageCallbackW@24',
+    'SendMessageTimeoutA@28',
+    'SendMessageTimeoutW@28',
+    'SendMessageW@16',
+    'SendNotifyMessageA@16',
+    'SendNotifyMessageW@16',
+    'SetActiveWindow@4',
+    'SetCapture@4',
+    'SetCaretBlinkTime@4',
+    'SetCaretPos@8',
+    'SetClassLongA@12',
+    'SetClassLongW@12',
+    'SetClassWord@12',
+    'SetClipboardData@8',
+    'SetClipboardViewer@4',
+    'SetCursor@4',
+    'SetCursorPos@8',
+    'SetDebugErrorLevel@4',
+    'SetDeskWallpaper@4',
+    'SetDlgItemInt@16',
+    'SetDlgItemTextA@12',
+    'SetDlgItemTextW@12',
+    'SetDoubleClickTime@4',
+    'SetFocus@4',
+    'SetForegroundWindow@4',
+    'SetKeyboardState@4',
+    'SetLastErrorEx@8',
+    'SetLayeredWindowAttributes@16',
+    'SetMenu@8',
+    'SetMenuContextHelpId@8',
+    'SetMenuDefaultItem@12',
+    'SetMenuInfo@8',
+    'SetMenuItemBitmaps@20',
+    'SetMenuItemInfoA@16',
+    'SetMenuItemInfoW@16',
+    'SetMessageExtraInfo@4',
+    'SetMessageQueue@4',
+    'SetParent@8',
+    'SetProcessDefaultLayout@4',
+    'SetProcessWindowStation@4',
+    'SetPropA@12',
+    'SetPropW@12',
+    'SetRect@20',
+    'SetRectEmpty@4',
+    'SetScrollInfo@16',
+    'SetScrollPos@16',
+    'SetScrollRange@20',
+    'SetShellWindow@4',
+    'SetSysColors@12',
+    'SetSystemCursor@8',
+    'SetThreadDesktop@4',
+    'SetTimer@16',
+    'SetUserObjectInformationA@16',
+    'SetUserObjectInformationW@16',
+    'SetUserObjectSecurity@12',
+    'SetWinEventHook@28',
+    'SetWindowContextHelpId@8',
+    'SetWindowLongA@12',
+    'SetWindowLongW@12',
+    'SetWindowPlacement@8',
+    'SetWindowPos@28',
+    'SetWindowRgn@12',
+    'SetWindowTextA@8',
+    'SetWindowTextW@8',
+    'SetWindowWord@12',
+    'SetWindowsHookA@8',
+    'SetWindowsHookExA@16',
+    'SetWindowsHookExW@16',
+    'SetWindowsHookW@8',
+    'ShowCaret@4',
+    'ShowCursor@4',
+    'ShowOwnedPopups@8',
+    'ShowScrollBar@12',
+    'ShowWindow@8',
+    'ShowWindowAsync@8',
+    'SubtractRect@12',
+    'SwapMouseButton@4',
+    'SwitchDesktop@4',
+    'SwitchToThisWindow@8',
+    'SystemParametersInfoA@16',
+    'SystemParametersInfoW@16',
+    'TabbedTextOutA@32',
+    'TabbedTextOutW@32',
+    'TileChildWindows@8',
+    'TileWindows@20',
+    'ToAscii@20',
+    'ToAsciiEx@24',
+    'ToUnicode@24',
+    'ToUnicodeEx@28',
+    'TrackMouseEvent@4',
+    'TrackPopupMenu@28',
+    'TrackPopupMenuEx@24',
+    'TranslateAccelerator@12',
+    'TranslateAcceleratorA@12',
+    'TranslateAcceleratorW@12',
+    'TranslateMDISysAccel@8',
+    'TranslateMessage@4',
+    'UnhookWinEvent@4',
+    'UnhookWindowsHook@8',
+    'UnhookWindowsHookEx@4',
+    'UnionRect@12',
+    'UnloadKeyboardLayout@4',
+    'UnpackDDElParam@16',
+    'UnregisterClassA@8',
+    'UnregisterClassW@8',
+    'UnregisterDeviceNotification@4',
+    'UnregisterHotKey@8',
+    'UpdateLayeredWindow@36',
+    'UpdateWindow@4',
+    'UserHandleGrantAccess@12',
+    'ValidateRect@8',
+    'ValidateRgn@8',
+    'VkKeyScanA@4',
+    'VkKeyScanExA@8',
+    'VkKeyScanExW@8',
+    'VkKeyScanW@4',
+    'WINNLSEnableIME@8',
+    'WINNLSGetEnableStatus@4',
+    'WINNLSGetIMEHotkey@4',
+    'WaitForInputIdle@8',
+    'WaitMessage@0',
+    'WinHelpA@16',
+    'WinHelpW@16',
+    'WindowFromDC@4',
+    'WindowFromPoint@8',
+    'keybd_event@16',
+    'mouse_event@20',
+    'wsprintfA',
+    'wsprintfW',
+    'wvsprintfA@12',
+    'wvsprintfW@12',
+  ]
+}
diff --git a/build/win/importlibs/x86/user32.winxp.lib b/build/win/importlibs/x86/user32.winxp.lib
new file mode 100644
index 0000000..deb5577
--- /dev/null
+++ b/build/win/importlibs/x86/user32.winxp.lib
Binary files differ
diff --git a/build/win/reorder-imports.py b/build/win/reorder-imports.py
new file mode 100755
index 0000000..281668f
--- /dev/null
+++ b/build/win/reorder-imports.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+
+def reorder_imports(input_dir, output_dir, architecture):
+  """Run swapimports.exe on the initial chrome.exe, and write to the output
+  directory. Also copy over any related files that might be needed
+  (pdbs, manifests etc.).
+  """
+
+  input_image = os.path.join(input_dir, 'chrome.exe')
+  output_image = os.path.join(output_dir, 'chrome.exe')
+
+  swap_exe = os.path.join(
+    __file__,
+    '..\\..\\..\\third_party\\syzygy\\binaries\\exe\\swapimport.exe')
+
+  args = [swap_exe, '--input-image=%s' % input_image,
+      '--output-image=%s' % output_image, '--overwrite', '--no-logo']
+
+  if architecture == 'x64':
+    args.append('--x64')
+
+  args.append('chrome_elf.dll')
+
+  subprocess.call(args)
+
+  for fname in glob.iglob(os.path.join(input_dir, 'chrome.exe.*')):
+    shutil.copy(fname, os.path.join(output_dir, os.path.basename(fname)))
+  return 0
+
+
+def main(argv):
+  usage = 'reorder_imports.py -i <input_dir> -o <output_dir> -a <target_arch>'
+  parser = optparse.OptionParser(usage=usage)
+  parser.add_option('-i', '--input', help='reorder chrome.exe in DIR',
+      metavar='DIR')
+  parser.add_option('-o', '--output', help='write new chrome.exe to DIR',
+      metavar='DIR')
+  parser.add_option('-a', '--arch', help='architecture of build (optional)',
+      default='ia32')
+  opts, args = parser.parse_args()
+
+  if not opts.input or not opts.output:
+    parser.error('Please provide an input and an output directory')
+  return reorder_imports(opts.input, opts.output, opts.arch)
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv[1:]))
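
A hypothetical invocation of this script (directory names are illustrative):

  python build/win/reorder-imports.py -i out/Release -o out/Release/reordered -a x64

This drives Syzygy's swapimport.exe to move the chrome_elf.dll import to the
front of chrome.exe's import table, then copies any chrome.exe.* siblings
(such as chrome.exe.pdb) next to the reordered binary.
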
diff --git a/build/win/use_ansi_codes.py b/build/win/use_ansi_codes.py
new file mode 100755
index 0000000..cff5f43
--- /dev/null
+++ b/build/win/use_ansi_codes.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Prints if the the terminal is likely to understand ANSI codes."""
+
+import os
+
+# Add more terminals here as needed.
+print 'ANSICON' in os.environ
diff --git a/build/win_is_xtree_patched.py b/build/win_is_xtree_patched.py
new file mode 100755
index 0000000..3f1994f
--- /dev/null
+++ b/build/win_is_xtree_patched.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Determines if the VS xtree header has been patched to disable C4702."""
+
+import os
+
+
+def IsPatched():
+  # TODO(scottmg): For now, just return whether we're using the packaged
+  # toolchain script (because we know it's patched). Another case could be
+  # added here to query the active VS installation and actually check the
+  # contents of xtree.
+  # http://crbug.com/346399.
+  return int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1)) == 1
+
+
+def DoMain(_):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return "1" if IsPatched() else "0"
+
+
+if __name__ == '__main__':
+  print DoMain([])
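
A sketch of how a gyp file might consume this hook via pymod_do_main (the
variable name is hypothetical):

  'variables': {
    'win_xtree_patched': '<!pymod_do_main(win_is_xtree_patched)',
  },
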
diff --git a/build/win_precompile.gypi b/build/win_precompile.gypi
new file mode 100644
index 0000000..fb86076
--- /dev/null
+++ b/build/win_precompile.gypi
@@ -0,0 +1,20 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Include this file to make targets in your .gyp use the default
+# precompiled header on Windows. This applies to debug builds only,
+# because the official builders blow up (out of memory) when precompiled
+# headers are used for release builds.
+
+{
+  'conditions': [
+    ['OS=="win" and chromium_win_pch==1', {
+        'target_defaults': {
+          'msvs_precompiled_header': '<(DEPTH)/build/precompile.h',
+          'msvs_precompiled_source': '<(DEPTH)/build/precompile.cc',
+          'sources': ['<(DEPTH)/build/precompile.cc'],
+        }
+      }],
+  ],
+}
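
A minimal sketch of pulling this into a .gyp file (the target is hypothetical,
not part of this change):

  {
    'includes': ['../build/win_precompile.gypi'],
    'targets': [
      {
        'target_name': 'my_lib',
        'type': 'static_library',
        'sources': ['foo.cc'],
      },
    ],
  }

With chromium_win_pch=1 on Windows, every target in such a file then compiles
through build/precompile.h via the injected msvs_precompiled_* settings.
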
diff --git a/dart.gyp b/dart.gyp
index b942371..f513625 100644
--- a/dart.gyp
+++ b/dart.gyp
@@ -35,6 +35,13 @@
       ],
     },
     {
+      'target_name': 'fuchsia_test',
+      'type': 'none',
+      'dependencies': [
+        'runtime/dart-runtime.gyp:fuchsia_test',
+      ],
+    },
+    {
       # This is the target that is built on the VM build bots.  It
       # must depend on anything that is required by the VM test
       # suites.
diff --git a/pkg/analysis_server/benchmark/perf/benchmark_local.dart b/pkg/analysis_server/benchmark/perf/benchmark_local.dart
index a6fd493..8cbdb89 100644
--- a/pkg/analysis_server/benchmark/perf/benchmark_local.dart
+++ b/pkg/analysis_server/benchmark/perf/benchmark_local.dart
@@ -9,6 +9,7 @@
 import 'package:analysis_server/plugin/protocol/protocol.dart';
 
 import 'benchmark_scenario.dart';
+import 'memory_tests.dart';
 
 main(List<String> args) async {
   int length = args.length;
@@ -35,6 +36,9 @@
   await run_local_completion_3();
   await run_local_completion_4();
   await run_local_refactoring_1();
+
+  await run_memory_initialAnalysis_1();
+  await run_memory_initialAnalysis_2();
 }
 
 PathHolder paths;
@@ -224,6 +228,35 @@
   printBenchmarkResults(id, description, times);
 }
 
+Future run_memory_initialAnalysis_1() async {
+  String id = 'memory-initialAnalysis-1';
+  String description = r'''
+1. Start server, set the 'analyzer' analysis root.
+2. Measure the memory usage after finishing initial analysis.
+3. Shutdown the server.
+4. Go to (1).
+''';
+  List<int> sizes = await AnalysisServerMemoryUsageTest
+      .start_waitInitialAnalysis_shutdown(
+          roots: <String>[paths.analyzer], numOfRepeats: 3);
+  printMemoryResults(id, description, sizes);
+}
+
+Future run_memory_initialAnalysis_2() async {
+  String id = 'memory-initialAnalysis-2';
+  String description = r'''
+1. Start server, set 'analyzer' and 'analysis_server' analysis roots.
+2. Measure the memory usage after finishing initial analysis.
+3. Shutdown the server.
+4. Go to (1).
+''';
+  List<int> sizes = await AnalysisServerMemoryUsageTest
+      .start_waitInitialAnalysis_shutdown(
+          roots: <String>[paths.analyzer, paths.analysisServer],
+          numOfRepeats: 3);
+  printMemoryResults(id, description, sizes);
+}
+
 class PathHolder {
   String analysisServer;
   String analyzer;
diff --git a/pkg/analysis_server/benchmark/perf/memory_tests.dart b/pkg/analysis_server/benchmark/perf/memory_tests.dart
new file mode 100644
index 0000000..8522430
--- /dev/null
+++ b/pkg/analysis_server/benchmark/perf/memory_tests.dart
@@ -0,0 +1,125 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:convert';
+import 'dart:io';
+
+import 'package:analysis_server/plugin/protocol/protocol.dart';
+import 'package:unittest/unittest.dart';
+
+import '../../test/integration/integration_tests.dart';
+
+void printMemoryResults(String id, String description, List<int> sizes) {
+  String now = new DateTime.now().toUtc().toIso8601String();
+  print('$now ========== $id');
+  print('memory: $sizes');
+  print(description.trim());
+  print('--------------------');
+  print('');
+  print('');
+}
+
+/**
+ * Base class for analysis server memory usage tests.
+ */
+class AnalysisServerMemoryUsageTest
+    extends AbstractAnalysisServerIntegrationTest {
+  static const int vmServicePort = 12345;
+
+  int getMemoryUsage() {
+    ProcessResult result = _run('curl', <String>[
+      'localhost:$vmServicePort/_getAllocationProfile?isolateId=isolates/root&gc=full'
+    ]);
+    Map json = JSON.decode(result.stdout);
+    Map heaps = json['result']['heaps'];
+    int newSpace = heaps['new']['used'];
+    int oldSpace = heaps['old']['used'];
+    return newSpace + oldSpace;
+  }
+
+  /**
+   * Send the server an 'analysis.setAnalysisRoots' command directing it to
+   * analyze [sourceDirectory].
+   */
+  Future setAnalysisRoot() =>
+      sendAnalysisSetAnalysisRoots([sourceDirectory.path], []);
+
+  /**
+   * The server is automatically started before every test.
+   */
+  @override
+  Future setUp() {
+    onAnalysisErrors.listen((AnalysisErrorsParams params) {
+      currentAnalysisErrors[params.file] = params.errors;
+    });
+    onServerError.listen((ServerErrorParams params) {
+      // A server error should never happen during an integration test.
+      fail('${params.message}\n${params.stackTrace}');
+    });
+    Completer serverConnected = new Completer();
+    onServerConnected.listen((_) {
+      expect(serverConnected.isCompleted, isFalse);
+      serverConnected.complete();
+    });
+    return startServer(servicesPort: vmServicePort).then((_) {
+      server.listenToOutput(dispatchNotification);
+      server.exitCode.then((_) {
+        skipShutdown = true;
+      });
+      return serverConnected.future;
+    });
+  }
+
+  /**
+   * After every test, the server is stopped.
+   */
+  Future shutdown() async => await shutdownIfNeeded();
+
+  /**
+   * Enable [ServerService.STATUS] notifications so that [analysisFinished]
+   * can be used.
+   */
+  Future subscribeToStatusNotifications() async {
+    await sendServerSetSubscriptions([ServerService.STATUS]);
+  }
+
+  /**
+   * Synchronously run the given [executable] with the given [arguments]. Return
+   * the result of running the process.
+   */
+  ProcessResult _run(String executable, List<String> arguments) {
+    return Process.runSync(executable, arguments,
+        stderrEncoding: UTF8, stdoutEncoding: UTF8);
+  }
+
+  /**
+   *  1. Start Analysis Server.
+   *  2. Set the analysis [roots].
+   *  3. Wait for analysis to complete.
+   *  4. Record the time to finish analysis.
+   *  5. Shutdown.
+   *  6. Go to (1).
+   */
+  static Future<List<int>> start_waitInitialAnalysis_shutdown(
+      {List<String> roots, int numOfRepeats}) async {
+    expect(roots, isNotNull, reason: 'roots');
+    expect(numOfRepeats, isNotNull, reason: 'numOfRepeats');
+    // Repeat.
+    List<int> sizes = <int>[];
+    for (int i = 0; i < numOfRepeats; i++) {
+      AnalysisServerMemoryUsageTest test = new AnalysisServerMemoryUsageTest();
+      // Initialize Analysis Server.
+      await test.setUp();
+      await test.subscribeToStatusNotifications();
+      // Set roots and analyze.
+      await test.sendAnalysisSetAnalysisRoots(roots, []);
+      await test.analysisFinished;
+      sizes.add(test.getMemoryUsage());
+      // Stop the server.
+      await test.shutdown();
+    }
+    return sizes;
+  }
+}
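
For reference, getMemoryUsage above assumes the _getAllocationProfile response
is shaped roughly like this (values and elided fields are illustrative):

  {
    "result": {
      "heaps": {
        "new": {"used": 1048576},
        "old": {"used": 8388608}
      }
    }
  }

The recorded size is the sum of the new- and old-space "used" counts after the
gc=full collection requested in the URL.
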
diff --git a/pkg/analysis_server/lib/src/status/get_handler.dart b/pkg/analysis_server/lib/src/status/get_handler.dart
index a493100..0e9a2ed 100644
--- a/pkg/analysis_server/lib/src/status/get_handler.dart
+++ b/pkg/analysis_server/lib/src/status/get_handler.dart
@@ -400,6 +400,20 @@
   }
 
   /**
+   * Produce an encoded version of the given [descriptor] that can be used to
+   * find the descriptor later.
+   */
+  String _encodeSdkDescriptor(SdkDescription descriptor) {
+    StringBuffer buffer = new StringBuffer();
+    buffer.write(descriptor.options.encodeCrossContextOptions());
+    for (String path in descriptor.paths) {
+      buffer.write('+');
+      buffer.write(path);
+    }
+    return buffer.toString();
+  }
+
+  /**
    * Return the folder being managed by the given [analysisServer] that matches
    * the given [contextFilter], or `null` if there is none.
    */
@@ -562,10 +576,30 @@
   }
 
   /**
+   * Return the context for the SDK whose descriptor is encoded to be the same
+   * as the given [contextFilter]. The [analysisServer] is used to access the
+   * SDKs.
+   */
+  AnalysisContext _getSdkContext(
+      AnalysisServer analysisServer, String contextFilter) {
+    DartSdkManager manager = analysisServer.sdkManager;
+    List<SdkDescription> descriptors = manager.sdkDescriptors;
+    for (SdkDescription descriptor in descriptors) {
+      if (contextFilter == _encodeSdkDescriptor(descriptor)) {
+        return manager.getSdk(descriptor, () => null)?.context;
+      }
+    }
+    return null;
+  }
+
+  /**
    * Return `true` if the given analysis [context] has at least one entry with
    * an exception.
    */
   bool _hasException(InternalAnalysisContext context) {
+    if (context == null) {
+      return false;
+    }
     MapIterator<AnalysisTarget, CacheEntry> iterator =
         context.analysisCache.iterator();
     while (iterator.moveNext()) {
@@ -902,9 +936,17 @@
       return _returnFailure(
           request, 'Query parameter $CONTEXT_QUERY_PARAM required');
     }
+    InternalAnalysisContext context = null;
     Folder folder = _findFolder(analysisServer, contextFilter);
     if (folder == null) {
-      return _returnFailure(request, 'Invalid context: $contextFilter');
+      context = _getSdkContext(analysisServer, contextFilter);
+      if (context == null) {
+        return _returnFailure(request, 'Invalid context: $contextFilter');
+      }
+      return _returnFailure(request,
+          'Cannot view cache entries from an SDK context: $contextFilter');
+    } else {
+      context = analysisServer.folderMap[folder];
     }
     String sourceUri = request.uri.queryParameters[SOURCE_QUERY_PARAM];
     if (sourceUri == null) {
@@ -946,7 +988,6 @@
     });
     allContexts.sort((Folder firstFolder, Folder secondFolder) =>
         firstFolder.path.compareTo(secondFolder.path));
-    InternalAnalysisContext context = analysisServer.folderMap[folder];
 
     _writeResponse(request, (StringBuffer buffer) {
       _writePage(buffer, 'Analysis Server - Cache Entry',
@@ -1000,7 +1041,7 @@
         }
         for (CacheEntry entry in entries) {
           Map<String, String> linkParameters = <String, String>{
-            CONTEXT_QUERY_PARAM: folder.path,
+            CONTEXT_QUERY_PARAM: contextFilter,
             SOURCE_QUERY_PARAM: sourceUri
           };
           List<ResultDescriptor> results = _getExpectedResults(entry);
@@ -1217,17 +1258,21 @@
       return _returnFailure(
           request, 'Query parameter $CONTEXT_QUERY_PARAM required');
     }
+    InternalAnalysisContext context = null;
     Folder folder = _findFolder(analysisServer, contextFilter);
     if (folder == null) {
-      return _returnFailure(request, 'Invalid context: $contextFilter');
+      context = _getSdkContext(analysisServer, contextFilter);
+      if (context == null) {
+        return _returnFailure(request, 'Invalid context: $contextFilter');
+      }
+    } else {
+      context = analysisServer.folderMap[folder];
     }
 
-    InternalAnalysisContext context = analysisServer.folderMap[folder];
-
     _writeResponse(request, (StringBuffer buffer) {
       _writePage(buffer, 'Analysis Server - Context Diagnostics',
           ['Context: $contextFilter'], (StringBuffer buffer) {
-        _writeContextDiagnostics(buffer, context);
+        _writeContextDiagnostics(buffer, context, contextFilter);
       });
     });
   }
@@ -1246,9 +1291,15 @@
       return _returnFailure(
           request, 'Query parameter $CONTEXT_QUERY_PARAM required');
     }
+    InternalAnalysisContext context = null;
     Folder folder = _findFolder(analysisServer, contextFilter);
     if (folder == null) {
-      return _returnFailure(request, 'Invalid context: $contextFilter');
+      context = _getSdkContext(analysisServer, contextFilter);
+      if (context == null) {
+        return _returnFailure(request, 'Invalid context: $contextFilter');
+      }
+    } else {
+      context = analysisServer.folderMap[folder];
     }
 
     List<String> priorityNames = <String>[];
@@ -1256,7 +1307,6 @@
     List<String> implicitNames = <String>[];
     Map<String, String> links = new HashMap<String, String>();
     List<CaughtException> exceptions = <CaughtException>[];
-    InternalAnalysisContext context = analysisServer.folderMap[folder];
     context.prioritySources.forEach((Source source) {
       priorityNames.add(source.fullName);
     });
@@ -1275,7 +1325,7 @@
           String link = makeLink(
               CACHE_ENTRY_PATH,
               {
-                CONTEXT_QUERY_PARAM: folder.path,
+                CONTEXT_QUERY_PARAM: contextFilter,
                 SOURCE_QUERY_PARAM: target.uri.toString()
               },
               sourceName,
@@ -1333,6 +1383,8 @@
       _writeOption(
           buffer, 'Enable strict call checks', options.enableStrictCallChecks);
       _writeOption(buffer, 'Enable super mixins', options.enableSuperMixins);
+      _writeOption(
+          buffer, 'Enable trailing commas', options.enableTrailingCommas);
       _writeOption(buffer, 'Generate dart2js hints', options.dart2jsHint);
       _writeOption(buffer, 'Generate errors in implicit files',
           options.generateImplicitErrors);
@@ -1478,13 +1530,17 @@
       return _returnFailure(
           request, 'Query parameter $CONTEXT_QUERY_PARAM required');
     }
+    InternalAnalysisContext context = null;
     Folder folder = _findFolder(analysisServer, contextFilter);
     if (folder == null) {
-      return _returnFailure(request, 'Invalid context: $contextFilter');
+      context = _getSdkContext(analysisServer, contextFilter);
+      if (context == null) {
+        return _returnFailure(request, 'Invalid context: $contextFilter');
+      }
+    } else {
+      context = analysisServer.folderMap[folder];
     }
 
-    InternalAnalysisContext context = analysisServer.folderMap[folder];
-
     _writeResponse(request, (StringBuffer buffer) {
       _writePage(buffer, 'Analysis Server - Context Validation Diagnostics',
           ['Context: $contextFilter'], (StringBuffer buffer) {
@@ -1721,21 +1777,34 @@
       buffer.write('<p><b>SDK Contexts</b></p>');
       buffer.write('<p>');
       first = true;
-      List<String> descriptors = analysisServer.sdkManager.sdkDescriptors
-          .map((SdkDescription descriptor) => descriptor.toString())
-          .toList();
+      DartSdkManager manager = analysisServer.sdkManager;
+      List<SdkDescription> descriptors = manager.sdkDescriptors;
       if (descriptors.isEmpty) {
         buffer.write('none');
       } else {
-        descriptors.sort();
-        for (String descriptor in descriptors) {
+        Map<String, SdkDescription> sdkMap = <String, SdkDescription>{};
+        for (SdkDescription descriptor in descriptors) {
+          sdkMap[descriptor.toString()] = descriptor;
+        }
+        List<String> descriptorNames = sdkMap.keys.toList();
+        descriptorNames.sort();
+        for (String name in descriptorNames) {
           if (first) {
             first = false;
           } else {
             buffer.write('<br>');
           }
-          // TODO(brianwilkerson) Add a link to information about the contexts.
-          buffer.write(descriptor);
+          SdkDescription descriptor = sdkMap[name];
+          String contextId = _encodeSdkDescriptor(descriptor);
+          buffer.write(makeLink(
+              CONTEXT_PATH,
+              {CONTEXT_QUERY_PARAM: contextId},
+              name,
+              _hasException(manager.getSdk(descriptor, () => null)?.context)));
+          buffer.write(' <small><b>[');
+          buffer.write(makeLink(CONTEXT_DIAGNOSTICS_PATH,
+              {CONTEXT_QUERY_PARAM: contextId}, 'diagnostics'));
+          buffer.write(']</b></small>');
         }
       }
       buffer.write('</p>');
@@ -1877,14 +1946,14 @@
    * Write diagnostic information about the given [context] to the given
    * [buffer].
    */
-  void _writeContextDiagnostics(
-      StringBuffer buffer, InternalAnalysisContext context) {
+  void _writeContextDiagnostics(StringBuffer buffer,
+      InternalAnalysisContext context, String contextFilter) {
     AnalysisDriver driver = (context as AnalysisContextImpl).driver;
     List<WorkItem> workItems = driver.currentWorkOrder?.workItems;
 
     buffer.write('<p>');
     buffer.write(makeLink(CONTEXT_VALIDATION_DIAGNOSTICS_PATH,
-        {CONTEXT_QUERY_PARAM: context.name}, 'Run validation'));
+        {CONTEXT_QUERY_PARAM: contextFilter}, 'Run validation'));
     buffer.write('</p>');
 
    buffer.write('<h3>Most Recently Performed Tasks</h3>');
diff --git a/pkg/analysis_server/test/integration/analysis/highlights_test2.dart b/pkg/analysis_server/test/integration/analysis/highlights_test2.dart
index 54dd3ef..3ac66ce 100644
--- a/pkg/analysis_server/test/integration/analysis/highlights_test2.dart
+++ b/pkg/analysis_server/test/integration/analysis/highlights_test2.dart
@@ -20,8 +20,9 @@
 
 @reflectiveTest
 class AnalysisHighlightsTest extends AbstractAnalysisServerIntegrationTest {
-  Future startServer() {
-    return server.start(useAnalysisHighlight2: true);
+  Future startServer({int servicesPort}) {
+    return server.start(
+        servicesPort: servicesPort, useAnalysisHighlight2: true);
   }
 
   test_highlights() {
diff --git a/pkg/analysis_server/test/integration/integration_tests.dart b/pkg/analysis_server/test/integration/integration_tests.dart
index 58f4aaa..9e764a0 100644
--- a/pkg/analysis_server/test/integration/integration_tests.dart
+++ b/pkg/analysis_server/test/integration/integration_tests.dart
@@ -205,7 +205,8 @@
   /**
    * Start [server].
    */
-  Future startServer() => server.start();
+  Future startServer({int servicesPort}) =>
+      server.start(servicesPort: servicesPort);
 
   /**
    * After every test, the server is stopped and [sourceDirectory] is deleted.
@@ -509,6 +510,9 @@
         .listen((String line) {
       lastCommunicationTime = currentElapseTime;
       String trimmedLine = line.trim();
+      if (trimmedLine.startsWith('Observatory listening on ')) {
+        return;
+      }
       _recordStdio('RECV: $trimmedLine');
       var message;
       try {
@@ -596,6 +600,7 @@
       {bool debugServer: false,
       int diagnosticPort,
       bool profileServer: false,
+      int servicesPort,
       bool useAnalysisHighlight2: false}) {
     if (_process != null) {
       throw new Exception('Process already started');
@@ -610,8 +615,14 @@
       arguments.add('--debug');
     }
     if (profileServer) {
-      arguments.add('--observe');
+      if (servicesPort == null) {
+        arguments.add('--observe');
+      } else {
+        arguments.add('--observe=$servicesPort');
+      }
       arguments.add('--pause-isolates-on-exit');
+    } else if (servicesPort != null) {
+      arguments.add('--enable-vm-service=$servicesPort');
     }
     if (Platform.packageRoot != null) {
       arguments.add('--package-root=${Platform.packageRoot}');
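
Concretely, start(profileServer: true, servicesPort: 8181) would launch the
server VM with --observe=8181 --pause-isolates-on-exit, while
start(servicesPort: 8181) alone adds only --enable-vm-service=8181; the latter
is what the memory benchmarks rely on to reach the allocation profile.
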
diff --git a/pkg/analyzer/lib/src/context/builder.dart b/pkg/analyzer/lib/src/context/builder.dart
new file mode 100644
index 0000000..6abd7bd
--- /dev/null
+++ b/pkg/analyzer/lib/src/context/builder.dart
@@ -0,0 +1,265 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library analyzer.src.context.context_builder;
+
+import 'dart:collection';
+import 'dart:core' hide Resource;
+import 'dart:io' as io;
+
+import 'package:analyzer/file_system/file_system.dart';
+import 'package:analyzer/plugin/resolver_provider.dart';
+import 'package:analyzer/source/analysis_options_provider.dart';
+import 'package:analyzer/source/embedder.dart';
+import 'package:analyzer/source/package_map_resolver.dart';
+import 'package:analyzer/source/sdk_ext.dart';
+import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/java_io.dart';
+import 'package:analyzer/src/generated/sdk.dart';
+import 'package:analyzer/src/generated/sdk_io.dart';
+import 'package:analyzer/src/generated/source.dart';
+import 'package:analyzer/src/generated/source_io.dart';
+import 'package:analyzer/src/task/options.dart';
+import 'package:package_config/discovery.dart';
+import 'package:package_config/packages.dart';
+import 'package:package_config/packages_file.dart';
+import 'package:package_config/src/packages_impl.dart';
+import 'package:path/path.dart' as path;
+import 'package:yaml/yaml.dart';
+
+/**
+ * A utility class used to build an analysis context for a given directory.
+ *
+ * The construction of analysis contexts is as follows:
+ *
+ * 1. Determine how package: URIs are to be resolved. This follows the lookup
+ *    algorithm defined by the [package specification][1].
+ *
+ * 2. Using the results of step 1, look in each package for an embedder file
+ *    (_embedder.yaml). If one exists then it defines the SDK. If multiple such
+ *    files exist then use the first one found. Otherwise, use the default SDK.
+ *
+ * 3. Look in each package for an SDK extension file (_sdkext). For each such
+ *    file, add the specified files to the SDK.
+ *
+ * 4. Look for an analysis options file (`analysis_options.yaml` or
+ *    `.analysis_options`) and process the options in the file.
+ *
+ * 5. Create a new context. Initialize its source factory based on steps 1, 2
+ *    and 3. Initialize its analysis options from step 4.
+ *
+ * [1]: https://github.com/dart-lang/dart_enhancement_proposals/blob/master/Accepted/0005%20-%20Package%20Specification/DEP-pkgspec.md
+ */
+class ContextBuilder {
+  /**
+   * The [ResourceProvider] by which paths are converted into [Resource]s.
+   */
+  final ResourceProvider resourceProvider;
+
+  /**
+   * The manager used to manage the DartSdk's that have been created so that
+   * they can be shared across contexts.
+   */
+  final DartSdkManager sdkManager;
+
+  /**
+   * The cache containing the contents of overlayed files.
+   */
+  final ContentCache contentCache;
+
+  /**
+   * The resolver provider used to create a package: URI resolver, or `null` if
+   * the normal (Package Specification DEP) lookup mechanism is to be used.
+   */
+  ResolverProvider packageResolverProvider;
+
+  /**
+   * The file path of the .packages file that should be used in place of any
+   * file found using the normal (Package Specification DEP) lookup mechanism.
+   */
+  String defaultPackageFilePath;
+
+  /**
+   * The file path of the packages directory that should be used in place of
+   * any file or directory found using the normal (Package Specification DEP)
+   * lookup mechanism.
+   */
+  String defaultPackagesDirectoryPath;
+
+  /**
+   * The file path of the analysis options file that should be used in place of
+   * any file in the root directory.
+   */
+  String defaultAnalysisOptionsFilePath;
+
+  /**
+   * The default analysis options that should be used unless some or all of them
+   * are overridden in the analysis options file.
+   */
+  AnalysisOptions defaultOptions;
+
+  /**
+   * Initialize a newly created builder to be ready to build a context rooted in
+   * the directory with the given [rootDirectoryPath].
+   */
+  ContextBuilder(this.resourceProvider, this.sdkManager, this.contentCache);
+
+  AnalysisContext buildContext(String rootDirectoryPath) {
+    // TODO(brianwilkerson) Split getAnalysisOptions so we can capture the
+    // option map and use it to run the options processors.
+    AnalysisOptions options = getAnalysisOptions(rootDirectoryPath);
+    InternalAnalysisContext context =
+        AnalysisEngine.instance.createAnalysisContext();
+    context.contentCache = contentCache;
+    context.sourceFactory = createSourceFactory(rootDirectoryPath, options);
+    context.analysisOptions = options;
+    //_processAnalysisOptions(context, optionMap);
+    return context;
+  }
+
+//  void _processAnalysisOptions(
+//      AnalysisContext context, Map<String, YamlNode> optionMap) {
+//    List<OptionsProcessor> optionsProcessors =
+//        AnalysisEngine.instance.optionsPlugin.optionsProcessors;
+//    try {
+//      optionsProcessors.forEach(
+//          (OptionsProcessor p) => p.optionsProcessed(context, optionMap));
+//
+//      // Fill in lint rule defaults in case lints are enabled and rules are
+//      // not specified in an options file.
+//      if (context.analysisOptions.lint && !containsLintRuleEntry(optionMap)) {
+//        setLints(context, linterPlugin.contributedRules);
+//      }
+//
+//      // Ask engine to further process options.
+//      if (optionMap != null) {
+//        configureContextOptions(context, optionMap);
+//      }
+//    } on Exception catch (e) {
+//      optionsProcessors.forEach((OptionsProcessor p) => p.onError(e));
+//    }
+//  }
+
+  Map<String, List<Folder>> convertPackagesToMap(Packages packages) {
+    if (packages == null || packages == Packages.noPackages) {
+      return null;
+    }
+    Map<String, List<Folder>> folderMap = new HashMap<String, List<Folder>>();
+    packages.asMap().forEach((String packagePath, Uri uri) {
+      folderMap[packagePath] = [resourceProvider.getFolder(path.fromUri(uri))];
+    });
+    return folderMap;
+  }
+
+  Packages createPackageMap(String rootDirectoryPath) {
+    if (defaultPackageFilePath != null) {
+      // TODO(brianwilkerson) Figure out why we're going through Uri rather than
+      // just creating the file from the path.
+      Uri fileUri = new Uri.file(defaultPackageFilePath);
+      io.File configFile = new io.File.fromUri(fileUri).absolute;
+      List<int> bytes = configFile.readAsBytesSync();
+      Map<String, Uri> map = parse(bytes, configFile.uri);
+      return new MapPackages(map);
+    } else if (defaultPackagesDirectoryPath != null) {
+      return getPackagesDirectory(
+          new Uri.directory(defaultPackagesDirectoryPath));
+    }
+    return findPackagesFromFile(new Uri.directory(rootDirectoryPath));
+  }
+
+  SourceFactory createSourceFactory(
+      String rootDirectoryPath, AnalysisOptions options) {
+    if (packageResolverProvider != null) {
+      Folder folder = resourceProvider.getResource('.');
+      UriResolver resolver = packageResolverProvider(folder);
+      if (resolver != null) {
+        // TODO(brianwilkerson) This doesn't support either embedder files or
+        // sdk extensions because we don't have a way to get the package map
+        // from the resolver.
+        List<UriResolver> resolvers = <UriResolver>[
+          new DartUriResolver(findSdk(null, options)),
+          resolver,
+          new ResourceUriResolver(resourceProvider)
+        ];
+        return new SourceFactory(resolvers);
+      }
+    }
+    Map<String, List<Folder>> packageMap =
+        convertPackagesToMap(createPackageMap(rootDirectoryPath));
+    List<UriResolver> resolvers = <UriResolver>[];
+    resolvers.add(new DartUriResolver(findSdk(packageMap, options)));
+    if (packageMap != null) {
+      resolvers.add(new SdkExtUriResolver(packageMap));
+      resolvers.add(new PackageMapUriResolver(resourceProvider, packageMap));
+    }
+    resolvers.add(new ResourceUriResolver(resourceProvider));
+    return new SourceFactory(resolvers);
+  }
+
+  /**
+   * Use the given [packageMap] and [options] to locate the SDK.
+   */
+  DartSdk findSdk(
+      Map<String, List<Folder>> packageMap, AnalysisOptions options) {
+    if (packageMap != null) {
+      EmbedderYamlLocator locator = new EmbedderYamlLocator(packageMap);
+      Map<Folder, YamlMap> embedderYamls = locator.embedderYamls;
+      EmbedderSdk embedderSdk = new EmbedderSdk(embedderYamls);
+      if (embedderSdk.sdkLibraries.length > 0) {
+        List<String> paths = <String>[];
+        for (Folder folder in embedderYamls.keys) {
+          paths.add(folder
+              .getChildAssumingFile(EmbedderYamlLocator.EMBEDDER_FILE_NAME)
+              .path);
+        }
+        SdkDescription description = new SdkDescription(paths, options);
+        DartSdk dartSdk = sdkManager.getSdk(description, () {
+          embedderSdk.analysisOptions = options;
+          embedderSdk.useSummary = sdkManager.canUseSummaries;
+          return embedderSdk;
+        });
+        return dartSdk;
+      }
+    }
+    String sdkPath = sdkManager.defaultSdkDirectory;
+    SdkDescription description = new SdkDescription(<String>[sdkPath], options);
+    return sdkManager.getSdk(description, () {
+      DirectoryBasedDartSdk sdk =
+          new DirectoryBasedDartSdk(new JavaFile(sdkPath));
+      sdk.analysisOptions = options;
+      sdk.useSummary = sdkManager.canUseSummaries;
+      return sdk;
+    });
+  }
+
+  AnalysisOptions getAnalysisOptions(String rootDirectoryPath) {
+    AnalysisOptionsImpl options = new AnalysisOptionsImpl.from(defaultOptions);
+    File optionsFile = getOptionsFile(rootDirectoryPath);
+    if (optionsFile != null) {
+      Map<String, YamlNode> fileOptions =
+          new AnalysisOptionsProvider().getOptionsFromFile(optionsFile);
+      applyToAnalysisOptions(options, fileOptions);
+    }
+    return options;
+  }
+
+  File getOptionsFile(String rootDirectoryPath) {
+    if (defaultAnalysisOptionsFilePath != null) {
+      return resourceProvider.getFile(defaultAnalysisOptionsFilePath);
+    }
+    Folder root = resourceProvider.getFolder(rootDirectoryPath);
+    for (Folder folder = root; folder != null; folder = folder.parent) {
+      File file =
+          folder.getChildAssumingFile(AnalysisEngine.ANALYSIS_OPTIONS_FILE);
+      if (file.exists) {
+        return file;
+      }
+      file = folder
+          .getChildAssumingFile(AnalysisEngine.ANALYSIS_OPTIONS_YAML_FILE);
+      if (file.exists) {
+        return file;
+      }
+    }
+    return null;
+  }
+}
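
A minimal sketch of driving the new builder (the DartSdkManager constructor
arguments are an assumption inferred from the defaultSdkDirectory and
canUseSummaries members this file reads; paths are illustrative):

  import 'package:analyzer/file_system/file_system.dart';
  import 'package:analyzer/file_system/physical_file_system.dart';
  import 'package:analyzer/src/context/builder.dart';
  import 'package:analyzer/src/generated/engine.dart';
  import 'package:analyzer/src/generated/sdk.dart';
  import 'package:analyzer/src/generated/source.dart';

  AnalysisContext buildContextFor(String projectPath) {
    ResourceProvider provider = PhysicalResourceProvider.INSTANCE;
    DartSdkManager sdkManager = new DartSdkManager('/opt/dart-sdk', false);
    ContextBuilder builder =
        new ContextBuilder(provider, sdkManager, new ContentCache());
    // getAnalysisOptions copies from defaultOptions, so it must be set.
    builder.defaultOptions = new AnalysisOptionsImpl();
    return builder.buildContext(projectPath);
  }
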
diff --git a/pkg/analyzer/lib/src/context/context.dart b/pkg/analyzer/lib/src/context/context.dart
index a5f8c37..27098db 100644
--- a/pkg/analyzer/lib/src/context/context.dart
+++ b/pkg/analyzer/lib/src/context/context.dart
@@ -288,7 +288,8 @@
             options.enableStrictCallChecks ||
         this._options.enableGenericMethods != options.enableGenericMethods ||
         this._options.enableAsync != options.enableAsync ||
-        this._options.enableSuperMixins != options.enableSuperMixins;
+        this._options.enableSuperMixins != options.enableSuperMixins ||
+        this._options.enableTrailingCommas != options.enableTrailingCommas;
     int cacheSize = options.cacheSize;
     if (this._options.cacheSize != cacheSize) {
       this._options.cacheSize = cacheSize;
@@ -304,6 +305,7 @@
     this._options.enableAsync = options.enableAsync;
     this._options.enableSuperMixins = options.enableSuperMixins;
     this._options.enableTiming = options.enableTiming;
+    this._options.enableTrailingCommas = options.enableTrailingCommas;
     this._options.hint = options.hint;
     this._options.incremental = options.incremental;
     this._options.incrementalApi = options.incrementalApi;
diff --git a/pkg/analyzer/lib/src/dart/ast/utilities.dart b/pkg/analyzer/lib/src/dart/ast/utilities.dart
index e2867a3..53f9109 100644
--- a/pkg/analyzer/lib/src/dart/ast/utilities.dart
+++ b/pkg/analyzer/lib/src/dart/ast/utilities.dart
@@ -2539,6 +2539,9 @@
       node.element;
 
   @override
+  Element visitExportDirective(ExportDirective node) => node.element;
+
+  @override
   Element visitFunctionDeclaration(FunctionDeclaration node) => node.element;
 
   @override
@@ -2570,6 +2573,8 @@
         if (element is LibraryElement) {
           return element.definingCompilationUnit;
         }
+      } else if (grandParent is LibraryDirective) {
+        return grandParent.element;
       }
     }
     return node.bestElement;
diff --git a/pkg/analyzer/lib/src/dart/constant/evaluation.dart b/pkg/analyzer/lib/src/dart/constant/evaluation.dart
index da2db55..0c8288a 100644
--- a/pkg/analyzer/lib/src/dart/constant/evaluation.dart
+++ b/pkg/analyzer/lib/src/dart/constant/evaluation.dart
@@ -347,7 +347,14 @@
         // This could happen in the event of invalid code.  The error will be
         // reported at constant evaluation time.
       }
-      if (constNode.arguments != null) {
+      if (constNode == null) {
+        // We cannot determine what element the annotation is on, nor the offset
+        // of the annotation, so there's not a lot of information in this
+        // message, but it's better than getting an exception.
+        // https://github.com/dart-lang/sdk/issues/26811
+        AnalysisEngine.instance.logger.logInformation(
+            'No annotationAst for $constant in ${constant.compilationUnit}');
+      } else if (constNode.arguments != null) {
         constNode.arguments.accept(referenceFinder);
       }
     } else if (constant is VariableElement) {
diff --git a/pkg/analyzer/lib/src/generated/engine.dart b/pkg/analyzer/lib/src/generated/engine.dart
index abded10..67a931c7 100644
--- a/pkg/analyzer/lib/src/generated/engine.dart
+++ b/pkg/analyzer/lib/src/generated/engine.dart
@@ -1095,6 +1095,12 @@
   bool get enableTiming;
 
   /**
+   * Return `true` to enable trailing commas in parameter and argument lists
+   * (sdk#26647).
+   */
+  bool get enableTrailingCommas;
+
+  /**
    * A flag indicating whether finer grained dependencies should be used
    * instead of just source level dependencies.
    *
@@ -1242,6 +1248,9 @@
   @override
   bool enableTiming = false;
 
+  @override
+  bool enableTrailingCommas = false;
+
   /**
    * A flag indicating whether errors, warnings and hints should be generated
    * for sources that are implicitly being analyzed.
@@ -1346,6 +1355,7 @@
     enableGenericMethods = options.enableGenericMethods;
     enableSuperMixins = options.enableSuperMixins;
     enableTiming = options.enableTiming;
+    enableTrailingCommas = options.enableTrailingCommas;
     generateImplicitErrors = options.generateImplicitErrors;
     generateSdkErrors = options.generateSdkErrors;
     hint = options.hint;
diff --git a/pkg/analyzer/lib/src/generated/parser.dart b/pkg/analyzer/lib/src/generated/parser.dart
index 2b7aa9f..ab3b05e 100644
--- a/pkg/analyzer/lib/src/generated/parser.dart
+++ b/pkg/analyzer/lib/src/generated/parser.dart
@@ -2189,6 +2189,12 @@
   bool parseGenericMethodComments = false;
 
   /**
+   * A flag indicating whether the parser is to parse trailing commas in
+   * parameter and argument lists (sdk#26647).
+   */
+  bool parseTrailingCommas = false;
+
+  /**
    * Initialize a newly created parser to parse tokens in the given [_source]
    * and to report any errors that are found to the given [_errorListener].
    */
@@ -2324,6 +2330,9 @@
       bool foundNamedArgument = argument is NamedExpression;
       bool generatedError = false;
       while (_optional(TokenType.COMMA)) {
+        if (parseTrailingCommas && _matches(TokenType.CLOSE_PAREN)) {
+          break;
+        }
         argument = parseArgument();
         arguments.add(argument);
         if (argument is NamedExpression) {
@@ -6112,6 +6121,21 @@
       // TODO(brianwilkerson) Improve the detection and reporting of missing and
       // mismatched delimiters.
       type = _currentToken.type;
+
+      // Advance past trailing commas as appropriate.
+      if (parseTrailingCommas && type == TokenType.COMMA) {
+        // Only parse commas trailing normal (non-positional/named) params.
+        if (rightSquareBracket == null && rightCurlyBracket == null) {
+          Token next = _peek();
+          if (next.type == TokenType.CLOSE_PAREN ||
+              next.type == TokenType.CLOSE_CURLY_BRACKET ||
+              next.type == TokenType.CLOSE_SQUARE_BRACKET) {
+            _advance();
+            type = _currentToken.type;
+          }
+        }
+      }
+
       if (type == TokenType.CLOSE_SQUARE_BRACKET) {
         rightSquareBracket = getAndAdvance();
         if (leftSquareBracket == null) {
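
With parseTrailingCommas (and the enableTrailingCommas option feeding it)
switched on, code such as the following illustrative snippet parses without
error:

  void send(String message, int retries,) {}

  main() {
    send(
      'hello',
      3,
    );
  }
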
diff --git a/pkg/analyzer/lib/src/generated/resolver.dart b/pkg/analyzer/lib/src/generated/resolver.dart
index 33dc56e..15c41d6 100644
--- a/pkg/analyzer/lib/src/generated/resolver.dart
+++ b/pkg/analyzer/lib/src/generated/resolver.dart
@@ -3499,6 +3499,12 @@
   bool _enclosingBlockContainsBreak = false;
 
   /**
+   * Set to `true` when a `continue` is encountered, and reset to `false` when a
+   * `do`, `while`, `for` or `switch` block is entered.
+   */
+  bool _enclosingBlockContainsContinue = false;
+
+  /**
    * Add node when a labelled `break` is encountered.
    */
   Set<AstNode> _enclosingBlockBreaksLabel = new Set<AstNode>();
@@ -3606,14 +3612,24 @@
   }
 
   @override
-  bool visitContinueStatement(ContinueStatement node) => false;
+  bool visitContinueStatement(ContinueStatement node) {
+    _enclosingBlockContainsContinue = true;
+    return false;
+  }
 
   @override
   bool visitDoStatement(DoStatement node) {
     bool outerBreakValue = _enclosingBlockContainsBreak;
+    bool outerContinueValue = _enclosingBlockContainsContinue;
     _enclosingBlockContainsBreak = false;
+    _enclosingBlockContainsContinue = false;
     try {
-      if (_nodeExits(node.body) && !_enclosingBlockContainsBreak) {
+      bool bodyExits = _nodeExits(node.body);
+      bool containsBreakOrContinue =
+          _enclosingBlockContainsBreak || _enclosingBlockContainsContinue;
+      // Even if we determine that the body "exits", there might be break or
+      // continue statements that actually mean it _doesn't_ always exit.
+      if (bodyExits && !containsBreakOrContinue) {
         return true;
       }
       Expression conditionExpression = node.condition;
@@ -3630,6 +3646,7 @@
       return false;
     } finally {
       _enclosingBlockContainsBreak = outerBreakValue;
+      _enclosingBlockContainsContinue = outerContinueValue;
     }
   }
 
@@ -4840,7 +4857,7 @@
    * Place an info node into the error stream indicating that a
    * [type] has been inferred as the type of [node].
    */
-  void recordInference(AstNode node, DartType type) {
+  void recordInference(Expression node, DartType type) {
     if (!_inferenceHints) {
       return;
     }
@@ -5636,11 +5653,6 @@
   bool resolveOnlyCommentInFunctionBody = false;
 
   /**
-   * True if we're analyzing in strong mode.
-   */
-  bool _strongMode;
-
-  /**
    * Body of the function currently being analyzed, if any.
    */
   FunctionBody _currentFunctionBody;
@@ -5671,7 +5683,6 @@
     this.typeSystem = definingLibrary.context.typeSystem;
     bool strongModeHints = false;
     AnalysisOptions options = definingLibrary.context.analysisOptions;
-    _strongMode = options.strongMode;
     if (options is AnalysisOptionsImpl) {
       strongModeHints = options.strongModeHints;
     }
@@ -6362,24 +6373,13 @@
 
   @override
   Object visitDefaultFormalParameter(DefaultFormalParameter node) {
-    ParameterElement element = node.element;
-    InferenceContext.setType(node.defaultValue, element.type);
+    InferenceContext.setType(node.defaultValue, node.parameter.element?.type);
     super.visitDefaultFormalParameter(node);
+    ParameterElement element = node.element;
     if (element.initializer != null && node.defaultValue != null) {
       (element.initializer as FunctionElementImpl).returnType =
           node.defaultValue.staticType;
     }
-    if (_strongMode &&
-        node.defaultValue != null &&
-        element.hasImplicitType &&
-        element is! FieldFormalParameterElement) {
-
-      DartType type = node.defaultValue.staticType;
-      if (!type.isBottom && !type.isDynamic) {
-        (element as ParameterElementImpl).type = type;
-        inferenceContext.recordInference(node, type);
-      }
-    }
     // Clone the ASTs for default formal parameters, so that we can use them
     // during constant evaluation.
     if (!LibraryElementImpl.hasResolutionCapability(
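
The user-visible effect of deleting this block, as an illustration consistent
with the checker-test updates further down (see SDK issue 25794 cited there):
a parameter whose only type information is its default value is no longer
inferred and is treated as `dynamic`:

  // Previously inferred as `int`; after this change, implicitly `dynamic`
  // (reported as IMPLICIT_DYNAMIC_PARAMETER under strict settings):
  void df2([x = 42]) {}
  void nf2({x: 42}) {}
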
diff --git a/pkg/analyzer/lib/src/summary/summarize_elements.dart b/pkg/analyzer/lib/src/summary/summarize_elements.dart
index 50e1bb6..98e3de4 100644
--- a/pkg/analyzer/lib/src/summary/summarize_elements.dart
+++ b/pkg/analyzer/lib/src/summary/summarize_elements.dart
@@ -902,14 +902,10 @@
     DartType type = parameter.type;
     if (parameter.hasImplicitType) {
       Element contextParent = context.enclosingElement;
-      // Strong mode infers parameters in two cases:
-      // - instance members (i.e. not constructors or static members),
-      // - parameters with default values, except initializing formals
-      //   (the type comes from the field).
       if (!parameter.isInitializingFormal &&
           contextParent is ExecutableElement &&
-          (!contextParent.isStatic && contextParent is! ConstructorElement ||
-              parameter.parameterKind != ParameterKind.REQUIRED)) {
+          !contextParent.isStatic &&
+          contextParent is! ConstructorElement) {
         b.inferredTypeSlot = storeInferredType(type, context);
       }
     } else {
diff --git a/pkg/analyzer/lib/src/task/dart.dart b/pkg/analyzer/lib/src/task/dart.dart
index 804744a..25e74ec 100644
--- a/pkg/analyzer/lib/src/task/dart.dart
+++ b/pkg/analyzer/lib/src/task/dart.dart
@@ -3972,6 +3972,7 @@
     parser.parseFunctionBodies = options.analyzeFunctionBodiesPredicate(source);
     parser.parseGenericMethods = options.enableGenericMethods;
     parser.parseGenericMethodComments = options.strongMode;
+    parser.parseTrailingCommas = options.enableTrailingCommas;
     CompilationUnit unit = parser.parseCompilationUnit(tokenStream);
     unit.lineInfo = lineInfo;
 
diff --git a/pkg/analyzer/lib/src/task/options.dart b/pkg/analyzer/lib/src/task/options.dart
index 0f47c44..a25d146 100644
--- a/pkg/analyzer/lib/src/task/options.dart
+++ b/pkg/analyzer/lib/src/task/options.dart
@@ -47,6 +47,7 @@
   static const String enableGenericMethods = 'enableGenericMethods';
   static const String enableStrictCallChecks = 'enableStrictCallChecks';
   static const String enableSuperMixins = 'enableSuperMixins';
+  static const String enableTrailingCommas = 'enableTrailingCommas';
 
   /// This option is deprecated.
   static const String enableConditionalDirectives =
@@ -488,6 +489,14 @@
         context.analysisOptions = options;
       }
     }
+    if (feature == AnalyzerOptions.enableTrailingCommas) {
+      if (isTrue(value)) {
+        AnalysisOptionsImpl options =
+            new AnalysisOptionsImpl.from(context.analysisOptions);
+        options.enableTrailingCommas = true;
+        context.analysisOptions = options;
+      }
+    }
     if (feature == AnalyzerOptions.enableGenericMethods) {
       if (isTrue(value)) {
         AnalysisOptionsImpl options =
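
A sketch of enabling the flag programmatically, composed from the option
handling above and the ParseDartTask wiring shown earlier (names are taken
from those hunks; the surrounding context setup is elided):

  AnalysisOptionsImpl options =
      new AnalysisOptionsImpl.from(context.analysisOptions);
  options.enableTrailingCommas = true;
  context.analysisOptions = options;
  // ParseDartTask then copies the option onto the parser:
  //   parser.parseTrailingCommas = options.enableTrailingCommas;
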
diff --git a/pkg/analyzer/lib/src/util/fast_uri.dart b/pkg/analyzer/lib/src/util/fast_uri.dart
index 8af548c..0df5ce05 100644
--- a/pkg/analyzer/lib/src/util/fast_uri.dart
+++ b/pkg/analyzer/lib/src/util/fast_uri.dart
@@ -62,23 +62,7 @@
   bool get hasFragment => false;
 
   @override
-  int get hashCode {
-    // This code is copied from the standard Uri implementation.
-    // It is important that Uri and FastUri generate compatible hashCodes
-    // because Uri and FastUri may be used as keys in the same map.
-    int combine(part, current) {
-      // The sum is truncated to 30 bits to make sure it fits into a Smi.
-      return (current * 31 + part.hashCode) & 0x3FFFFFFF;
-    }
-    return _hashCode ??= combine(
-        scheme,
-        combine(
-            userInfo,
-            combine(
-                host,
-                combine(port,
-                    combine(path, combine(query, combine(fragment, 1)))))));
-  }
+  int get hashCode => _text.hashCode;
 
   @override
   bool get hasPort => false;
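
The invariant the removed comment spelled out still has to hold after this
simplification: `Uri` and `FastUri` may be used as keys in the same map, so
equal instances must agree on `hashCode`. A small illustrative check
(assuming, as the one-line implementation implies, that the platform `Uri`
also hashes its canonical text):

  void checkHashCompatibility(Uri a, Uri b) {
    // `==` implies equal hash codes, across Uri and FastUri alike.
    if (a == b) {
      assert(a.hashCode == b.hashCode);
    }
  }
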
diff --git a/pkg/analyzer/pubspec.yaml b/pkg/analyzer/pubspec.yaml
index 0723e30..42066a8 100644
--- a/pkg/analyzer/pubspec.yaml
+++ b/pkg/analyzer/pubspec.yaml
@@ -1,5 +1,5 @@
 name: analyzer
-version: 0.27.4-alpha.14
+version: 0.27.4-alpha.15
 author: Dart Team <misc@dartlang.org>
 description: Static analyzer for Dart.
 homepage: https://github.com/dart-lang/sdk/tree/master/pkg/analyzer
diff --git a/pkg/analyzer/test/generated/all_the_rest_test.dart b/pkg/analyzer/test/generated/all_the_rest_test.dart
index f9c4c8b..70e1c73 100644
--- a/pkg/analyzer/test/generated/all_the_rest_test.dart
+++ b/pkg/analyzer/test/generated/all_the_rest_test.dart
@@ -2565,14 +2565,14 @@
 
 @reflectiveTest
 class ElementLocatorTest extends ResolverTestCase {
-  void fail_locate_ExportDirective() {
+  void test_locate_ExportDirective() {
     AstNode id = _findNodeIn("export", "export 'dart:core';");
     Element element = ElementLocator.locate(id);
     EngineTestCase.assertInstanceOf(
-        (obj) => obj is ImportElement, ImportElement, element);
+        (obj) => obj is ExportElement, ExportElement, element);
   }
 
-  void fail_locate_Identifier_libraryDirective() {
+  void test_locate_Identifier_libraryDirective() {
     AstNode id = _findNodeIn("foo", "library foo.bar;");
     Element element = ElementLocator.locate(id);
     EngineTestCase.assertInstanceOf(
@@ -3267,18 +3267,6 @@
  */
 @reflectiveTest
 class ExitDetectorTest extends ParserTestCase {
-  void fail_doStatement_continue_with_label() {
-    _assertFalse("{ x: do { continue x; } while(true); }");
-  }
-
-  void fail_whileStatement_continue_with_label() {
-    _assertFalse("{ x: while (true) { continue x; } }");
-  }
-
-  void fail_whileStatement_doStatement_scopeRequired() {
-    _assertTrue("{ while (true) { x: do { continue x; } while(true); }");
-  }
-
   void test_asExpression() {
     _assertFalse("a as Object;");
   }
@@ -3487,6 +3475,40 @@
     _assertTrue("{ do {} while (throw ''); }");
   }
 
+  void test_doStatement_break_and_throw() {
+    _assertFalse("{ do { if (1==1) break; throw 'T'; } while (0==1); }");
+  }
+
+  void test_doStatement_continue_and_throw() {
+    _assertFalse("{ do { if (1==1) continue; throw 'T'; } while (0==1); }");
+  }
+
+  void test_doStatement_continueInSwitch_and_throw() {
+    _assertFalse('''
+{
+  do {
+    switch (1) {
+      L: case 0: continue;
+      M: case 1: break;
+    }
+    throw 'T';
+  } while (0 == 1);
+}''');
+  }
+
+  void test_doStatement_continueDoInSwitch_and_throw() {
+    _assertFalse('''
+{
+  D: do {
+    switch (1) {
+      L: case 0: continue D;
+      M: case 1: break;
+    }
+    throw 'T';
+  } while (0 == 1);
+}''');
+  }
+
   void test_doStatement_true_break() {
     _assertFalse("{ do { break; } while (true); }");
   }
@@ -3495,6 +3517,11 @@
     _assertTrue("{ do { continue; } while (true); }");
   }
 
+  void test_doStatement_true_continueWithLabel() {
+    _assertTrue("{ x: do { continue x; } while (true); }");
+  }
+
   void test_doStatement_true_if_return() {
     _assertTrue("{ do { if (true) {return null;} } while (true); }");
   }
@@ -3759,6 +3786,19 @@
     _assertTrue("switch (i) { case 0: case 1: return 0; default: return 1; }");
   }
 
+  // The ExitDetector could conceivably follow switch continue labels and
+  // determine that `case 0` exits, `case 1` continues to an exiting case, and
+  // `default` exits, so the switch exits.
+  @failingTest
+  void test_switch_includesContinue() {
+    _assertTrue('''
+switch (i) {
+  zero: case 0: return 0;
+  case 1: continue zero;
+  default: return 1;
+}''');
+  }
+
   void test_switch_noDefault() {
     _assertFalse("switch (i) { case 0: return 0; }");
   }
@@ -3888,6 +3928,14 @@
     _assertTrue("{ while (true) { continue; } }");
   }
 
+  void test_whileStatement_true_continueWithLabel() {
+    _assertTrue("{ x: while (true) { continue x; } }");
+  }
+
+  void test_whileStatement_true_doStatement_scopeRequired() {
+    _assertTrue("{ while (true) { x: do { continue x; } while (true); } }");
+  }
+
   void test_whileStatement_true_if_return() {
     _assertTrue("{ while (true) { if (true) {return null;} } }");
   }
diff --git a/pkg/analyzer/test/generated/parser_test.dart b/pkg/analyzer/test/generated/parser_test.dart
index 0674039..58c7c6f 100644
--- a/pkg/analyzer/test/generated/parser_test.dart
+++ b/pkg/analyzer/test/generated/parser_test.dart
@@ -1216,6 +1216,50 @@
         "external typedef F();", [ParserErrorCode.EXTERNAL_TYPEDEF]);
   }
 
+  void test_extraCommaInParameterList() {
+    parseTrailingCommas = true;
+    parse4("parseFormalParameterList", "(int a, , int b)",
+        [ParserErrorCode.MISSING_IDENTIFIER, ParserErrorCode.EXPECTED_TOKEN]);
+    parseTrailingCommas = false;
+    parse4("parseFormalParameterList", "(int a, , int b)",
+        [ParserErrorCode.MISSING_IDENTIFIER, ParserErrorCode.EXPECTED_TOKEN]);
+  }
+
+  void test_extraCommaTrailingNamedParameterGroup() {
+    parseTrailingCommas = true;
+    parse4("parseFormalParameterList", "({int b},)", [
+      ParserErrorCode.MISSING_IDENTIFIER,
+      ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS
+    ]);
+    parseTrailingCommas = false;
+    parse4("parseFormalParameterList", "({int b},)", [
+      ParserErrorCode.MISSING_IDENTIFIER,
+      ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS
+    ]);
+  }
+
+  void test_extraCommaTrailingPositionalParameterGroup() {
+    parseTrailingCommas = true;
+    parse4("parseFormalParameterList", "([int b],)", [
+      ParserErrorCode.MISSING_IDENTIFIER,
+      ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS
+    ]);
+    parseTrailingCommas = false;
+    parse4("parseFormalParameterList", "([int b],)", [
+      ParserErrorCode.MISSING_IDENTIFIER,
+      ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS
+    ]);
+  }
+
+  void test_extraTrailingCommaInParameterList() {
+    parseTrailingCommas = true;
+    parse4("parseFormalParameterList", "(a,,)",
+        [ParserErrorCode.MISSING_IDENTIFIER]);
+    parseTrailingCommas = false;
+    parse4("parseFormalParameterList", "(a,,)",
+        [ParserErrorCode.MISSING_IDENTIFIER, ParserErrorCode.EXPECTED_TOKEN]);
+  }
+
   void test_factoryTopLevelDeclaration_class() {
     ParserTestCase.parseCompilationUnit(
         "factory class C {}", [ParserErrorCode.FACTORY_TOP_LEVEL_DECLARATION]);
@@ -1322,7 +1366,7 @@
         "0++", [ParserErrorCode.ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE]);
   }
 
-  void test_illegalAssignmentToNonAssignable_postfix_plusPlus_parethesized() {
+  void test_illegalAssignmentToNonAssignable_postfix_plusPlus_parenthesized() {
     parseExpression(
         "(x)++", [ParserErrorCode.ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE]);
   }
@@ -1715,6 +1759,12 @@
     expect(expression.isSynthetic, isTrue);
   }
 
+  void test_missingIdentifierForParameterGroup() {
+    parseTrailingCommas = true;
+    parse4("parseFormalParameterList", "(,)",
+        [ParserErrorCode.MISSING_IDENTIFIER]);
+  }
+
   void test_missingKeywordOperator() {
     parse3("parseOperator", <Object>[emptyCommentAndMetadata(), null, null],
         "+(x) {}", [ParserErrorCode.MISSING_KEYWORD_OPERATOR]);
@@ -2777,6 +2827,12 @@
   bool enableGenericMethodComments = false;
 
   /**
+   * A flag indicating whether the parser should accept trailing commas in
+   * parameter and argument lists for this test.
+   */
+  bool parseTrailingCommas = false;
+
+  /**
    * Return a CommentAndMetadata object with the given values that can be used for testing.
    *
    * @param comment the comment to be wrapped in the object
@@ -2830,6 +2886,7 @@
     parser.parseGenericMethods = enableGenericMethods;
     parser.parseGenericMethodComments = enableGenericMethodComments;
     parser.parseFunctionBodies = parseFunctionBodies;
+    parser.parseTrailingCommas = parseTrailingCommas;
     Object result =
         invokeParserMethodImpl(parser, methodName, objects, tokenStream);
     //
@@ -4755,6 +4812,13 @@
     expect(arguments, hasLength(2));
   }
 
+  void test_parseArgumentList_trailing_comma() {
+    parseTrailingCommas = true;
+    ArgumentList argumentList = parse4("parseArgumentList", "(x, y, z,)");
+    NodeList<Expression> arguments = argumentList.arguments;
+    expect(arguments, hasLength(3));
+  }
+
   void test_parseAssertStatement() {
     AssertStatement statement = parse4("parseAssertStatement", "assert (x);");
     expect(statement.assertKeyword, isNotNull);
@@ -5937,6 +6001,22 @@
     expect(method.body, isNotNull);
   }
 
+  void test_parseClassMember_method_trailing_commas() {
+    parseTrailingCommas = true;
+    MethodDeclaration method =
+        parse("parseClassMember", <Object>["C"], "void f(int x, int y,) {}");
+    expect(method.documentationComment, isNull);
+    expect(method.externalKeyword, isNull);
+    expect(method.modifierKeyword, isNull);
+    expect(method.propertyKeyword, isNull);
+    expect(method.returnType, isNotNull);
+    expect(method.name, isNotNull);
+    expect(method.operatorKeyword, isNull);
+    expect(method.typeParameters, isNull);
+    expect(method.parameters, isNotNull);
+    expect(method.body, isNotNull);
+  }
+
   void test_parseClassMember_operator_index() {
     MethodDeclaration method =
         parse("parseClassMember", <Object>["C"], "int operator [](int i) {}");
@@ -6327,12 +6407,13 @@
 
   void test_parseCommentReferences_notClosed_withIdentifier() {
     DocumentationCommentToken docToken = new DocumentationCommentToken(
-          TokenType.MULTI_LINE_COMMENT, "/** [namePrefix some text", 5);
-    List<CommentReference> references =
-        parse("parseCommentReferences", <Object>[<DocumentationCommentToken>[
-      docToken
-    ]], "")
-        as List<CommentReference>;
+        TokenType.MULTI_LINE_COMMENT, "/** [namePrefix some text", 5);
+    List<CommentReference> references = parse(
+        "parseCommentReferences",
+        <Object>[
+          <DocumentationCommentToken>[docToken]
+        ],
+        "") as List<CommentReference>;
     expect(docToken.references, hasLength(1));
     expect(references, hasLength(1));
     Token referenceToken = docToken.references[0];
@@ -7777,6 +7858,17 @@
     expect(parameterList.rightParenthesis, isNotNull);
   }
 
+  void test_parseFormalParameterList_named_trailing_comma() {
+    parseTrailingCommas = true;
+    FormalParameterList parameterList =
+        parse4("parseFormalParameterList", "(A a, {B b,})");
+    expect(parameterList.leftParenthesis, isNotNull);
+    expect(parameterList.leftDelimiter, isNotNull);
+    expect(parameterList.parameters, hasLength(2));
+    expect(parameterList.rightDelimiter, isNotNull);
+    expect(parameterList.rightParenthesis, isNotNull);
+  }
+
   void test_parseFormalParameterList_normal_multiple() {
     FormalParameterList parameterList =
         parse4("parseFormalParameterList", "(A a, B b, C c)");
@@ -7817,6 +7909,17 @@
     expect(parameterList.rightParenthesis, isNotNull);
   }
 
+  void test_parseFormalParameterList_normal_single_trailing_comma() {
+    parseTrailingCommas = true;
+    FormalParameterList parameterList =
+        parse4("parseFormalParameterList", "(A a,)");
+    expect(parameterList.leftParenthesis, isNotNull);
+    expect(parameterList.leftDelimiter, isNull);
+    expect(parameterList.parameters, hasLength(1));
+    expect(parameterList.rightDelimiter, isNull);
+    expect(parameterList.rightParenthesis, isNotNull);
+  }
+
   void test_parseFormalParameterList_positional_multiple() {
     FormalParameterList parameterList =
         parse4("parseFormalParameterList", "([A a = null, B b, C c = null])");
@@ -7837,6 +7940,17 @@
     expect(parameterList.rightParenthesis, isNotNull);
   }
 
+  void test_parseFormalParameterList_positional_trailing_comma() {
+    parseTrailingCommas = true;
+    FormalParameterList parameterList =
+        parse4("parseFormalParameterList", "(A a, [B b,])");
+    expect(parameterList.leftParenthesis, isNotNull);
+    expect(parameterList.leftDelimiter, isNotNull);
+    expect(parameterList.parameters, hasLength(2));
+    expect(parameterList.rightDelimiter, isNotNull);
+    expect(parameterList.rightParenthesis, isNotNull);
+  }
+
   void test_parseFormalParameterList_prefixedType() {
     FormalParameterList parameterList =
         parse4("parseFormalParameterList", "(io.File f)");
diff --git a/pkg/analyzer/test/src/context/builder_test.dart b/pkg/analyzer/test/src/context/builder_test.dart
new file mode 100644
index 0000000..758a03c
--- /dev/null
+++ b/pkg/analyzer/test/src/context/builder_test.dart
@@ -0,0 +1,340 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library analyzer.test.src.context.context_builder_test;
+
+import 'dart:io' as io;
+
+import 'package:analyzer/file_system/file_system.dart';
+import 'package:analyzer/file_system/memory_file_system.dart';
+import 'package:analyzer/file_system/physical_file_system.dart';
+import 'package:analyzer/src/context/builder.dart';
+import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/sdk.dart';
+import 'package:analyzer/src/generated/source.dart';
+import 'package:package_config/packages.dart';
+import 'package:package_config/src/packages_impl.dart';
+import 'package:path/path.dart' as path;
+import 'package:unittest/unittest.dart';
+
+import '../../generated/test_support.dart';
+import '../../reflective_tests.dart';
+import '../../utils.dart';
+import 'mock_sdk.dart';
+
+main() {
+  initializeTestEnvironment();
+  runReflectiveTests(ContextBuilderTest_WithDisk);
+  runReflectiveTests(ContextBuilderTest_WithoutDisk);
+}
+
+@reflectiveTest
+class ContextBuilderTest_WithDisk extends EngineTestCase {
+  /**
+   * The resource provider to be used by tests.
+   */
+  PhysicalResourceProvider resourceProvider;
+
+  /**
+   * The path context used to manipulate file paths.
+   */
+  path.Context pathContext;
+
+  /**
+   * The SDK manager used by the tests.
+   */
+  DartSdkManager sdkManager;
+
+  /**
+   * The content cache used by the tests.
+   */
+  ContentCache contentCache;
+
+  @override
+  void setUp() {
+    resourceProvider = PhysicalResourceProvider.INSTANCE;
+    pathContext = resourceProvider.pathContext;
+    sdkManager = new DartSdkManager('', false, (_) => new MockSdk());
+    contentCache = new ContentCache();
+  }
+
+  void test_createPackageMap_fromPackageDirectory_explicit() {
+    withTempDir((io.Directory tempDir) {
+      // Use a package directory that is outside the project directory.
+      String rootPath = tempDir.path;
+      String projectPath = pathContext.join(rootPath, 'project');
+      String packageDirPath = pathContext.join(rootPath, 'packages');
+      String fooName = 'foo';
+      String fooPath = pathContext.join(packageDirPath, fooName);
+      String barName = 'bar';
+      String barPath = pathContext.join(packageDirPath, barName);
+      new io.Directory(projectPath).createSync(recursive: true);
+      new io.Directory(fooPath).createSync(recursive: true);
+      new io.Directory(barPath).createSync(recursive: true);
+
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      builder.defaultPackagesDirectoryPath = packageDirPath;
+
+      Packages packages = builder.createPackageMap(projectPath);
+      expect(packages, isNotNull);
+      Map<String, Uri> map = packages.asMap();
+      expect(map, hasLength(2));
+      expect(map[fooName], new Uri.directory(fooPath));
+      expect(map[barName], new Uri.directory(barPath));
+    });
+  }
+
+  void test_createPackageMap_fromPackageDirectory_inRoot() {
+    withTempDir((io.Directory tempDir) {
+      // Use a package directory that is inside the project directory.
+      String projectPath = tempDir.path;
+      String packageDirPath = pathContext.join(projectPath, 'packages');
+      String fooName = 'foo';
+      String fooPath = pathContext.join(packageDirPath, fooName);
+      String barName = 'bar';
+      String barPath = pathContext.join(packageDirPath, barName);
+      new io.Directory(fooPath).createSync(recursive: true);
+      new io.Directory(barPath).createSync(recursive: true);
+
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      Packages packages = builder.createPackageMap(projectPath);
+      expect(packages, isNotNull);
+      Map<String, Uri> map = packages.asMap();
+      expect(map, hasLength(2));
+      expect(map[fooName], new Uri.directory(fooPath));
+      expect(map[barName], new Uri.directory(barPath));
+    });
+  }
+
+  void test_createPackageMap_fromPackageFile_explicit() {
+    withTempDir((io.Directory tempDir) {
+      // Use a package file that is outside the project directory's hierarchy.
+      String rootPath = tempDir.path;
+      String projectPath = pathContext.join(rootPath, 'project');
+      String packageFilePath = pathContext.join(rootPath, 'child', '.packages');
+      new io.Directory(projectPath).createSync(recursive: true);
+      new io.File(packageFilePath)
+        ..createSync(recursive: true)
+        ..writeAsStringSync(r'''
+foo:/pkg/foo
+bar:/pkg/bar
+''');
+
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      builder.defaultPackageFilePath = packageFilePath;
+      Packages packages = builder.createPackageMap(projectPath);
+      expect(packages, isNotNull);
+      Map<String, Uri> map = packages.asMap();
+      expect(map, hasLength(2));
+      expect(map['foo'], new Uri.directory('/pkg/foo'));
+      expect(map['bar'], new Uri.directory('/pkg/bar'));
+    });
+  }
+
+  void test_createPackageMap_fromPackageFile_inParentOfRoot() {
+    withTempDir((io.Directory tempDir) {
+      // Use a package file that is inside the parent of the project directory.
+      String rootPath = tempDir.path;
+      String projectPath = pathContext.join(rootPath, 'project');
+      String packageFilePath = pathContext.join(rootPath, '.packages');
+      new io.Directory(projectPath).createSync(recursive: true);
+      new io.File(packageFilePath)
+        ..createSync(recursive: true)
+        ..writeAsStringSync(r'''
+foo:/pkg/foo
+bar:/pkg/bar
+''');
+
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      Packages packages = builder.createPackageMap(projectPath);
+      expect(packages, isNotNull);
+      Map<String, Uri> map = packages.asMap();
+      expect(map, hasLength(2));
+      expect(map['foo'], new Uri.directory('/pkg/foo'));
+      expect(map['bar'], new Uri.directory('/pkg/bar'));
+    });
+  }
+
+  void test_createPackageMap_fromPackageFile_inRoot() {
+    withTempDir((io.Directory tempDir) {
+      // Use a package file that is inside the project directory.
+      String rootPath = tempDir.path;
+      String projectPath = pathContext.join(rootPath, 'project');
+      String packageFilePath = pathContext.join(projectPath, '.packages');
+      new io.Directory(projectPath).createSync(recursive: true);
+      new io.File(packageFilePath)
+        ..createSync(recursive: true)
+        ..writeAsStringSync(r'''
+foo:/pkg/foo
+bar:/pkg/bar
+''');
+
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      Packages packages = builder.createPackageMap(projectPath);
+      expect(packages, isNotNull);
+      Map<String, Uri> map = packages.asMap();
+      expect(map, hasLength(2));
+      expect(map['foo'], new Uri.directory('/pkg/foo'));
+      expect(map['bar'], new Uri.directory('/pkg/bar'));
+    });
+  }
+
+  void test_createPackageMap_none() {
+    withTempDir((io.Directory tempDir) {
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      Packages packages = builder.createPackageMap(tempDir.path);
+      expect(packages, same(Packages.noPackages));
+    });
+  }
+
+  /**
+   * Execute the [test] function with a temporary [directory]. The test function
+   * can perform any disk operations within the directory; the directory (and
+   * its contents) will be removed after the function returns.
+   */
+  void withTempDir(test(io.Directory directory)) {
+    io.Directory directory =
+        io.Directory.systemTemp.createTempSync('analyzer_');
+    try {
+      test(directory);
+    } finally {
+      directory.deleteSync(recursive: true);
+    }
+  }
+}
+
+@reflectiveTest
+class ContextBuilderTest_WithoutDisk extends EngineTestCase {
+  /**
+   * The resource provider to be used by tests.
+   */
+  MemoryResourceProvider resourceProvider;
+
+  /**
+   * The SDK manager used by the tests.
+   */
+  DartSdkManager sdkManager;
+
+  /**
+   * The content cache used by the tests.
+   */
+  ContentCache contentCache;
+
+  void fail_createSourceFactory() {
+    fail('Incomplete test');
+  }
+
+  void fail_findSdkResolver() {
+    fail('Incomplete test');
+  }
+
+  @override
+  void setUp() {
+    resourceProvider = new MemoryResourceProvider();
+    sdkManager = new DartSdkManager('', false, (_) => new MockSdk());
+    contentCache = new ContentCache();
+  }
+
+  void test_convertPackagesToMap_noPackages() {
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    expect(builder.convertPackagesToMap(Packages.noPackages), isNull);
+  }
+
+  void test_convertPackagesToMap_null() {
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    expect(builder.convertPackagesToMap(null), isNull);
+  }
+
+  void test_convertPackagesToMap_packages() {
+    String fooName = 'foo';
+    String fooPath = '/pkg/foo';
+    Uri fooUri = new Uri.directory(fooPath);
+    String barName = 'bar';
+    String barPath = '/pkg/bar';
+    Uri barUri = new Uri.directory(barPath);
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    MapPackages packages = new MapPackages({fooName: fooUri, barName: barUri});
+    Map<String, List<Folder>> result = builder.convertPackagesToMap(packages);
+    expect(result, isNotNull);
+    expect(result, hasLength(2));
+    expect(result[fooName], hasLength(1));
+    expect(result[fooName][0].path, fooPath);
+    expect(result[barName], hasLength(1));
+    expect(result[barName][0].path, barPath);
+  }
+
+  void test_getOptionsFile_explicit() {
+    String path = '/some/directory/path';
+    String filePath = '/options/analysis.yaml';
+    resourceProvider.newFile(filePath, '');
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    builder.defaultAnalysisOptionsFilePath = filePath;
+    File result = builder.getOptionsFile(path);
+    expect(result, isNotNull);
+    expect(result.path, filePath);
+  }
+
+  void test_getOptionsFile_inParentOfRoot_new() {
+    String parentPath = '/some/directory';
+    String path = '$parentPath/path';
+    String filePath =
+        '$parentPath/${AnalysisEngine.ANALYSIS_OPTIONS_YAML_FILE}';
+    resourceProvider.newFile(filePath, '');
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    File result = builder.getOptionsFile(path);
+    expect(result, isNotNull);
+    expect(result.path, filePath);
+  }
+
+  void test_getOptionsFile_inParentOfRoot_old() {
+    String parentPath = '/some/directory';
+    String path = '$parentPath/path';
+    String filePath = '$parentPath/${AnalysisEngine.ANALYSIS_OPTIONS_FILE}';
+    resourceProvider.newFile(filePath, '');
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    File result = builder.getOptionsFile(path);
+    expect(result, isNotNull);
+    expect(result.path, filePath);
+  }
+
+  void test_getOptionsFile_inRoot_new() {
+    String path = '/some/directory/path';
+    String filePath = '$path/${AnalysisEngine.ANALYSIS_OPTIONS_YAML_FILE}';
+    resourceProvider.newFile(filePath, '');
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    File result = builder.getOptionsFile(path);
+    expect(result, isNotNull);
+    expect(result.path, filePath);
+  }
+
+  void test_getOptionsFile_inRoot_old() {
+    String path = '/some/directory/path';
+    String filePath = '$path/${AnalysisEngine.ANALYSIS_OPTIONS_FILE}';
+    resourceProvider.newFile(filePath, '');
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    File result = builder.getOptionsFile(path);
+    expect(result, isNotNull);
+    expect(result.path, filePath);
+  }
+}
diff --git a/pkg/analyzer/test/src/context/test_all.dart b/pkg/analyzer/test/src/context/test_all.dart
index 6b9127c..33c2986 100644
--- a/pkg/analyzer/test/src/context/test_all.dart
+++ b/pkg/analyzer/test/src/context/test_all.dart
@@ -7,6 +7,7 @@
 import 'package:unittest/unittest.dart';
 
 import '../../utils.dart';
+import 'builder_test.dart' as builder_test;
 import 'cache_test.dart' as cache_test;
 import 'context_factory_test.dart' as context_factory_test;
 import 'context_test.dart' as context_test;
@@ -15,6 +16,7 @@
 main() {
   initializeTestEnvironment();
   group('context tests', () {
+    builder_test.main();
     cache_test.main();
     context_factory_test.main();
     context_test.main();
diff --git a/pkg/analyzer/test/src/summary/resynthesize_ast_test.dart b/pkg/analyzer/test/src/summary/resynthesize_ast_test.dart
index 3cf454f..1adaae0 100644
--- a/pkg/analyzer/test/src/summary/resynthesize_ast_test.dart
+++ b/pkg/analyzer/test/src/summary/resynthesize_ast_test.dart
@@ -510,12 +510,6 @@
 
   @override
   @failingTest
-  void test_inferDefaultFormalParameter() {
-    super.test_inferDefaultFormalParameter();
-  }
-
-  @override
-  @failingTest
   void test_inferenceInCyclesIsDeterministic() {
     super.test_inferenceInCyclesIsDeterministic();
   }
diff --git a/pkg/analyzer/test/src/summary/resynthesize_test.dart b/pkg/analyzer/test/src/summary/resynthesize_test.dart
index d31dcd4..881ad8f 100644
--- a/pkg/analyzer/test/src/summary/resynthesize_test.dart
+++ b/pkg/analyzer/test/src/summary/resynthesize_test.dart
@@ -4107,7 +4107,9 @@
     checkLibrary('class C { bool operator<=(C other) => false; }');
   }
 
-  test_parameterType_inferred_constructor() {
+  test_parameterTypeNotInferred_constructor() {
+    // Strong mode doesn't do type inference on constructor parameters, so it's
+    // ok that we don't store inferred type info for them in summaries.
     checkLibrary('''
 class C {
   C.positional([x = 1]);
@@ -4116,22 +4118,6 @@
 ''');
   }
 
-  test_parameterType_inferred_staticMethod() {
-    checkLibrary('''
-class C {
-  static void positional([x = 1]) {}
-  static void named({x: 1}) {}
-}
-''');
-  }
-
-  test_parameterType_inferred_topLevelFunction() {
-    checkLibrary('''
-void positional([x = 1]) {}
-void named({x: 1}) {}
-''');
-  }
-
   test_parameterTypeNotInferred_initializingFormal() {
     // Strong mode doesn't do type inference on initializing formals, so it's
     // ok that we don't store inferred type info for them in summaries.
@@ -4144,6 +4130,27 @@
 ''');
   }
 
+  test_parameterTypeNotInferred_staticMethod() {
+    // Strong mode doesn't do type inference on parameters of static methods,
+    // so it's ok that we don't store inferred type info for them in summaries.
+    checkLibrary('''
+class C {
+  static void positional([x = 1]) {}
+  static void named({x: 1}) {}
+}
+''');
+  }
+
+  test_parameterTypeNotInferred_topLevelFunction() {
+    // Strong mode doesn't do type inference on parameters of top level
+    // functions, so it's ok that we don't store inferred type info for them in
+    // summaries.
+    checkLibrary('''
+void positional([x = 1]) {}
+void named({x: 1}) {}
+''');
+  }
+
   test_parts() {
     addSource('/a.dart', 'part of my.lib;');
     addSource('/b.dart', 'part of my.lib;');
diff --git a/pkg/analyzer/test/src/task/strong/checker_test.dart b/pkg/analyzer/test/src/task/strong/checker_test.dart
index 77145e9..0b1a115 100644
--- a/pkg/analyzer/test/src/task/strong/checker_test.dart
+++ b/pkg/analyzer/test/src/task/strong/checker_test.dart
@@ -2037,12 +2037,16 @@
 // default formal
 void df0([/*error:IMPLICIT_DYNAMIC_PARAMETER*/x = DYNAMIC_VALUE]) {}
 void df1([dynamic x = DYNAMIC_VALUE]) {}
-void df2([/*info:INFERRED_TYPE*/x = 42]) {}
+
+// https://github.com/dart-lang/sdk/issues/25794
+void df2([/*error:IMPLICIT_DYNAMIC_PARAMETER*/x = 42]) {}
 
 // default formal (named)
 void nf0({/*error:IMPLICIT_DYNAMIC_PARAMETER*/x: DYNAMIC_VALUE}) {}
 void nf1({dynamic x: DYNAMIC_VALUE}) {}
-void nf2({/*info:INFERRED_TYPE*/x: 42}) {}
+
+// https://github.com/dart-lang/sdk/issues/25794
+void nf2({/*error:IMPLICIT_DYNAMIC_PARAMETER*/x: 42}) {}
 
 // field formal
 class C {
diff --git a/pkg/analyzer/test/src/task/strong/inferred_type_test.dart b/pkg/analyzer/test/src/task/strong/inferred_type_test.dart
index 1c5847f..0446efa 100644
--- a/pkg/analyzer/test/src/task/strong/inferred_type_test.dart
+++ b/pkg/analyzer/test/src/task/strong/inferred_type_test.dart
@@ -723,7 +723,7 @@
 }
 void f([List<int> l = /*info:INFERRED_TYPE_LITERAL*/const [1]]) {}
 // We do this inference in an early task but don't preserve the infos.
-Function2<List<int>, String> g = /*pass should be info:INFERRED_TYPE_CLOSURE*/([/*info:INFERRED_TYPE*/llll = /*info:INFERRED_TYPE_LITERAL*/const [1]]) => "hello";
+Function2<List<int>, String> g = /*pass should be info:INFERRED_TYPE_CLOSURE*/([llll = /*info:INFERRED_TYPE_LITERAL*/const [1]]) => "hello";
 ''');
   }
 
@@ -1912,33 +1912,6 @@
 ''');
   }
 
-  void test_inferDefaultFormalParameter() {
-    var unit = checkFile('''
-f([/*info:INFERRED_TYPE*/x = 42]) {}
-g({/*info:INFERRED_TYPE*/x: 'hi'}) {}
-''');
-    expect(unit.functions[0].parameters[0].type.toString(), 'int');
-    expect(unit.functions[1].parameters[0].type.toString(), 'String');
-  }
-
-  void test_inferDefaultFormalParameter_fieldFormal() {
-    checkFile('''
-class C {
-  int x;
-  var y;
-  C({this.x: /*error:INVALID_ASSIGNMENT*/0.0, this.y: 'hi'}) {
-    String z = /*info:DYNAMIC_CAST*/y;
-  }
-  C.c([this.x =/*error:INVALID_ASSIGNMENT*/0.0, this.y = 'hi']) {
-    String z = /*info:DYNAMIC_CAST*/y;
-  }
-  m() {
-    String z = /*info:DYNAMIC_CAST*/y;
-  }
-}
-''');
-  }
-
   void test_inferedType_usesSyntheticFunctionType() {
     var mainUnit = checkFile('''
 int f() => null;
diff --git a/pkg/compiler/lib/src/common/names.dart b/pkg/compiler/lib/src/common/names.dart
index 300d176..2fa535b 100644
--- a/pkg/compiler/lib/src/common/names.dart
+++ b/pkg/compiler/lib/src/common/names.dart
@@ -26,6 +26,9 @@
   /// The name of the iterator property used in for-each loops.
   static const String iterator = 'iterator';
 
+  /// The name of the `loadLibrary` getter defined on deferred prefixes.
+  static const String loadLibrary = 'loadLibrary';
+
   /// The name of the main method.
   static const String main = 'main';
 
diff --git a/pkg/compiler/lib/src/compile_time_constants.dart b/pkg/compiler/lib/src/compile_time_constants.dart
index 71962e8..1c066ed 100644
--- a/pkg/compiler/lib/src/compile_time_constants.dart
+++ b/pkg/compiler/lib/src/compile_time_constants.dart
@@ -414,7 +414,10 @@
   AstConstant evaluate(Node node) {
     // TODO(johnniwinther): should there be a visitErrorNode?
     if (node is ErrorNode) return new ErroneousAstConstant(context, node);
-    return node.accept(this);
+    AstConstant result = node.accept(this);
+    assert(invariant(node, !isEvaluatingConstant || result != null,
+        message: "No AstConstant computed for the node."));
+    return result;
   }
 
   AstConstant evaluateConstant(Node node) {
@@ -422,7 +425,8 @@
     isEvaluatingConstant = true;
     AstConstant result = node.accept(this);
     isEvaluatingConstant = oldIsEvaluatingConstant;
-    assert(result != null);
+    assert(invariant(node, result != null,
+        message: "No AstConstant computed for the node."));
     return result;
   }
 
@@ -461,8 +465,8 @@
         !link.isEmpty;
         link = link.tail) {
       AstConstant argument = evaluateConstant(link.head);
-      if (argument == null) {
-        return null;
+      if (argument == null || argument.isError) {
+        return argument;
       }
       argumentExpressions.add(argument.expression);
       argumentValues.add(argument.value);
@@ -488,12 +492,12 @@
         link = link.tail) {
       LiteralMapEntry entry = link.head;
       AstConstant key = evaluateConstant(entry.key);
-      if (key == null) {
-        return null;
+      if (key == null || key.isError) {
+        return key;
       }
       AstConstant value = evaluateConstant(entry.value);
-      if (value == null) {
-        return null;
+      if (value == null || value.isError) {
+        return value;
       }
       if (!map.containsKey(key.value)) {
         keyValues.add(key.value);
@@ -530,7 +534,12 @@
   AstConstant visitStringJuxtaposition(StringJuxtaposition node) {
     AstConstant left = evaluate(node.first);
     AstConstant right = evaluate(node.second);
-    if (left == null || right == null) return null;
+    if (left == null || left.isError) {
+      return left;
+    }
+    if (right == null || right.isError) {
+      return right;
+    }
     StringConstantValue leftValue = left.value;
     StringConstantValue rightValue = right.value;
     return new AstConstant(
@@ -544,16 +553,16 @@
   AstConstant visitStringInterpolation(StringInterpolation node) {
     List<ConstantExpression> subexpressions = <ConstantExpression>[];
     AstConstant initialString = evaluate(node.string);
-    if (initialString == null) {
-      return null;
+    if (initialString == null || initialString.isError) {
+      return initialString;
     }
     subexpressions.add(initialString.expression);
     StringConstantValue initialStringValue = initialString.value;
     DartString accumulator = initialStringValue.primitiveValue;
     for (StringInterpolationPart part in node.parts) {
       AstConstant subexpression = evaluate(part.expression);
-      if (subexpression == null) {
-        return null;
+      if (subexpression == null || subexpression.isError) {
+        return subexpression;
       }
       subexpressions.add(subexpression.expression);
       ConstantValue expression = subexpression.value;
@@ -578,7 +587,6 @@
       accumulator =
           new DartString.concat(accumulator, partStringValue.primitiveValue);
     }
-    ;
     return new AstConstant(
         context,
         node,
@@ -725,8 +733,8 @@
     } else if (send.isPrefix) {
       assert(send.isOperator);
       AstConstant receiverConstant = evaluate(send.receiver);
-      if (receiverConstant == null) {
-        return null;
+      if (receiverConstant == null || receiverConstant.isError) {
+        return receiverConstant;
       }
       Operator node = send.selector;
       UnaryOperator operator = UnaryOperator.parse(node.source);
@@ -747,8 +755,11 @@
       assert(send.argumentCount() == 1);
       AstConstant left = evaluate(send.receiver);
       AstConstant right = evaluate(send.argumentsNode.nodes.head);
-      if (left == null || right == null) {
-        return null;
+      if (left == null || left.isError) {
+        return left;
+      }
+      if (right == null || right.isError) {
+        return right;
       }
       ConstantValue leftValue = left.value;
       ConstantValue rightValue = right.value;
@@ -796,8 +807,8 @@
 
   AstConstant visitConditional(Conditional node) {
     AstConstant condition = evaluate(node.condition);
-    if (condition == null) {
-      return null;
+    if (condition == null || condition.isError) {
+      return condition;
     } else if (!condition.value.isBool) {
       DartType conditionType = condition.value.getType(coreTypes);
       if (isEvaluatingConstant) {
@@ -809,8 +820,11 @@
     }
     AstConstant thenExpression = evaluate(node.thenExpression);
     AstConstant elseExpression = evaluate(node.elseExpression);
-    if (thenExpression == null || elseExpression == null) {
-      return null;
+    if (thenExpression == null || thenExpression.isError) {
+      return thenExpression;
+    }
+    if (elseExpression == null || elseExpression.isError) {
+      return elseExpression;
     }
     BoolConstantValue boolCondition = condition.value;
     return new AstConstant(
@@ -978,48 +992,43 @@
     ConstantValue defaultValue = normalizedArguments[1].value;
 
     if (firstArgument.isNull) {
-      reporter.reportErrorMessage(
+      return reportNotCompileTimeConstant(
           normalizedArguments[0].node, MessageKind.NULL_NOT_ALLOWED);
-      return null;
     }
 
     if (!firstArgument.isString) {
       DartType type = defaultValue.getType(coreTypes);
-      reporter.reportErrorMessage(
+      return reportNotCompileTimeConstant(
           normalizedArguments[0].node,
           MessageKind.NOT_ASSIGNABLE,
           {'fromType': type, 'toType': coreTypes.stringType});
-      return null;
     }
 
     if (constructor.isIntFromEnvironmentConstructor &&
         !(defaultValue.isNull || defaultValue.isInt)) {
       DartType type = defaultValue.getType(coreTypes);
-      reporter.reportErrorMessage(
+      return reportNotCompileTimeConstant(
           normalizedArguments[1].node,
           MessageKind.NOT_ASSIGNABLE,
           {'fromType': type, 'toType': coreTypes.intType});
-      return null;
     }
 
     if (constructor.isBoolFromEnvironmentConstructor &&
         !(defaultValue.isNull || defaultValue.isBool)) {
       DartType type = defaultValue.getType(coreTypes);
-      reporter.reportErrorMessage(
+      return reportNotCompileTimeConstant(
           normalizedArguments[1].node,
           MessageKind.NOT_ASSIGNABLE,
           {'fromType': type, 'toType': coreTypes.boolType});
-      return null;
     }
 
     if (constructor.isStringFromEnvironmentConstructor &&
         !(defaultValue.isNull || defaultValue.isString)) {
       DartType type = defaultValue.getType(coreTypes);
-      reporter.reportErrorMessage(
+      return reportNotCompileTimeConstant(
           normalizedArguments[1].node,
           MessageKind.NOT_ASSIGNABLE,
           {'fromType': type, 'toType': coreTypes.stringType});
-      return null;
     }
 
     String name = firstArgument.primitiveValue.slowToString();
@@ -1103,6 +1112,11 @@
     fieldConstants.forEach((FieldElement field, AstConstant astConstant) {
       fieldValues[field] = astConstant.value;
     });
+    for (AstConstant fieldValue in fieldConstants.values) {
+      if (fieldValue.isError) {
+        return fieldValue;
+      }
+    }
     return new AstConstant(
         context,
         node,
@@ -1115,13 +1129,18 @@
     return node.expression.accept(this);
   }
 
-  AstConstant signalNotCompileTimeConstant(Node node,
-      {MessageKind message: MessageKind.NOT_A_COMPILE_TIME_CONSTANT}) {
-    if (isEvaluatingConstant) {
-      reporter.reportErrorMessage(node, message);
+  AstConstant reportNotCompileTimeConstant(Node node, MessageKind message,
+      [Map arguments = const {}]) {
+    reporter.reportErrorMessage(node, message, arguments);
+    return new AstConstant(context, node, new ErroneousConstantExpression(),
+        new NullConstantValue());
+  }
 
-      return new AstConstant(context, node, new ErroneousConstantExpression(),
-          new NullConstantValue());
+  AstConstant signalNotCompileTimeConstant(Node node,
+      {MessageKind message: MessageKind.NOT_A_COMPILE_TIME_CONSTANT,
+      Map arguments: const {}}) {
+    if (isEvaluatingConstant) {
+      return reportNotCompileTimeConstant(node, message, arguments);
     }
     // Else we don't need to do anything. The final handler is only
     // optimistically trying to compile constants. So it is normal that we
@@ -1372,6 +1391,8 @@
         value);
   }
 
+  bool get isError => expression.kind == ConstantExpressionKind.ERRONEOUS;
+
   String toString() => expression.toString();
 }
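
An illustrative set of constants, consistent with the checks above, where the
evaluator now produces an erroneous `AstConstant` (detected via the new
`isError` getter) instead of returning `null`, letting the error propagate out
of nested evaluations:

  const a = const String.fromEnvironment(null);  // NULL_NOT_ALLOWED
  const b = const int.fromEnvironment('x', defaultValue: 'no');  // NOT_ASSIGNABLE
  const c = const bool.fromEnvironment('y', defaultValue: 42);   // NOT_ASSIGNABLE
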
 
diff --git a/pkg/compiler/lib/src/compiler.dart b/pkg/compiler/lib/src/compiler.dart
index 5707bdc..d567bba 100644
--- a/pkg/compiler/lib/src/compiler.dart
+++ b/pkg/compiler/lib/src/compiler.dart
@@ -920,6 +920,16 @@
       fullyEnqueueTopLevelElement(element, world);
     }
     library.implementation.forEachLocalMember(enqueueAll);
+    library.imports.forEach((ImportElement import) {
+      if (import.isDeferred) {
+        // `import.prefix` and `loadLibrary` may be `null` when the deferred
+        // import has compile-time errors.
+        GetterElement loadLibrary = import.prefix?.loadLibrary;
+        if (loadLibrary != null) {
+          world.addToWorkList(loadLibrary);
+        }
+      }
+    });
   }
 
   void fullyEnqueueTopLevelElement(Element element, Enqueuer world) {
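
For reference, the language feature these hunks keep alive (standard deferred
imports; `package:foo` and `doSomething` are placeholders): every deferred
prefix implicitly defines a `loadLibrary` getter, so the compiler has to
enqueue it even though user code never declares it:

  import 'package:foo/foo.dart' deferred as foo;

  main() async {
    await foo.loadLibrary();  // the implicit getter enqueued above
    foo.doSomething();
  }
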
diff --git a/pkg/compiler/lib/src/constants/constant_constructors.dart b/pkg/compiler/lib/src/constants/constant_constructors.dart
index 874dab7..4628f28 100644
--- a/pkg/compiler/lib/src/constants/constant_constructors.dart
+++ b/pkg/compiler/lib/src/constants/constant_constructors.dart
@@ -87,6 +87,11 @@
     applyParameters(parameters, _);
     ConstructedConstantExpression constructorInvocation =
         applyInitializers(node, _);
+    constructor.enclosingClass.forEachInstanceField((_, FieldElement field) {
+      if (!fieldMap.containsKey(field)) {
+        fieldMap[field] = field.constant;
+      }
+    });
     return new GenerativeConstantConstructor(
         currentClass.thisType, defaultValues, fieldMap, constructorInvocation);
   }
@@ -334,4 +339,10 @@
   ConstantExpression visitNamedArgument(NamedArgument node) {
     return apply(node.expression);
   }
+
+  @override
+  ConstantExpression visitIfNull(Send node, Node left, Node right, _) {
+    return new BinaryConstantExpression(
+        apply(left), BinaryOperator.IF_NULL, apply(right));
+  }
 }
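
A hedged example of what the new `visitIfNull` clause makes representable
(assuming `??` is accepted in this potentially-constant position, as the
visitor implies): an if-null expression inside a constant constructor now
becomes a `BinaryConstantExpression` with `BinaryOperator.IF_NULL`:

  class C {
    final int x;
    const C([int x]) : x = x ?? 0;  // `??` in a potentially-constant context
  }
  const c = const C();  // x evaluates to 0 through the if-null branch
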
diff --git a/pkg/compiler/lib/src/constants/values.dart b/pkg/compiler/lib/src/constants/values.dart
index 434fcfa..62749bf 100644
--- a/pkg/compiler/lib/src/constants/values.dart
+++ b/pkg/compiler/lib/src/constants/values.dart
@@ -666,7 +666,7 @@
   ConstructedConstantValue(
       InterfaceType type, Map<FieldElement, ConstantValue> fields)
       : this.fields = fields,
-        hashCode = Hashing.mapHash(fields, Hashing.objectHash(type)),
+        hashCode = Hashing.unorderedMapHash(fields, Hashing.objectHash(type)),
         super(type) {
     assert(type != null);
     assert(!fields.containsValue(null));
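
A minimal sketch of the idea behind an unordered map hash (an assumption about
`Hashing.unorderedMapHash`, not its actual implementation): structurally equal
constants may populate their field maps in different orders, so entry hashes
must be combined with a commutative operation:

  int unorderedMapHash(Map<Object, Object> map, int seed) {
    int hash = 0;
    map.forEach((key, value) {
      // Addition is commutative, so insertion order cannot affect the result.
      hash = (hash + (key.hashCode ^ value.hashCode)) & 0x3FFFFFFF;
    });
    return (hash + seed) & 0x3FFFFFFF;
  }
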
diff --git a/pkg/compiler/lib/src/dart2js.dart b/pkg/compiler/lib/src/dart2js.dart
index b82ad53..4301a55 100644
--- a/pkg/compiler/lib/src/dart2js.dart
+++ b/pkg/compiler/lib/src/dart2js.dart
@@ -471,7 +471,8 @@
         api.Diagnostic.INFO);
   }
   if (resolveOnly) {
-    if (resolutionInputs.contains(resolutionOutput)) {
+    if (resolutionInputs != null &&
+        resolutionInputs.contains(resolutionOutput)) {
       helpAndFail("Resolution input '${resolutionOutput}' can't be used as "
           "resolution output. Use the '--out' option to specify another "
           "resolution output.");
@@ -535,7 +536,7 @@
       packageConfig: packageConfig,
       packagesDiscoveryProvider: findPackages,
       resolutionInputs: resolutionInputs,
-      resolutionOutput: resolutionOutput,
+      resolutionOutput: resolveOnly ? resolutionOutput : null,
       options: options,
       environment: environment);
   return compileFunc(
diff --git a/pkg/compiler/lib/src/elements/elements.dart b/pkg/compiler/lib/src/elements/elements.dart
index 06e0d00..52f5f30 100644
--- a/pkg/compiler/lib/src/elements/elements.dart
+++ b/pkg/compiler/lib/src/elements/elements.dart
@@ -941,6 +941,9 @@
 
   /// Import that declared this deferred prefix.
   ImportElement get deferredImport;
+
+  /// The `loadLibrary` getter implicitly defined on deferred prefixes.
+  GetterElement get loadLibrary;
 }
 
 /// A type alias definition.
@@ -1691,6 +1694,10 @@
   /// The element is an implicit forwarding constructor on a mixin application.
   /// No AST or [TreeElements] are provided.
   FORWARDING_CONSTRUCTOR,
+
+  /// The element is the `loadLibrary` getter implicitly defined on a deferred
+  /// prefix.
+  DEFERRED_LOAD_LIBRARY,
 }
 
 /// [ResolvedAst] contains info that define the semantics of an element.
diff --git a/pkg/compiler/lib/src/elements/modelx.dart b/pkg/compiler/lib/src/elements/modelx.dart
index 248a9ef..bb331d9 100644
--- a/pkg/compiler/lib/src/elements/modelx.dart
+++ b/pkg/compiler/lib/src/elements/modelx.dart
@@ -5,6 +5,7 @@
 library elements.modelx;
 
 import '../common.dart';
+import '../common/names.dart' show Identifiers;
 import '../common/resolution.dart' show Resolution, ParsingContext;
 import '../compiler.dart' show Compiler;
 import '../constants/constant_constructors.dart';
@@ -1259,6 +1260,11 @@
     return visitor.visitPrefixElement(this, arg);
   }
 
+  @override
+  GetterElement get loadLibrary {
+    return isDeferred ? lookupLocalMember(Identifiers.loadLibrary) : null;
+  }
+
   String toString() => '$kind($name)';
 }
 
@@ -1419,11 +1425,22 @@
       // constant for a variable already known to be erroneous.
       return;
     }
-    assert(invariant(this, constantCache == null || constantCache == value,
-        message: "Constant has already been computed for $this. "
-            "Existing constant: "
-            "${constantCache != null ? constantCache.toStructuredText() : ''}, "
-            "New constant: ${value != null ? value.toStructuredText() : ''}."));
+    if (constantCache != null && constantCache != value) {
+      // Allow overwriting the constant with an erroneous one. Constants
+      // computed during resolution are locally valid but might be effectively
+      // erroneous. For instance `a ? true : false` where `a` is declared as
+      // `const a = m()`: since `a` is declared to be constant, the conditional
+      // is assumed valid, but when computing the value we see that it isn't.
+      // TODO(johnniwinther): Remove this exception when all constant
+      // expressions are computed during resolution.
+      assert(invariant(
+          this, value == null || value.kind == ConstantExpressionKind.ERRONEOUS,
+          message: "Constant has already been computed for $this. "
+              "Existing constant: "
+              "${constantCache != null ? constantCache.toStructuredText() : ''}"
+              ", New constant: "
+              "${value != null ? value.toStructuredText() : ''}."));
+    }
     constantCache = value;
   }
 }
@@ -2254,7 +2271,7 @@
 
   DeferredLoaderGetterElementX(PrefixElement prefix)
       : this.prefix = prefix,
-        super("loadLibrary", Modifiers.EMPTY, prefix, false) {
+        super(Identifiers.loadLibrary, Modifiers.EMPTY, prefix, false) {
     functionSignature = new FunctionSignatureX(type: new FunctionType(this));
   }
 
@@ -2265,6 +2282,7 @@
   bool get isDeferredLoaderGetter => true;
 
   bool get isTopLevel => true;
+
   // By having position null, the enclosing elements location is printed in
   // error messages.
   Token get position => null;
@@ -2275,6 +2293,13 @@
 
   FunctionExpression get node => null;
 
+  bool get hasResolvedAst => true;
+
+  ResolvedAst get resolvedAst {
+    return new SynthesizedResolvedAst(
+        this, ResolvedAstKind.DEFERRED_LOAD_LIBRARY);
+  }
+
   @override
   SetterElement get setter => null;
 }
diff --git a/pkg/compiler/lib/src/inferrer/type_graph_inferrer.dart b/pkg/compiler/lib/src/inferrer/type_graph_inferrer.dart
index f81a6ca..f8ae6ea 100644
--- a/pkg/compiler/lib/src/inferrer/type_graph_inferrer.dart
+++ b/pkg/compiler/lib/src/inferrer/type_graph_inferrer.dart
@@ -856,20 +856,27 @@
             if (constant != null) {
               ConstantValue value =
                   compiler.backend.constants.getConstantValue(constant);
-              assert(invariant(fieldElement, value != null,
-                  message: "Constant expression without value: "
-                      "${constant.toStructuredText()}."));
-              if (value.isFunction) {
-                FunctionConstantValue functionConstant = value;
-                type = types.allocateClosure(node, functionConstant.element);
+              if (value != null) {
+                if (value.isFunction) {
+                  FunctionConstantValue functionConstant = value;
+                  type = types.allocateClosure(node, functionConstant.element);
+                } else {
+                  // Although we might find a better type, we have to keep
+                  // the old type around to ensure that we get a complete view
+                  // of the type graph and do not drop any flow edges.
+                  TypeMask refinedType = computeTypeMask(compiler, value);
+                  assert(TypeMask.assertIsNormalized(refinedType, classWorld));
+                  type = new NarrowTypeInformation(type, refinedType);
+                  types.allocatedTypes.add(type);
+                }
               } else {
-                // Although we might find a better type, we have to keep
-                // the old type around to ensure that we get a complete view
-                // of the type graph and do not drop any flow edges.
-                TypeMask refinedType = computeTypeMask(compiler, value);
-                assert(TypeMask.assertIsNormalized(refinedType, classWorld));
-                type = new NarrowTypeInformation(type, refinedType);
-                types.allocatedTypes.add(type);
+                assert(invariant(
+                    fieldElement,
+                    fieldElement.isInstanceMember ||
+                        constant.isImplicit ||
+                        constant.isPotential,
+                    message: "Constant expression without value: "
+                        "${constant.toStructuredText()}."));
               }
             }
           }
diff --git a/pkg/compiler/lib/src/js_backend/backend.dart b/pkg/compiler/lib/src/js_backend/backend.dart
index 7489914..cbae712 100644
--- a/pkg/compiler/lib/src/js_backend/backend.dart
+++ b/pkg/compiler/lib/src/js_backend/backend.dart
@@ -1527,17 +1527,24 @@
       ConstantExpression constant = variableElement.constant;
       if (constant != null) {
         ConstantValue initialValue = constants.getConstantValue(constant);
-        assert(invariant(variableElement, initialValue != null,
-            message: "Constant expression without value: "
-                "${constant.toStructuredText()}."));
-        registerCompileTimeConstant(initialValue, work.registry);
-        addCompileTimeConstantForEmission(initialValue);
-        // We don't need to generate code for static or top-level
-        // variables. For instance variables, we may need to generate
-        // the checked setter.
-        if (Elements.isStaticOrTopLevel(element)) {
-          return impactTransformer
-              .transformCodegenImpact(work.registry.worldImpact);
+        if (initialValue != null) {
+          registerCompileTimeConstant(initialValue, work.registry);
+          addCompileTimeConstantForEmission(initialValue);
+          // We don't need to generate code for static or top-level
+          // variables. For instance variables, we may need to generate
+          // the checked setter.
+          if (Elements.isStaticOrTopLevel(element)) {
+            return impactTransformer
+                .transformCodegenImpact(work.registry.worldImpact);
+          }
+        } else {
+          assert(invariant(
+              variableElement,
+              variableElement.isInstanceMember ||
+                  constant.isImplicit ||
+                  constant.isPotential,
+              message: "Constant expression without value: "
+                  "${constant.toStructuredText()}."));
         }
       } else {
         // If the constant-handler was not able to produce a result we have to
diff --git a/pkg/compiler/lib/src/js_backend/js_interop_analysis.dart b/pkg/compiler/lib/src/js_backend/js_interop_analysis.dart
index e2ebe39..83f6284 100644
--- a/pkg/compiler/lib/src/js_backend/js_interop_analysis.dart
+++ b/pkg/compiler/lib/src/js_backend/js_interop_analysis.dart
@@ -8,6 +8,11 @@
 import '../common.dart';
 import '../constants/values.dart'
     show ConstantValue, ConstructedConstantValue, StringConstantValue;
+import '../dart_types.dart'
+    show
+        DartType,
+        DynamicType,
+        FunctionType;
 import '../diagnostics/messages.dart' show MessageKind;
 import '../elements/elements.dart'
     show
@@ -185,4 +190,14 @@
     });
     return new jsAst.Block(statements);
   }
+
+  FunctionType buildJsFunctionType() {
+    // TODO(jacobr): consider using codegenWorld.isChecks to determine the
+    // range of positional arguments that need to be supported by JavaScript
+    // function types.
+    return new FunctionType.synthesized(
+      const DynamicType(),
+      [],
+      new List<DartType>.filled(16, const DynamicType()));
+  }
 }
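
(A minimal Dart sketch of the idea behind buildJsFunctionType, used by the emitter change below: a function type whose positional parameters are all optional and dynamic subsumes calls of any smaller arity. JsShape and jsLike are illustrative names, not compiler API, and the sketch uses three parameters where the code above uses sixteen.)

  typedef dynamic JsShape([dynamic a, dynamic b, dynamic c]);

  dynamic jsLike([dynamic a, dynamic b, dynamic c]) => a;

  main() {
    print(jsLike is JsShape);         // true: accepts any arity up to three
    print(((int x) => x) is JsShape); // false: requires a positional argument
  }
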
diff --git a/pkg/compiler/lib/src/js_emitter/runtime_type_generator.dart b/pkg/compiler/lib/src/js_emitter/runtime_type_generator.dart
index ae4cbba..ddef40b 100644
--- a/pkg/compiler/lib/src/js_emitter/runtime_type_generator.dart
+++ b/pkg/compiler/lib/src/js_emitter/runtime_type_generator.dart
@@ -132,6 +132,17 @@
     _generateIsTestsOn(classElement, generateIsTest,
         generateFunctionTypeSignature, generateSubstitution, generateTypeCheck);
 
+    if (classElement == backend.helpers.jsJavaScriptFunctionClass) {
+      var type = backend.jsInteropAnalysis.buildJsFunctionType();
+      if (type != null) {
+        jsAst.Expression thisAccess = new jsAst.This();
+        RuntimeTypesEncoder rtiEncoder = backend.rtiEncoder;
+        jsAst.Expression encoding =
+            rtiEncoder.getSignatureEncoding(type, thisAccess);
+        jsAst.Name operatorSignature = namer.asName(namer.operatorSignature);
+        result.properties[operatorSignature] = encoding;
+      }
+    }
     return result;
   }
 
diff --git a/pkg/compiler/lib/src/resolution/constructors.dart b/pkg/compiler/lib/src/resolution/constructors.dart
index e30cfdc..90f4716 100644
--- a/pkg/compiler/lib/src/resolution/constructors.dart
+++ b/pkg/compiler/lib/src/resolution/constructors.dart
@@ -448,11 +448,24 @@
       constructorInvocation = resolveImplicitSuperConstructorSend();
     }
     if (isConst && isValidAsConstant) {
-      constructor.constantConstructor = new GenerativeConstantConstructor(
-          constructor.enclosingClass.thisType,
-          defaultValues,
-          fieldInitializers,
-          constructorInvocation);
+      constructor.enclosingClass.forEachInstanceField((_, FieldElement field) {
+        if (!fieldInitializers.containsKey(field)) {
+          visitor.resolution.ensureResolved(field);
+          // TODO(johnniwinther): Report error if `field.constant` is `null`.
+          if (field.constant != null) {
+            fieldInitializers[field] = field.constant;
+          } else {
+            isValidAsConstant = false;
+          }
+        }
+      });
+      if (isValidAsConstant) {
+        constructor.constantConstructor = new GenerativeConstantConstructor(
+            constructor.enclosingClass.thisType,
+            defaultValues,
+            fieldInitializers,
+            constructorInvocation);
+      }
     }
     visitor.scope = oldScope;
     return null; // If there was no redirection always return null.
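
(A plain-Dart illustration, not compiler API, of the case the loop above now handles: an instance field with its own constant initializer that is absent from the const constructor's initializer list still becomes part of the constant constructor's field map.)

  class Point {
    final int x = 0; // constant field initializer, not in the list below
    final int y;
    const Point(this.y);
  }

  main() {
    const p = const Point(1);
    print(p.x + p.y); // 1: constant evaluation sees both x and y
  }
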
diff --git a/pkg/compiler/lib/src/resolution/resolution.dart b/pkg/compiler/lib/src/resolution/resolution.dart
index 05e1cf0..f775190 100644
--- a/pkg/compiler/lib/src/resolution/resolution.dart
+++ b/pkg/compiler/lib/src/resolution/resolution.dart
@@ -53,6 +53,7 @@
 import 'constructors.dart';
 import 'members.dart';
 import 'registry.dart';
+import 'resolution_result.dart';
 import 'scope.dart' show MutableScope;
 import 'signatures.dart';
 import 'tree_elements.dart';
@@ -387,7 +388,10 @@
     if (initializer != null) {
       // TODO(johnniwinther): Avoid analyzing initializers if
       // [Compiler.analyzeSignaturesOnly] is set.
-      visitor.visit(initializer);
+      ResolutionResult result = visitor.visit(initializer);
+      if (result.isConstant) {
+        element.constant = result.constant;
+      }
     } else if (modifiers.isConst) {
       reporter.reportErrorMessage(
           element, MessageKind.CONST_WITHOUT_INITIALIZER);
diff --git a/pkg/compiler/lib/src/serialization/constant_serialization.dart b/pkg/compiler/lib/src/serialization/constant_serialization.dart
index b041347..ae3040a 100644
--- a/pkg/compiler/lib/src/serialization/constant_serialization.dart
+++ b/pkg/compiler/lib/src/serialization/constant_serialization.dart
@@ -169,8 +169,8 @@
 
   @override
   void visitDeferred(DeferredConstantExpression exp, ObjectEncoder encoder) {
-    throw new UnsupportedError(
-        "ConstantSerializer.visitDeferred: ${exp.toDartText()}");
+    encoder.setElement(Key.PREFIX, exp.prefix);
+    encoder.setConstant(Key.EXPRESSION, exp.expression);
   }
 }
 
@@ -267,6 +267,9 @@
       case ConstantExpressionKind.NAMED_REFERENCE:
         return new NamedArgumentReference(decoder.getString(Key.NAME));
       case ConstantExpressionKind.DEFERRED:
+        return new DeferredConstantExpression(
+            decoder.getConstant(Key.EXPRESSION),
+            decoder.getElement(Key.PREFIX));
       case ConstantExpressionKind.SYNTHETIC:
     }
     throw new UnsupportedError("Unexpected constant kind: ${kind} in $decoder");
diff --git a/pkg/compiler/lib/src/serialization/element_serialization.dart b/pkg/compiler/lib/src/serialization/element_serialization.dart
index 7069b51..d51c67a 100644
--- a/pkg/compiler/lib/src/serialization/element_serialization.dart
+++ b/pkg/compiler/lib/src/serialization/element_serialization.dart
@@ -5,12 +5,14 @@
 library dart2js.serialization.elements;
 
 import '../common.dart';
+import '../common/names.dart';
 import '../constants/constructors.dart';
 import '../constants/expressions.dart';
 import '../dart_types.dart';
 import '../diagnostics/messages.dart';
 import '../elements/elements.dart';
-import '../elements/modelx.dart' show ErroneousElementX;
+import '../elements/modelx.dart'
+    show DeferredLoaderGetterElementX, ErroneousElementX;
 import 'constant_serialization.dart';
 import 'keys.dart';
 import 'modelz.dart';
@@ -51,6 +53,7 @@
   IMPORT,
   EXPORT,
   PREFIX,
+  DEFERRED_LOAD_LIBRARY,
   LOCAL_VARIABLE,
   EXTERNAL_LIBRARY,
   EXTERNAL_LIBRARY_MEMBER,
@@ -69,6 +72,8 @@
   const ErrorSerializer(),
   const LibrarySerializer(),
   const CompilationUnitSerializer(),
+  const PrefixSerializer(),
+  const DeferredLoadLibrarySerializer(),
   const ClassSerializer(),
   const ConstructorSerializer(),
   const FieldSerializer(),
@@ -78,7 +83,6 @@
   const ParameterSerializer(),
   const ImportSerializer(),
   const ExportSerializer(),
-  const PrefixSerializer(),
   const LocalVariableSerializer(),
 ];
 
@@ -289,7 +293,6 @@
     encoder.setElements(Key.EXPORTS, element.exports);
 
     encoder.setElements(Key.IMPORT_SCOPE, getImportedElements(element));
-
     encoder.setElements(Key.EXPORT_SCOPE, getExportedElements(element));
   }
 }
@@ -423,7 +426,8 @@
       SerializedElementKind kind) {
     SerializerUtil.serializeParentRelation(element, encoder);
     if (kind == SerializedElementKind.FORWARDING_CONSTRUCTOR) {
-      encoder.setElement(Key.ELEMENT, element.definingConstructor);
+      serializeElementReference(element.enclosingClass, Key.ELEMENT, Key.NAME,
+          encoder, element.definingConstructor);
     } else {
       SerializerUtil.serializeMetadata(element, encoder);
       encoder.setType(Key.TYPE, element.type);
@@ -500,6 +504,9 @@
   const FunctionSerializer();
 
   SerializedElementKind getSerializedKind(Element element) {
+    if (element.isDeferredLoaderGetter) {
+      return null;
+    }
     if (element.isFunction) {
       if (element.isTopLevel) return SerializedElementKind.TOPLEVEL_FUNCTION;
       if (element.isStatic) return SerializedElementKind.STATIC_FUNCTION;
@@ -713,10 +720,27 @@
     encoder.setString(Key.NAME, element.name);
     encoder.setElement(Key.LIBRARY, element.library);
     encoder.setElement(Key.COMPILATION_UNIT, element.compilationUnit);
-    if (element.deferredImport != null) {
-      encoder.setElement(Key.IMPORT, element.deferredImport);
-    }
     encoder.setBool(Key.IS_DEFERRED, element.isDeferred);
+    if (element.isDeferred) {
+      encoder.setElement(Key.IMPORT, element.deferredImport);
+      encoder.setElement(Key.GETTER, element.loadLibrary);
+    }
+  }
+}
+
+class DeferredLoadLibrarySerializer implements ElementSerializer {
+  const DeferredLoadLibrarySerializer();
+
+  SerializedElementKind getSerializedKind(Element element) {
+    if (element.isDeferredLoaderGetter) {
+      return SerializedElementKind.DEFERRED_LOAD_LIBRARY;
+    }
+    return null;
+  }
+
+  void serialize(GetterElement element, ObjectEncoder encoder,
+      SerializedElementKind kind) {
+    encoder.setElement(Key.PREFIX, element.enclosingElement);
   }
 }
 
@@ -773,8 +797,10 @@
       case SerializedElementKind.REDIRECTING_FACTORY_CONSTRUCTOR:
         return new RedirectingFactoryConstructorElementZ(decoder);
       case SerializedElementKind.FORWARDING_CONSTRUCTOR:
-        return new ForwardingConstructorElementZ(
-            decoder.getElement(Key.CLASS), decoder.getElement(Key.ELEMENT));
+        ClassElement cls = decoder.getElement(Key.CLASS);
+        Element definingConstructor =
+            deserializeElementReference(cls, Key.ELEMENT, Key.NAME, decoder);
+        return new ForwardingConstructorElementZ(cls, definingConstructor);
       case SerializedElementKind.TOPLEVEL_FUNCTION:
         return new TopLevelFunctionElementZ(decoder);
       case SerializedElementKind.STATIC_FUNCTION:
@@ -809,6 +835,8 @@
         return new ExportElementZ(decoder);
       case SerializedElementKind.PREFIX:
         return new PrefixElementZ(decoder);
+      case SerializedElementKind.DEFERRED_LOAD_LIBRARY:
+        return new DeferredLoaderGetterElementX(decoder.getElement(Key.PREFIX));
       case SerializedElementKind.LOCAL_VARIABLE:
         return new LocalVariableElementZ(decoder);
       case SerializedElementKind.EXTERNAL_LIBRARY:
diff --git a/pkg/compiler/lib/src/serialization/modelz.dart b/pkg/compiler/lib/src/serialization/modelz.dart
index c909f9a..4c932f3 100644
--- a/pkg/compiler/lib/src/serialization/modelz.dart
+++ b/pkg/compiler/lib/src/serialization/modelz.dart
@@ -458,7 +458,8 @@
 
   void _ensureExports() {
     if (_exportsMap == null) {
-      _exportsMap = new ListedContainer(_decoder.getElements(Key.EXPORT_SCOPE));
+      _exportsMap = new ListedContainer(
+          _decoder.getElements(Key.EXPORT_SCOPE, isOptional: true));
     }
   }
 
@@ -2257,6 +2258,7 @@
     implements PrefixElement {
   bool _isDeferred;
   ImportElement _deferredImport;
+  GetterElement _loadLibrary;
 
   PrefixElementZ(ObjectDecoder decoder) : super(decoder);
 
@@ -2266,7 +2268,10 @@
   void _ensureDeferred() {
     if (_isDeferred == null) {
       _isDeferred = _decoder.getBool(Key.IS_DEFERRED);
-      _deferredImport = _decoder.getElement(Key.IMPORT, isOptional: true);
+      if (_isDeferred) {
+        _deferredImport = _decoder.getElement(Key.IMPORT);
+        _loadLibrary = _decoder.getElement(Key.GETTER);
+      }
     }
   }
 
@@ -2283,6 +2288,11 @@
   }
 
   @override
+  GetterElement get loadLibrary {
+    return _loadLibrary;
+  }
+
+  @override
   ElementKind get kind => ElementKind.PREFIX;
 
   @override
diff --git a/pkg/compiler/lib/src/serialization/resolved_ast_serialization.dart b/pkg/compiler/lib/src/serialization/resolved_ast_serialization.dart
index bca6d9e..b5d6cae 100644
--- a/pkg/compiler/lib/src/serialization/resolved_ast_serialization.dart
+++ b/pkg/compiler/lib/src/serialization/resolved_ast_serialization.dart
@@ -100,6 +100,7 @@
         break;
       case ResolvedAstKind.DEFAULT_CONSTRUCTOR:
       case ResolvedAstKind.FORWARDING_CONSTRUCTOR:
+      case ResolvedAstKind.DEFERRED_LOAD_LIBRARY:
         // No additional properties.
         break;
     }
@@ -355,6 +356,8 @@
         (element as AstElementMixinZ).resolvedAst =
             new SynthesizedResolvedAst(element, kind);
         break;
+      case ResolvedAstKind.DEFERRED_LOAD_LIBRARY:
+        break;
     }
   }
 
diff --git a/pkg/compiler/lib/src/serialization/serialization_util.dart b/pkg/compiler/lib/src/serialization/serialization_util.dart
index 24aa6f8..1e40000 100644
--- a/pkg/compiler/lib/src/serialization/serialization_util.dart
+++ b/pkg/compiler/lib/src/serialization/serialization_util.dart
@@ -506,10 +506,19 @@
     if (elementName == null) {
       return null;
     }
-    assert(invariant(NO_LOCATION_SPANNABLE, context.isConstructor,
-        message: "Unexpected reference of forwarding constructor "
-            "'${elementName}' from $context."));
-    ClassElement superclass = context.enclosingClass.superclass;
+    ClassElement cls;
+    if (context is ClassElement) {
+      assert(invariant(NO_LOCATION_SPANNABLE, context.isNamedMixinApplication,
+          message: "Unexpected reference of forwarding constructor "
+              "'${elementName}' from $context."));
+      cls = context;
+    } else {
+      assert(invariant(NO_LOCATION_SPANNABLE, context.isConstructor,
+          message: "Unexpected reference of forwarding constructor "
+              "'${elementName}' from $context."));
+      cls = context.enclosingClass;
+    }
+    ClassElement superclass = cls.superclass;
     element = superclass.lookupConstructor(elementName);
     assert(invariant(NO_LOCATION_SPANNABLE, element != null,
         message: "Unresolved reference of forwarding constructor "
diff --git a/pkg/compiler/lib/src/util/util.dart b/pkg/compiler/lib/src/util/util.dart
index 102ca9e..d9cc16c 100644
--- a/pkg/compiler/lib/src/util/util.dart
+++ b/pkg/compiler/lib/src/util/util.dart
@@ -52,6 +52,16 @@
     return h;
   }
 
+  /// Mix the bits of the hash codes of the key/value pairs from [map],
+  /// treated as unordered, with [existing].
+  static int unorderedMapHash(Map map, [int existing = 0]) {
+    int h = 0;
+    for (var key in map.keys) {
+      h ^= objectHash(key, objectHash(map[key]));
+    }
+    return mixHashCodeBits(h, existing);
+  }
+
   /// Mix the bits of the key/value hash codes from [map] with [existing].
   static int mapHash(Map map, [int existing = 0]) {
     int h = existing;
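
(A self-contained sketch of the property unorderedMapHash relies on: XOR is commutative, so combining per-entry hashes with ^ makes the result independent of iteration order. entryHash below is an illustrative stand-in for the objectHash calls above.)

  int entryHash(key, value) => key.hashCode ^ (value.hashCode * 31);

  int unorderedHash(Map map) {
    int h = 0;
    for (var key in map.keys) {
      h ^= entryHash(key, map[key]);
    }
    return h;
  }

  main() {
    var a = {'x': 1, 'y': 2};
    var b = {'y': 2, 'x': 1}; // same entries, different insertion order
    print(unorderedHash(a) == unorderedHash(b)); // true
  }
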
diff --git a/pkg/pkg.status b/pkg/pkg.status
index d0eca7c..81fe4d1 100644
--- a/pkg/pkg.status
+++ b/pkg/pkg.status
@@ -27,101 +27,19 @@
 
 [ $runtime == vm && $system == windows]
 analysis_server/test/analysis/get_errors_test: Skip # runtime error, Issue 22180
+analysis_server/test/context_manager_test: RuntimeError # Issue 26828
 analysis_server/test/integration/analysis/analysis_options_test: RuntimeError # Issue 24796
 analyzer/test/generated/all_the_rest_test: Fail # Issue 21772
-analyzer_cli/test/driver_test: Fail # Issue 25471
+analyzer/test/generated/source_factory_test: RuntimeError # Issue 26828
+analyzer/test/src/context/builder_test: RuntimeError # Issue 26828
+analyzer/test/src/summary/linker_test: RuntimeError # Issue 26828
+analyzer/test/src/summary/prelinker_test: RuntimeError # Issue 26828
+analyzer/test/src/summary/summarize_elements_strong_test: RuntimeError # Issue 26828
+analyzer/test/src/summary/summarize_elements_test: RuntimeError # Issue 26828
 
 [ $compiler == dart2js ]
+analysis_server/test/integration: SkipByDesign # Analysis server integration tests don't make sense to run under dart2js, since the code under test always runs in the Dart vm as a subprocess.
 analyzer_cli/test/*: SkipByDesign # Only meant to run on vm
-analysis_server/test/*: Skip # Issue 22161
-analysis_server/test/analysis_notification_highlights_test: Pass, Slow # 19756, 21628
-analysis_server/test/analysis_notification_navigation_test: Pass, Slow # Issue 19756, 21628
-analysis_server/test/analysis_notification_occurrences_test: Pass, Slow # Issue 19756, 21628
-analysis_server/test/analysis_notification_outline_test: Pass, Slow # Issue 19756, 21628
-analysis_server/test/domain_analysis_test: Pass, Slow # Issue 19756, 21628
-analysis_server/test/domain_completion_test: Pass, Slow
-analysis_server/test/edit/assists_test: Pass, Slow
-analysis_server/test/edit/format_test: Pass, Slow
-analysis_server/test/edit/refactoring_test: Pass, Slow # Issue 19756, 21628
-analysis_server/test/search/element_references_test: Pass, Slow
-analysis_server/test/search/top_level_declarations_test: Pass, Slow # 19756, 21628
-analysis_server/test/services/index/store/codec_test: Pass, Slow
-analysis_server/test/socket_server_test: Pass, Slow # Issue 19756, 21628
-analyzer/test/context/declared_variables_test: Pass, Slow # Issue 21628
-analyzer/test/dart/element/element_test: Pass, Slow # Issue 24914
-analyzer/test/dart/ast/ast_test: Pass, Slow # Issue 19756, 21628
-analyzer/test/dart/ast/utilities_test: Pass, Slow # Issue 19756, 21628
-analyzer/test/dart/ast/visitor_test: Pass, Slow # Issue 19756, 21628
-analyzer/test/enum_test: Slow, Pass, Fail # Issue 21323
-analyzer/test/non_hint_code_test: Pass, Slow # Issue 21628
-analyzer/test/strong_mode_test: Pass, Slow # Issue 21628
-analyzer/test/generated/all_the_rest_test: Pass, Slow # Issue 21628
-analyzer/test/generated/checked_mode_compile_time_error_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/ast_test: Pass, Slow # Issue 21628
-analyzer/test/generated/checked_mode_compile_time_error_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/compile_time_error_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/constant_test: Pass, Slow # Issue 24914
-analyzer/test/generated/declaration_resolver_test: Pass, Slow # Issue 24914
-analyzer/test/generated/element_test: Pass, Slow # Issue 21628
-analyzer/test/generated/element_resolver_test: Pass, Slow # Issue 21628
-analyzer/test/generated/error_suppression_test: Pass, Slow # Issue 21628
-analyzer/test/generated/engine_test: SkipSlow
-analyzer/test/generated/hint_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/non_hint_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/incremental_resolver_test: Pass, Slow # Issue 21628
-analyzer/test/generated/incremental_scanner_test: Pass, Slow # Issue 21628
-analyzer/test/generated/inheritance_manager_test: Pass, Slow # Issue 21628
-analyzer/test/generated/non_error_resolver_test: Pass, Slow # Issue 21628
-analyzer/test/generated/parser_test: Pass, Slow # Issue 21628
-analyzer/test/generated/resolver_test: Pass, Slow # Issue 21628
-analyzer/test/generated/scanner_test: Pass, Slow # Issue 21628
-analyzer/test/generated/sdk_test: Skip # Issue 21628
-analyzer/test/generated/simple_resolver_test: Pass, Slow # Issue 21628
-analyzer/test/generated/source_factory_test: Pass, Slow # Issue 21628
-analyzer/test/generated/static_type_analyzer_test: Pass, Slow # Issue 21628
-analyzer/test/generated/static_type_warning_code_test: Pass, Slow
-analyzer/test/generated/static_type_warning_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/static_warning_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/strong_mode_test: Pass, Slow # Issue 21628
-analyzer/test/generated/type_system_test: Pass, Slow # Issue 21628
-analyzer/test/generated/utilities_test: Pass, Slow # Issue 21628
-analyzer/test/source/embedder_test: Skip # Issue 21628
-analyzer/test/source/package_map_provider_test: Skip # Issue 21628
-analyzer/test/src/context/cache_test: Pass, Slow # Issue 21628
-analyzer/test/src/context/context_test: Pass, Timeout # dartbug.com/23658
-analyzer/test/src/dart/ast/utilities_test: Pass, Slow # Issue 24914
-analyzer/test/src/dart/constant/evaluation_test: Pass, Slow # Issue 24914
-analyzer/test/src/dart/constant/value_test: Pass, Slow # Issue 24914
-analyzer/test/src/dart/element/element_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/incremental_cache_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/index_unit_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/linker_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/prelinker_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/resynthesize_ast_test: Pass, Slow
-analyzer/test/src/summary/resynthesize_strong_test: Pass, Slow
-analyzer/test/src/summary/resynthesize_test: Pass, Slow
-analyzer/test/src/summary/summary_sdk_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/summarize_ast_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/summarize_ast_strong_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/summarize_elements_strong_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/summarize_elements_test: Pass, Slow # Issue 24914
-analyzer/test/src/task/dart_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/dart_work_manager_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/driver_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/general_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/html_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/html_work_manager_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/incremental_element_builder_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/inputs_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/manager_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/model_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/options_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/options_work_manager_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/strong/checker_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/strong/inferred_type_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/strong_mode_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/yaml_test: Pass, Slow # Issue 21628
-analyzer/tool/task_dependency_graph/check_test: Skip # Issue 21628
 collection/test/equality_test/01: Fail # Issue 1533
 collection/test/equality_test/02: Fail # Issue 1533
 collection/test/equality_test/03: Fail # Issue 1533
@@ -131,22 +49,14 @@
 lookup_map/test/version_check_test: SkipByDesign # Only meant to run in vm.
 typed_data/test/typed_buffers_test/01: Fail # Not supporting Int64List, Uint64List.
 
-# Analysis server integration tests don't make sense to run under
-# dart2js, since the code under test always runs in the Dart vm as a
-# subprocess.
-analysis_server/test/integration: Skip
+[ $compiler == dart2js && $builder_tag != dart2js_analyzer ]
+analyzer/test/*: Skip # Issue 26813
+analyzer/tool/*: Skip # Issue 26813
+analysis_server/test/*: Skip # Issue 26813
 
 [ $compiler == dart2js && $checked ]
 crypto/test/base64_test: Slow, Pass
 
-[ $runtime == d8 ]
-analysis_server/test/analysis_notification_overrides_test: Pass, Slow # Issue 19756
-analysis_server/test/analysis_notification_occurrences_test: Pass, Slow # Issue 19756
-analysis_server/test/analysis_notification_outline_test: Pass, Slow # Issue 19756
-analysis_server/test/domain_search_test: Pass, Slow # Issue 19756
-analysis_server/test/search/element_reference_test: Pass, Slow # Issue 19756
-analysis_server/index/store/codec_test: Pass, Slow # Issue 19756
-
 [ $runtime == jsshell ]
 async/test/stream_zip_test: RuntimeError, OK # Issue 26103. Timers are not supported.
 lookup_map/test/lookup_map_test: RuntimeError, OK # Issue 26103. Timers are not supported.
@@ -160,7 +70,7 @@
 
 [ $runtime == ie10 ]
 analyzer/test/generated/java_core_test: Pass, Timeout # Issue 19747
-typed_data/test/typed_buffers_test/none: Fail # Issue   17607 (I put this here explicitly, since this is not the same as on ie9)
+typed_data/test/typed_buffers_test/none: Fail # Issue 17607 (I put this here explicitly, since this is not the same as on ie9)
 
 [ $runtime == safarimobilesim ]
 # Unexplained errors only occurring on Safari 6.1 and earlier.
@@ -207,11 +117,6 @@
 [ $use_repository_packages ]
 analyzer/test/*: PubGetError
 
-[ $compiler == dart2js && $cps_ir && $host_checked == false ]
-analyzer/test/dart/element/element_test: Pass, Slow # Times out due to inlining, but see issue 24485
-analyzer/test/src/summary/resynthesize_test: Pass, Slow # Times out due to inlining, but see issue 24485
-analyzer/test/src/task/strong_mode_test: Pass, Slow # Times out due to inlining, but see issue 24485
-
 [ $compiler == dart2js && $cps_ir && $host_checked ]
 analyzer/test/dart/ast/ast_test: Crash # Issue 24485
 analyzer/test/dart/ast/visitor_test: Crash # Issue 24485
diff --git a/pkg/pkgbuild.status b/pkg/pkgbuild.status
index c7736e8..2f7a6bd 100644
--- a/pkg/pkgbuild.status
+++ b/pkg/pkgbuild.status
@@ -7,8 +7,5 @@
 [ $use_public_packages ]
 pkg/compiler: SkipByDesign # js_ast is not published
 
-[ $use_repository_packages ]
-third_party/pkg/dartdoc: PubGetError # Issue 26696
-
 [ ($use_repository_packages || $use_public_packages) && ($system == windows || $system == linux) ]
 third_party/pkg/*: Pass, PubGetError # Issue 26696
diff --git a/runtime/bin/bin.gypi b/runtime/bin/bin.gypi
index 3905ad7..2d04720 100644
--- a/runtime/bin/bin.gypi
+++ b/runtime/bin/bin.gypi
@@ -1058,6 +1058,20 @@
       ]
     },
     {
+      'target_name': 'fuchsia_test',
+      'type': 'executable',
+      'dependencies': [
+        'libdart_nosnapshot',
+      ],
+      'include_dirs': [
+        '..',
+        '../include',
+      ],
+      'sources': [
+        'fuchsia_test.cc',
+      ],
+    },
+    {
       # dart binary with a snapshot of corelibs built in.
       'target_name': 'dart',
       'type': 'executable',
diff --git a/runtime/bin/builtin.dart b/runtime/bin/builtin.dart
index 7045187..7780208 100644
--- a/runtime/bin/builtin.dart
+++ b/runtime/bin/builtin.dart
@@ -261,27 +261,6 @@
   return scriptUri.toString();
 }
 
-// Embedder Entrypoint:
-// Function called by standalone embedder to resolve uris when the VM requests
-// Dart_kCanonicalizeUrl from the tag handler.
-String _resolveUri(String base, String userString) {
-  if (!_setupCompleted) {
-    _setupHooks();
-  }
-
-  if (_traceLoading) {
-    _log('Resolving: $userString from $base');
-  }
-
-  var baseUri = Uri.parse(base);
-  var result = baseUri.resolve(userString).toString();
-  if (_traceLoading) {
-    _log('Resolved $userString in $base to $result');
-  }
-
-  return result;
-}
-
 // Embedder Entrypoint (gen_snapshot):
 // Resolve relative paths relative to working directory.
 String _resolveInWorkingDirectory(String fileName) {
diff --git a/runtime/bin/dart_product_entries.txt b/runtime/bin/dart_product_entries.txt
index fcfda57..098d0a0 100644
--- a/runtime/bin/dart_product_entries.txt
+++ b/runtime/bin/dart_product_entries.txt
@@ -1,7 +1,6 @@
 dart:_builtin,::,_getMainClosure
 dart:_builtin,::,_getPrintClosure
 dart:_builtin,::,_getUriBaseClosure
-dart:_builtin,::,_resolveUri
 dart:_builtin,::,_setWorkingDirectory
 dart:_builtin,::,_setPackageRoot
 dart:_builtin,::,_loadPackagesMap
diff --git a/runtime/bin/fuchsia_test.cc b/runtime/bin/fuchsia_test.cc
new file mode 100644
index 0000000..802ff31
--- /dev/null
+++ b/runtime/bin/fuchsia_test.cc
@@ -0,0 +1,39 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include <dart_api.h>
+
+int main(void) {
+  fprintf(stderr, "Calling Dart_SetVMFlags\n");
+  fflush(stderr);
+  if (!Dart_SetVMFlags(0, NULL)) {
+    fprintf(stderr, "Failed to set flags\n");
+    fflush(stderr);
+    return -1;
+  }
+  fprintf(stderr, "Calling Dart_Initialize\n");
+  fflush(stderr);
+  char* error = Dart_Initialize(
+      NULL, NULL, NULL,
+      NULL, NULL, NULL, NULL,
+      NULL,
+      NULL,
+      NULL,
+      NULL,
+      NULL,
+      NULL,
+      NULL);
+  if (error != NULL) {
+    fprintf(stderr, "VM initialization failed: %s\n", error);
+    fflush(stderr);
+    free(error);
+    return -1;
+  }
+  fprintf(stderr, "Success!\n");
+  fflush(stderr);
+  return 0;
+}
diff --git a/runtime/bin/main.cc b/runtime/bin/main.cc
index d7bfb3b..a1f859a 100644
--- a/runtime/bin/main.cc
+++ b/runtime/bin/main.cc
@@ -425,6 +425,26 @@
 }
 
 
+static bool ProcessHotReloadTestModeOption(const char* arg,
+                                           CommandLineOptions* vm_options) {
+  if (*arg != '\0') {
+    return false;
+  }
+
+  // Identity reload.
+  vm_options->AddArgument("--identity_reload");
+  // Start reloading quickly.
+  vm_options->AddArgument("--reload_every=50");
+  // Reload from optimized and unoptimized code.
+  vm_options->AddArgument("--reload_every_optimized=false");
+  // Reload less frequently as time goes on.
+  vm_options->AddArgument("--reload_every_back_off");
+  // Ensure that an isolate has reloaded once.
+  vm_options->AddArgument("--check_reloaded");
+
+  return true;
+}
+
 
 static bool ProcessShutdownOption(const char* arg,
                                   CommandLineOptions* vm_options) {
@@ -477,6 +497,7 @@
   { "--run-app-snapshot=", ProcessRunAppSnapshotOption },
   { "--use-blobs", ProcessUseBlobsOption },
   { "--trace-loading", ProcessTraceLoadingOption },
+  { "--hot-reload-test-mode", ProcessHotReloadTestModeOption },
   { NULL, NULL }
 };
 
@@ -1407,7 +1428,6 @@
         { "dart:_builtin", "::", "_getPrintClosure" },
         { "dart:_builtin", "::", "_getUriBaseClosure" },
         { "dart:_builtin", "::", "_resolveInWorkingDirectory" },
-        { "dart:_builtin", "::", "_resolveUri" },
         { "dart:_builtin", "::", "_setWorkingDirectory" },
         { "dart:_builtin", "::", "_setPackageRoot" },
         { "dart:_builtin", "::", "_libraryFilePath" },
diff --git a/runtime/bin/vmservice/vmservice_io.dart b/runtime/bin/vmservice/vmservice_io.dart
index bfa99a6..3e1bee4 100644
--- a/runtime/bin/vmservice/vmservice_io.dart
+++ b/runtime/bin/vmservice/vmservice_io.dart
@@ -60,6 +60,46 @@
   _shutdown();
 }
 
+Future<Uri> createTempDirCallback(String base) async {
+  Directory temp = await Directory.systemTemp.createTemp(base);
+  return temp.uri;
+}
+
+Future deleteDirCallback(Uri path) async {
+  Directory dir = new Directory.fromUri(path);
+  await dir.delete(recursive: true);
+}
+
+Future writeFileCallback(Uri path, List<int> bytes) async {
+  var file = new File.fromUri(path);
+  await file.writeAsBytes(bytes);
+}
+
+Future<List<int>> readFileCallback(Uri path) async {
+  var file = new File.fromUri(path);
+  return await file.readAsBytes();
+}
+
+Future<List<Map<String, dynamic>>> listFilesCallback(Uri dirPath) async {
+  var dir = new Directory.fromUri(dirPath);
+  var dirPathStr = dirPath.path;
+  var stream = dir.list(recursive: true);
+  var result = [];
+  await for (var fileEntity in stream) {
+    var filePath = new Uri.file(fileEntity.path).path;
+    var stat = await fileEntity.stat();
+    if (stat.type == FileSystemEntityType.FILE &&
+        filePath.startsWith(dirPathStr)) {
+      var map = {};
+      map['name'] = '/' + filePath.substring(dirPathStr.length);
+      map['size'] = stat.size;
+      map['modified'] = stat.modified.millisecondsSinceEpoch;
+      result.add(map);
+    }
+  }
+  return result;
+}
+
 _clearFuture(_) {
   serverFuture = null;
 }
@@ -96,6 +136,11 @@
 main() {
   // Set embedder hooks.
   VMServiceEmbedderHooks.cleanup = cleanupCallback;
+  VMServiceEmbedderHooks.createTempDir = createTempDirCallback;
+  VMServiceEmbedderHooks.deleteDir = deleteDirCallback;
+  VMServiceEmbedderHooks.writeFile = writeFileCallback;
+  VMServiceEmbedderHooks.readFile = readFileCallback;
+  VMServiceEmbedderHooks.listFiles = listFilesCallback;
   // Always instantiate the vmservice object so that the exit message
   // can be delivered and waiting loaders can be cancelled.
   var service = new VMService();
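
(Illustrative only: the shape of one element in the list produced by listFilesCallback above; the values are made up, which is also why the callback's map values are dynamic rather than String.)

  main() {
    var entry = {
      'name': '/lib/main.dart',   // path relative to the DevFS root
      'size': 123,                // bytes, an int
      'modified': 1466000000000,  // millisecondsSinceEpoch, an int
    };
    print(entry);
  }
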
diff --git a/runtime/lib/bigint.dart b/runtime/lib/bigint.dart
index bc9b8f4..c21b04a 100644
--- a/runtime/lib/bigint.dart
+++ b/runtime/lib/bigint.dart
@@ -1336,6 +1336,8 @@
     return str;
   }
 
+  int _bitAndFromSmi(int other) => _bitAndFromInteger(other);
+
   int _bitAndFromInteger(int other) {
     return other._toBigint()._and(this)._toValidInt();
   }
diff --git a/runtime/lib/double.cc b/runtime/lib/double.cc
index 2706269..c28e1b1 100644
--- a/runtime/lib/double.cc
+++ b/runtime/lib/double.cc
@@ -232,9 +232,7 @@
 
 DEFINE_NATIVE_ENTRY(Double_toString, 1) {
   const Number& number = Number::CheckedHandle(arguments->NativeArgAt(0));
-  Heap::Space space = isolate->heap()->ShouldPretenure(kOneByteStringCid) ?
-      Heap::kPretenured : Heap::kNew;
-  return number.ToString(space);
+  return number.ToString(Heap::kNew);
 }
 
 
diff --git a/runtime/lib/integers.cc b/runtime/lib/integers.cc
index 5680037..efb12af 100644
--- a/runtime/lib/integers.cc
+++ b/runtime/lib/integers.cc
@@ -285,6 +285,19 @@
 }
 
 
+DEFINE_NATIVE_ENTRY(Smi_bitAndFromSmi, 2) {
+  const Smi& left = Smi::CheckedHandle(arguments->NativeArgAt(0));
+  GET_NON_NULL_NATIVE_ARGUMENT(Smi, right, arguments->NativeArgAt(1));
+  if (FLAG_trace_intrinsified_natives) {
+    OS::Print("Smi_bitAndFromSmi %s & %s\n",
+        left.ToCString(), right.ToCString());
+  }
+  const Smi& left_value = Smi::Cast(left);
+  const Smi& right_value = Smi::Cast(right);
+  return Smi::New(left_value.Value() & right_value.Value());
+}
+
+
 DEFINE_NATIVE_ENTRY(Smi_shrFromInt, 2) {
   const Smi& amount = Smi::CheckedHandle(arguments->NativeArgAt(0));
   GET_NON_NULL_NATIVE_ARGUMENT(Integer, value, arguments->NativeArgAt(1));
diff --git a/runtime/lib/integers.dart b/runtime/lib/integers.dart
index a7722b0..b86b71c 100644
--- a/runtime/lib/integers.dart
+++ b/runtime/lib/integers.dart
@@ -63,6 +63,7 @@
   num remainder(num other) {
     return other._remainderFromInteger(this);
   }
+  int _bitAndFromSmi(int other) native "Integer_bitAndFromInteger";
   int _bitAndFromInteger(int other) native "Integer_bitAndFromInteger";
   int _bitOrFromInteger(int other) native "Integer_bitOrFromInteger";
   int _bitXorFromInteger(int other) native "Integer_bitXorFromInteger";
@@ -412,6 +413,9 @@
   int operator ~() native "Smi_bitNegate";
   int get bitLength native "Smi_bitLength";
 
+  int operator &(int other) => other._bitAndFromSmi(this);
+
+  int _bitAndFromSmi(int other) native "Smi_bitAndFromSmi";
   int _shrFromInt(int other) native "Smi_shrFromInt";
   int _shlFromInt(int other) native "Smi_shlFromInt";
 
@@ -609,6 +613,8 @@
   int operator ~() native "Mint_bitNegate";
   int get bitLength native "Mint_bitLength";
 
+  int _bitAndFromSmi(int other) => _bitAndFromInteger(other);
+
   // Shift by mint exceeds range that can be handled by the VM.
   int _shrFromInt(int other) {
     if (other < 0) {
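
(A simplified sketch of the double dispatch introduced above, with the natives replaced by plain Dart: the receiver of & forwards to the argument, which knows the receiver is a Smi and picks the fastest routine for the pair. Smi & Smi hits a single fast native; Mint and Bigint receivers fall back to the general _bitAndFromInteger path.)

  abstract class Int {
    int get v;
    int bitAndFromSmi(Smi smi);
  }

  class Smi extends Int {
    final int v;
    Smi(this.v);
    // `this & other` reverses the dispatch: `other` chooses the routine.
    int bitAnd(Int other) => other.bitAndFromSmi(this);
    int bitAndFromSmi(Smi smi) => smi.v & v; // both small: fast path
  }

  class Mint extends Int {
    final int v;
    Mint(this.v);
    int bitAndFromSmi(Smi smi) => smi.v & v; // general path in the real VM
  }

  main() {
    print(new Smi(6).bitAnd(new Smi(3)));  // 2, via the Smi fast path
    print(new Smi(6).bitAnd(new Mint(3))); // 2, via the general path
  }
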
diff --git a/runtime/lib/stacktrace.cc b/runtime/lib/stacktrace.cc
index 9d9a307..a63d66d 100644
--- a/runtime/lib/stacktrace.cc
+++ b/runtime/lib/stacktrace.cc
@@ -61,6 +61,16 @@
   OS::PrintErr("=== Current Trace:\n%s===\n", stacktrace.ToCString());
 }
 
+// Like _printCurrentStacktrace, but works in a NoSafepointScope.
+void _printCurrentStacktraceNoSafepoint() {
+  StackFrameIterator frames(StackFrameIterator::kDontValidateFrames);
+  StackFrame* frame = frames.NextFrame();
+  while (frame != NULL) {
+    OS::Print("%s\n", frame->ToCString());
+    frame = frames.NextFrame();
+  }
+}
+
 DEFINE_NATIVE_ENTRY(StackTrace_current, 0) {
   const Stacktrace& stacktrace = GetCurrentStacktrace(1);
   return stacktrace.raw();
diff --git a/runtime/lib/string.cc b/runtime/lib/string.cc
index 01f6ad0..d4008a3 100644
--- a/runtime/lib/string.cc
+++ b/runtime/lib/string.cc
@@ -300,8 +300,7 @@
 
 DEFINE_NATIVE_ENTRY(OneByteString_allocate, 1) {
   GET_NON_NULL_NATIVE_ARGUMENT(Smi, length_obj, arguments->NativeArgAt(0));
-  Heap::Space space = isolate->heap()->SpaceForAllocation(kOneByteStringCid);
-  return OneByteString::New(length_obj.Value(), space);
+  return OneByteString::New(length_obj.Value(), Heap::kNew);
 }
 
 
@@ -326,7 +325,7 @@
   }
   ASSERT(length >= 0);
 
-  Heap::Space space = isolate->heap()->SpaceForAllocation(kOneByteStringCid);
+  Heap::Space space = Heap::kNew;
   if (list.IsTypedData()) {
     const TypedData& array = TypedData::Cast(list);
     if (end > array.LengthInBytes()) {
@@ -425,7 +424,7 @@
     Exceptions::ThrowArgumentError(end_obj);
   }
 
-  Heap::Space space = isolate->heap()->SpaceForAllocation(kTwoByteStringCid);
+  Heap::Space space = Heap::kNew;
   if (list.IsTypedData()) {
     const TypedData& array = TypedData::Cast(list);
     if (array.ElementType() != kUint16ArrayElement) {
diff --git a/runtime/lib/timeline.cc b/runtime/lib/timeline.cc
index b5b397d..6923e6f 100644
--- a/runtime/lib/timeline.cc
+++ b/runtime/lib/timeline.cc
@@ -54,6 +54,7 @@
 
 
 DEFINE_NATIVE_ENTRY(Timeline_reportTaskEvent, 6) {
+#ifndef PRODUCT
   if (!FLAG_support_timeline) {
     return Object::null();
   }
@@ -114,12 +115,13 @@
 
   // json was allocated in the zone and a copy will be stored in event.
   event->CompleteWithPreSerializedJSON(json);
-
+#endif
   return Object::null();
 }
 
 
 DEFINE_NATIVE_ENTRY(Timeline_reportCompleteEvent, 5) {
+#ifndef PRODUCT
   if (!FLAG_support_timeline) {
     return Object::null();
   }
@@ -185,12 +187,13 @@
                   end_cpu);
   // json was allocated in the zone and a copy will be stored in event.
   event->CompleteWithPreSerializedJSON(json);
-
+#endif
   return Object::null();
 }
 
 
 DEFINE_NATIVE_ENTRY(Timeline_reportInstantEvent, 4) {
+#ifndef PRODUCT
   if (!FLAG_support_timeline) {
     return Object::null();
   }
@@ -228,7 +231,7 @@
   event->Instant("", start.AsInt64Value());
   // json was allocated in the zone and a copy will be stored in event.
   event->CompleteWithPreSerializedJSON(json);
-
+#endif
   return Object::null();
 }
 
diff --git a/runtime/lib/uri_patch.dart b/runtime/lib/uri_patch.dart
index 2664b9b..80024c2 100644
--- a/runtime/lib/uri_patch.dart
+++ b/runtime/lib/uri_patch.dart
@@ -16,14 +16,16 @@
 _UriBaseClosure _uriBaseClosure = _unsupportedUriBase;
 
 patch class Uri {
+  /* patch */ static Uri get base => _uriBaseClosure();
+}
+
+patch class _Uri {
   static final bool _isWindowsCached = _isWindowsPlatform;
 
-  /* patch */ static bool get _isWindows => _isWindowsCached;
-
-  /* patch */ static Uri get base => _uriBaseClosure();
-
   static bool get _isWindowsPlatform native "Uri_isWindowsPlatform";
 
+  /* patch */ static bool get _isWindows => _isWindowsCached;
+
   /* patch */ static String _uriEncode(List<int> canonicalTable,
                                        String text,
                                        Encoding encoding,
diff --git a/runtime/observatory/pubspec.yaml b/runtime/observatory/pubspec.yaml
index b00ab07..e71be36 100644
--- a/runtime/observatory/pubspec.yaml
+++ b/runtime/observatory/pubspec.yaml
@@ -18,6 +18,8 @@
   charted: ^0.3.0
   polymer: ^0.16.3
   unittest: < 0.12.0
+  js: ^0.6.0
+  js_util: ^0.2.0
   usage: any
 dependency_overrides:
   analyzer:
@@ -52,6 +54,10 @@
     path: ../../third_party/observatory_pub_packages/packages/initialize
   intl:
     path: ../../third_party/observatory_pub_packages/packages/intl
+  js:
+    path: ../../third_party/observatory_pub_packages/packages/js
+  js_util:
+    path: ../../third_party/observatory_pub_packages/packages/js_util
   logging:
     path: ../../third_party/observatory_pub_packages/packages/logging
   matcher:
@@ -76,6 +82,8 @@
     path: ../../third_party/observatory_pub_packages/packages/pool
   quiver:
     path: ../../third_party/observatory_pub_packages/packages/quiver
+  quiver_iterables:
+    path: ../../third_party/observatory_pub_packages/packages/quiver_iterables
   smoke:
     path: ../../third_party/observatory_pub_packages/packages/smoke
   source_maps:
diff --git a/runtime/observatory/tests/service/dev_fs_test.dart b/runtime/observatory/tests/service/dev_fs_test.dart
index 8855e13..08e5a5e 100644
--- a/runtime/observatory/tests/service/dev_fs_test.dart
+++ b/runtime/observatory/tests/service/dev_fs_test.dart
@@ -11,17 +11,19 @@
 var tests = [
   (VM vm) async {
     var result = await vm.invokeRpcNoUpgrade('_listDevFS', {});
-    expect(result['type'], equals('FSList'));
+    expect(result['type'], equals('FileSystemList'));
     expect(result['fsNames'].toString(), equals("[]"));
 
     var params = {
       'fsName': 'alpha'
     };
     result = await vm.invokeRpcNoUpgrade('_createDevFS', params);
-    expect(result['type'], equals('Success'));
+    expect(result['type'], equals('FileSystem'));
+    expect(result['name'], equals('alpha'));
+    expect(result['uri'], new isInstanceOf<String>());
 
     result = await vm.invokeRpcNoUpgrade('_listDevFS', {});
-    expect(result['type'], equals('FSList'));
+    expect(result['type'], equals('FileSystemList'));
     expect(result['fsNames'].toString(), equals('[alpha]'));
 
     bool caughtException;
@@ -39,7 +41,7 @@
     expect(result['type'], equals('Success'));
 
     result = await vm.invokeRpcNoUpgrade('_listDevFS', {});
-    expect(result['type'], equals('FSList'));
+    expect(result['type'], equals('FileSystemList'));
     expect(result['fsNames'].toString(), equals("[]"));
 
     caughtException = false;
@@ -61,10 +63,10 @@
 
     var result;
     // Create DevFS.
-    result = await vm.invokeRpcNoUpgrade('_createDevFS', {
-        'fsName': fsId
-            });
-    expect(result['type'], equals('Success'));
+    result = await vm.invokeRpcNoUpgrade('_createDevFS', { 'fsName': fsId });
+    expect(result['type'], equals('FileSystem'));
+    expect(result['name'], equals(fsId));
+    expect(result['uri'], new isInstanceOf<String>());
 
     bool caughtException = false;
     try {
@@ -76,9 +78,7 @@
     } on ServerRpcException catch(e) {
       caughtException = true;
       expect(e.code, equals(ServerRpcException.kFileDoesNotExist));
-      expect(e.message,
-             "_readDevFSFile: file 'dart-devfs://banana//foobar.dat' "
-             "does not exist");
+      expect(e.message, startsWith("_readDevFSFile: FileSystemException: "));
     }
     expect(caughtException, isTrue);
 
@@ -98,25 +98,29 @@
     expect(result['type'], equals('FSFile'));
     expect(result['fileContents'], equals(fileContents));
 
-    // Read a malformed path back.
+    // The leading '/' is optional.
+    result = await vm.invokeRpcNoUpgrade('_readDevFSFile', {
+        'fsName': fsId,
+        'path': filePath.substring(1),
+    });
+    expect(result['type'], equals('FSFile'));
+    expect(result['fileContents'], equals(fileContents));
+
+    // Read a file outside of the fs.
     caughtException = false;
     try {
-      result = await vm.invokeRpcNoUpgrade('_readDevFSFile', {
-          'fsName': fsId,
-          'path': filePath.substring(1)  // Strip the leading '/'.
+      await vm.invokeRpcNoUpgrade('_readDevFSFile', {
+        'fsName': fsId,
+        'path': '../foo',
       });
+      expect(false, isTrue, reason: 'Unreachable');
     } on ServerRpcException catch(e) {
       caughtException = true;
       expect(e.code, equals(ServerRpcException.kInvalidParams));
-      expect(e.message,
-             "_readDevFSFile: file system path \'foobar.dat\' "
-             "must begin with a /");
+      expect(e.message, "_readDevFSFile: invalid 'path' parameter: ../foo");
     }
     expect(caughtException, isTrue);
 
-    expect(result['type'], equals('FSFile'));
-    expect(result['fileContents'], equals(fileContents));
-
     // Write a set of files.
     result = await vm.invokeRpcNoUpgrade('_writeDevFSFiles', {
         'fsName': fsId,
@@ -140,7 +144,7 @@
     result = await vm.invokeRpcNoUpgrade('_listDevFSFiles', {
         'fsName': fsId,
     });
-    expect(result['type'], equals('FSFilesList'));
+    expect(result['type'], equals('FSFileList'));
     expect(result['files'].length, equals(3));
 
     // Delete DevFS.
diff --git a/runtime/observatory/tests/service/service.status b/runtime/observatory/tests/service/service.status
index 31ffa21..b18b6ce 100644
--- a/runtime/observatory/tests/service/service.status
+++ b/runtime/observatory/tests/service/service.status
@@ -54,3 +54,19 @@
 # TODO(vegorov) re-enable when debugger, coverage and profiling is completely
 # fixed for SIMDBC.
 *: Skip
+
+[ $hot_reload ]
+add_breakpoint_rpc_test: Pass, Timeout, Fail, Crash
+code_test: Fail, Crash
+debugger_location_test: Timeout, Fail, Crash
+debugging_inlined_finally_test: Pass, Timeout, Fail, Crash
+dominator_tree_test: Timeout, Crash
+eval_test: Timeout, Fail, Crash
+evaluate_in_frame_rpc_test: Timeout, Fail, Crash
+get_cpu_profile_timeline_rpc_test: Pass, Timeout, Crash
+get_heap_map_rpc_test: Pass, Timeout, Fail, Crash
+get_vm_timeline_rpc_test: Timeout, Fail, Crash
+graph_test: Pass, Timeout, Fail, Crash
+smart_next_test: Pass, Timeout, Fail, Crash
+step_over_await_test: Pass, Timeout, Fail, Crash
+vm_timeline_events_test: Timeout, Fail, Crash
diff --git a/runtime/platform/globals.h b/runtime/platform/globals.h
index e3d1a34..5e3b011 100644
--- a/runtime/platform/globals.h
+++ b/runtime/platform/globals.h
@@ -113,7 +113,7 @@
 // Windows, both 32- and 64-bit, regardless of the check for _WIN32.
 #define TARGET_OS_WINDOWS 1
 
-#else
+#elif !defined(TARGET_OS_FUCHSIA)
 #error Automatic target os detection failed.
 #endif
 
diff --git a/runtime/platform/math.h b/runtime/platform/math.h
index 039153e..0dc34008 100644
--- a/runtime/platform/math.h
+++ b/runtime/platform/math.h
@@ -8,11 +8,16 @@
 // We must take these math functions from the C++ header file as long as we
 // are using the STL. Otherwise the Android build will break due to confusion
 // between C++ and C headers when math.h is also included.
+#if !defined(TARGET_OS_FUCHSIA)
 #include <cmath>
 
 #define isinf(val) std::isinf(val)
 #define isnan(val) std::isnan(val)
 #define signbit(val) std::signbit(val)
 #define isfinite(val) std::isfinite(val)
+#else
+// TODO(zra): When Fuchsia has STL, do the same thing as above.
+#include <math.h>
+#endif
 
 #endif  // PLATFORM_MATH_H_
diff --git a/runtime/platform/utils.h b/runtime/platform/utils.h
index 508e0e7..65bc14f 100644
--- a/runtime/platform/utils.h
+++ b/runtime/platform/utils.h
@@ -215,6 +215,8 @@
 
 #if defined(TARGET_OS_ANDROID)
 #include "platform/utils_android.h"
+#elif defined(TARGET_OS_FUCHSIA)
+#include "platform/utils_fuchsia.h"
 #elif defined(TARGET_OS_LINUX)
 #include "platform/utils_linux.h"
 #elif defined(TARGET_OS_MACOS)
diff --git a/runtime/platform/utils_fuchsia.h b/runtime/platform/utils_fuchsia.h
new file mode 100644
index 0000000..7054225
--- /dev/null
+++ b/runtime/platform/utils_fuchsia.h
@@ -0,0 +1,67 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#ifndef PLATFORM_UTILS_FUCHSIA_H_
+#define PLATFORM_UTILS_FUCHSIA_H_
+
+#include "platform/assert.h"
+
+namespace dart {
+
+inline int Utils::CountLeadingZeros(uword x) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline int Utils::CountTrailingZeros(uword x) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint16_t Utils::HostToBigEndian16(uint16_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint32_t Utils::HostToBigEndian32(uint32_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint64_t Utils::HostToBigEndian64(uint64_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint16_t Utils::HostToLittleEndian16(uint16_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint32_t Utils::HostToLittleEndian32(uint32_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint64_t Utils::HostToLittleEndian64(uint64_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline char* Utils::StrError(int err, char* buffer, size_t bufsize) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+}  // namespace dart
+
+#endif  // PLATFORM_UTILS_FUCHSIA_H_
diff --git a/runtime/vm/assembler_arm.cc b/runtime/vm/assembler_arm.cc
index bf5edce..2e39e22 100644
--- a/runtime/vm/assembler_arm.cc
+++ b/runtime/vm/assembler_arm.cc
@@ -1687,49 +1687,6 @@
 }
 
 
-Operand Assembler::GetVerifiedMemoryShadow() {
-  Operand offset;
-  if (!Operand::CanHold(VerifiedMemory::offset(), &offset)) {
-    FATAL1("Offset 0x%" Px " not representable", VerifiedMemory::offset());
-  }
-  return offset;
-}
-
-
-void Assembler::WriteShadowedField(Register base,
-                                   intptr_t offset,
-                                   Register value,
-                                   Condition cond) {
-  if (VerifiedMemory::enabled()) {
-    ASSERT(base != value);
-    Operand shadow(GetVerifiedMemoryShadow());
-    add(base, base, shadow, cond);
-    str(value, Address(base, offset), cond);
-    sub(base, base, shadow, cond);
-  }
-  str(value, Address(base, offset), cond);
-}
-
-
-void Assembler::WriteShadowedFieldPair(Register base,
-                                       intptr_t offset,
-                                       Register value_even,
-                                       Register value_odd,
-                                       Condition cond) {
-  ASSERT(value_odd == value_even + 1);
-  ASSERT(value_even % 2 == 0);
-  if (VerifiedMemory::enabled()) {
-    ASSERT(base != value_even);
-    ASSERT(base != value_odd);
-    Operand shadow(GetVerifiedMemoryShadow());
-    add(base, base, shadow, cond);
-    strd(value_even, value_odd, base, offset, cond);
-    sub(base, base, shadow, cond);
-  }
-  strd(value_even, value_odd, base, offset, cond);
-}
-
-
 Register UseRegister(Register reg, RegList* used) {
   ASSERT(reg != THR);
   ASSERT(reg != SP);
@@ -1749,89 +1706,12 @@
 }
 
 
-void Assembler::VerifiedWrite(Register object,
-                              const Address& address,
-                              Register new_value,
-                              FieldContent old_content) {
-#if defined(DEBUG)
-  ASSERT(address.mode() == Address::Offset ||
-         address.mode() == Address::NegOffset);
-  // Allocate temporary registers (and check for register collisions).
-  RegList used = 0;
-  UseRegister(new_value, &used);
-  Register base = UseRegister(address.rn(), &used);
-  if ((object != base) && (object != kNoRegister)) {
-    UseRegister(object, &used);
-  }
-  if (address.rm() != kNoRegister) {
-    UseRegister(address.rm(), &used);
-  }
-  Register old_value = AllocateRegister(&used);
-  Register temp = AllocateRegister(&used);
-  PushList(used);
-  ldr(old_value, address);
-  // First check that 'old_value' contains 'old_content'.
-  // Smi test.
-  tst(old_value, Operand(kHeapObjectTag));
-  Label ok;
-  switch (old_content) {
-    case kOnlySmi:
-      b(&ok, EQ);  // Smi is OK.
-      Stop("Expected smi.");
-      break;
-    case kHeapObjectOrSmi:
-      b(&ok, EQ);  // Smi is OK.
-      // Non-smi case: Verify object pointer is word-aligned when untagged.
-      COMPILE_ASSERT(kHeapObjectTag == 1);
-      tst(old_value, Operand((kWordSize - 1) - kHeapObjectTag));
-      b(&ok, EQ);
-      Stop("Expected heap object or Smi");
-      break;
-    case kEmptyOrSmiOrNull:
-      b(&ok, EQ);  // Smi is OK.
-      // Non-smi case: Check for the special zap word or null.
-      // Note: Cannot use CompareImmediate, since IP may be in use.
-      LoadImmediate(temp, Heap::kZap32Bits);
-      cmp(old_value, Operand(temp));
-      b(&ok, EQ);
-      LoadObject(temp, Object::null_object());
-      cmp(old_value, Operand(temp));
-      b(&ok, EQ);
-      Stop("Expected zapped, Smi or null");
-      break;
-    default:
-      UNREACHABLE();
-  }
-  Bind(&ok);
-  if (VerifiedMemory::enabled()) {
-    Operand shadow_offset(GetVerifiedMemoryShadow());
-    // Adjust the address to shadow.
-    add(base, base, shadow_offset);
-    ldr(temp, address);
-    cmp(old_value, Operand(temp));
-    Label match;
-    b(&match, EQ);
-    Stop("Write barrier verification failed");
-    Bind(&match);
-    // Write new value in shadow.
-    str(new_value, address);
-    // Restore original address.
-    sub(base, base, shadow_offset);
-  }
-  str(new_value, address);
-  PopList(used);
-#else
-  str(new_value, address);
-#endif  // DEBUG
-}
-
-
 void Assembler::StoreIntoObject(Register object,
                                 const Address& dest,
                                 Register value,
                                 bool can_value_be_smi) {
   ASSERT(object != value);
-  VerifiedWrite(object, dest, value, kHeapObjectOrSmi);
+  str(value, dest);
   Label done;
   if (can_value_be_smi) {
     StoreIntoObjectFilter(object, value, &done);
@@ -1872,9 +1752,8 @@
 
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         Register value,
-                                         FieldContent old_content) {
-  VerifiedWrite(object, dest, value, old_content);
+                                         Register value) {
+  str(value, dest);
 #if defined(DEBUG)
   Label done;
   StoreIntoObjectFilter(object, value, &done);
@@ -1885,50 +1764,48 @@
 }
 
 
-void Assembler::StoreIntoObjectNoBarrierOffset(Register object,
-                                               int32_t offset,
-                                               Register value,
-                                               FieldContent old_content) {
-  int32_t ignored = 0;
-  if (Address::CanHoldStoreOffset(kWord, offset - kHeapObjectTag, &ignored)) {
-    StoreIntoObjectNoBarrier(object, FieldAddress(object, offset), value,
-                             old_content);
-  } else {
-    AddImmediate(IP, object, offset - kHeapObjectTag);
-    StoreIntoObjectNoBarrier(object, Address(IP), value, old_content);
-  }
-}
-
-
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         const Object& value,
-                                         FieldContent old_content) {
+                                         const Object& value) {
   ASSERT(!value.IsICData() || ICData::Cast(value).IsOriginal());
   ASSERT(!value.IsField() || Field::Cast(value).IsOriginal());
   ASSERT(value.IsSmi() || value.InVMHeap() ||
          (value.IsOld() && value.IsNotTemporaryScopedHandle()));
   // No store buffer update.
   LoadObject(IP, value);
-  VerifiedWrite(object, dest, IP, old_content);
+  str(IP, dest);
 }
 
 
 void Assembler::StoreIntoObjectNoBarrierOffset(Register object,
                                                int32_t offset,
-                                               const Object& value,
-                                               FieldContent old_content) {
-  ASSERT(!value.IsICData() || ICData::Cast(value).IsOriginal());
-  ASSERT(!value.IsField() || Field::Cast(value).IsOriginal());
+                                               Register value) {
   int32_t ignored = 0;
   if (Address::CanHoldStoreOffset(kWord, offset - kHeapObjectTag, &ignored)) {
-    StoreIntoObjectNoBarrier(object, FieldAddress(object, offset), value,
-                             old_content);
+    StoreIntoObjectNoBarrier(object, FieldAddress(object, offset), value);
   } else {
     Register base = object == R9 ? R8 : R9;
     Push(base);
     AddImmediate(base, object, offset - kHeapObjectTag);
-    StoreIntoObjectNoBarrier(object, Address(base), value, old_content);
+    StoreIntoObjectNoBarrier(object, Address(base), value);
+    Pop(base);
+  }
+}
+
+
+void Assembler::StoreIntoObjectNoBarrierOffset(Register object,
+                                               int32_t offset,
+                                               const Object& value) {
+  ASSERT(!value.IsICData() || ICData::Cast(value).IsOriginal());
+  ASSERT(!value.IsField() || Field::Cast(value).IsOriginal());
+  int32_t ignored = 0;
+  if (Address::CanHoldStoreOffset(kWord, offset - kHeapObjectTag, &ignored)) {
+    StoreIntoObjectNoBarrier(object, FieldAddress(object, offset), value);
+  } else {
+    Register base = object == R9 ? R8 : R9;
+    Push(base);
+    AddImmediate(base, object, offset - kHeapObjectTag);
+    StoreIntoObjectNoBarrier(object, Address(base), value);
     Pop(base);
   }
 }
@@ -1944,9 +1821,9 @@
   Bind(&init_loop);
   AddImmediate(begin, 2 * kWordSize);
   cmp(begin, Operand(end));
-  WriteShadowedFieldPair(begin, -2 * kWordSize, value_even, value_odd, LS);
+  strd(value_even, value_odd, begin, -2 * kWordSize, LS);
   b(&init_loop, CC);
-  WriteShadowedField(begin, -2 * kWordSize, value_even, HI);
+  str(value_even, Address(begin, -2 * kWordSize), HI);
 #if defined(DEBUG)
   Label done;
   StoreIntoObjectFilter(object, value_even, &done);
@@ -1967,11 +1844,11 @@
   ASSERT(value_odd == value_even + 1);
   intptr_t current_offset = begin_offset;
   while (current_offset + kWordSize < end_offset) {
-    WriteShadowedFieldPair(base, current_offset, value_even, value_odd);
+    strd(value_even, value_odd, base, current_offset);
     current_offset += 2*kWordSize;
   }
   while (current_offset < end_offset) {
-    WriteShadowedField(base, current_offset, value_even);
+    str(value_even, Address(base, current_offset));
     current_offset += kWordSize;
   }
 #if defined(DEBUG)
@@ -1993,7 +1870,7 @@
   Stop("New value must be Smi.");
   Bind(&done);
 #endif  // defined(DEBUG)
-  VerifiedWrite(kNoRegister, dest, value, kOnlySmi);
+  str(value, dest);
 }
 
 
@@ -3477,8 +3354,9 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cls.id(), temp_reg, failure);
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cls.id(), temp_reg, failure));
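+    // NOT_IN_PRODUCT expands to nothing in PRODUCT builds, so allocation
+    // tracing and the stats updates below are compiled out of releases.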
+    Heap::Space space = Heap::kNew;
     ldr(temp_reg, Address(THR, Thread::heap_offset()));
     ldr(instance_reg, Address(temp_reg, Heap::TopOffset(space)));
     // TODO(koda): Protect against unsigned overflow here.
@@ -3494,7 +3372,7 @@
     // next object start and store the class in the class field of object.
     str(instance_reg, Address(temp_reg, Heap::TopOffset(space)));
 
-    LoadAllocationStatsAddress(temp_reg, cls.id());
+    NOT_IN_PRODUCT(LoadAllocationStatsAddress(temp_reg, cls.id()));
 
     ASSERT(instance_size >= kHeapObjectTag);
     AddImmediate(instance_reg, -instance_size + kHeapObjectTag);
@@ -3506,7 +3384,7 @@
     LoadImmediate(IP, tags);
     str(IP, FieldAddress(instance_reg, Object::tags_offset()));
 
-    IncrementAllocationStats(temp_reg, cls.id(), space);
+    NOT_IN_PRODUCT(IncrementAllocationStats(temp_reg, cls.id(), space));
   } else {
     b(failure);
   }
@@ -3524,8 +3402,8 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cid, temp1, failure);
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, temp1, failure));
+    Heap::Space space = Heap::kNew;
     ldr(temp1, Address(THR, Thread::heap_offset()));
     // Potential new object start.
     ldr(instance, Address(temp1, Heap::TopOffset(space)));
@@ -3539,7 +3417,7 @@
     cmp(end_address, Operand(temp2));
     b(failure, CS);
 
-    LoadAllocationStatsAddress(temp2, cid);
+    NOT_IN_PRODUCT(LoadAllocationStatsAddress(temp2, cid));
 
     // Successfully allocated the object(s), now update top to point to
     // next object start and initialize the object.
@@ -3555,7 +3433,7 @@
     str(temp1, FieldAddress(instance, Array::tags_offset()));  // Store tags.
 
     LoadImmediate(temp1, instance_size);
-    IncrementAllocationStatsWithSize(temp2, temp1, space);
+    NOT_IN_PRODUCT(IncrementAllocationStatsWithSize(temp2, temp1, space));
   } else {
     b(failure);
   }
diff --git a/runtime/vm/assembler_arm.h b/runtime/vm/assembler_arm.h
index 66e7c13..cac760b 100644
--- a/runtime/vm/assembler_arm.h
+++ b/runtime/vm/assembler_arm.h
@@ -744,14 +744,6 @@
   void PushObject(const Object& object);
   void CompareObject(Register rn, const Object& object);
 
-  // When storing into a heap object field, knowledge of the previous content
-  // is expressed through these constants.
-  enum FieldContent {
-    kEmptyOrSmiOrNull,  // Empty = garbage/zapped in release/debug mode.
-    kHeapObjectOrSmi,
-    kOnlySmi,
-  };
-
   void StoreIntoObject(Register object,  // Object we are storing into.
                        const Address& dest,  // Where we are storing into.
                        Register value,  // Value we are storing.
@@ -763,27 +755,16 @@
 
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                Register value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void InitializeFieldNoBarrier(Register object,
-                                const Address& dest,
-                                Register value) {
-    StoreIntoObjectNoBarrier(object, dest, value, kEmptyOrSmiOrNull);
-  }
-  void StoreIntoObjectNoBarrierOffset(
-      Register object,
-      int32_t offset,
-      Register value,
-      FieldContent old_content = kHeapObjectOrSmi);
+                                Register value);
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                const Object& value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void StoreIntoObjectNoBarrierOffset(
-      Register object,
-      int32_t offset,
-      const Object& value,
-      FieldContent old_content = kHeapObjectOrSmi);
+                                const Object& value);
+  void StoreIntoObjectNoBarrierOffset(Register object,
+                                      int32_t offset,
+                                      Register value);
+  void StoreIntoObjectNoBarrierOffset(Register object,
+                                      int32_t offset,
+                                      const Object& value);
 
   // Store value_even, value_odd, value_even, ... into the words in the address
   // range [begin, end), assumed to be uninitialized fields in object (tagged).
@@ -1193,27 +1174,6 @@
                                   Register value,
                                   Label* no_update);
 
-  // Helpers for write-barrier verification.
-
-  // Returns VerifiedMemory::offset() as an Operand.
-  Operand GetVerifiedMemoryShadow();
-  // Writes value to [base + offset] and also its shadow location, if enabled.
-  void WriteShadowedField(Register base,
-                          intptr_t offset,
-                          Register value,
-                          Condition cond = AL);
-  void WriteShadowedFieldPair(Register base,
-                              intptr_t offset,
-                              Register value_even,
-                              Register value_odd,
-                              Condition cond = AL);
-  // Writes new_value to address and its shadow location, if enabled, after
-  // verifying that its old value matches its shadow.
-  void VerifiedWrite(Register object,
-                     const Address& address,
-                     Register new_value,
-                     FieldContent old_content);
-
   DISALLOW_ALLOCATION();
   DISALLOW_COPY_AND_ASSIGN(Assembler);
 };
diff --git a/runtime/vm/assembler_arm64.cc b/runtime/vm/assembler_arm64.cc
index 9832dae..26c0b4a 100644
--- a/runtime/vm/assembler_arm64.cc
+++ b/runtime/vm/assembler_arm64.cc
@@ -1354,9 +1354,9 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cls.id(), temp_reg, failure);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cls.id(), temp_reg, failure));
     const intptr_t instance_size = cls.instance_size();
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     ldr(temp_reg, Address(THR, Thread::heap_offset()));
     ldr(instance_reg, Address(temp_reg, Heap::TopOffset(space)));
     // TODO(koda): Protect against unsigned overflow here.
@@ -1375,7 +1375,7 @@
     ASSERT(instance_size >= kHeapObjectTag);
     AddImmediate(
         instance_reg, instance_reg, -instance_size + kHeapObjectTag);
-    UpdateAllocationStats(cls.id(), space);
+    NOT_IN_PRODUCT(UpdateAllocationStats(cls.id(), space));
 
     uword tags = 0;
     tags = RawObject::SizeTag::update(instance_size, tags);
@@ -1400,8 +1400,8 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cid, temp1, failure);
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, temp1, failure));
+    Heap::Space space = Heap::kNew;
     ldr(temp1, Address(THR, Thread::heap_offset()));
     // Potential new object start.
     ldr(instance, Address(temp1, Heap::TopOffset(space)));
@@ -1420,7 +1420,7 @@
     str(end_address, Address(temp1, Heap::TopOffset(space)));
     add(instance, instance, Operand(kHeapObjectTag));
     LoadImmediate(temp2, instance_size);
-    UpdateAllocationStatsWithSize(cid, temp2, space);
+    NOT_IN_PRODUCT(UpdateAllocationStatsWithSize(cid, temp2, space));
 
     // Initialize the tags.
     // instance: new object start as a tagged pointer.
diff --git a/runtime/vm/assembler_dbc_test.cc b/runtime/vm/assembler_dbc_test.cc
index e97fdd7..180437e 100644
--- a/runtime/vm/assembler_dbc_test.cc
+++ b/runtime/vm/assembler_dbc_test.cc
@@ -121,11 +121,11 @@
 
 ASSEMBLER_TEST_GENERATE(Nop, assembler) {
   __ PushConstant(Smi::Handle(Smi::New(42)));
-  __ Nop();
-  __ Nop();
-  __ Nop();
-  __ Nop();
-  __ Nop();
+  __ Nop(0);
+  __ Nop(0);
+  __ Nop(0);
+  __ Nop(0);
+  __ Nop(0);
   __ ReturnTOS();
 }
 
@@ -1606,6 +1606,50 @@
 }
 
 
+//  - TestSmi rA, rD
+//
+//    If FP[rA] & FP[rD] != 0, then skip the next instruction. FP[rA] and FP[rD]
+//    must be Smis.
+ASSEMBLER_TEST_GENERATE(TestSmiTrue, assembler) {
+  Label branch_taken;
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(7)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(3)));
+  __ TestSmi(0, 1);
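+  // 7 & 3 == 3, which is non-zero, so the Jump below is skipped.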
+  __ Jump(&branch_taken);
+  __ PushConstant(Bool::True());
+  __ ReturnTOS();
+  __ Bind(&branch_taken);
+  __ PushConstant(Bool::False());
+  __ ReturnTOS();
+}
+
+
+ASSEMBLER_TEST_RUN(TestSmiTrue, test) {
+  EXPECT(EXECUTE_TEST_CODE_BOOL(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(TestSmiFalse, assembler) {
+  Label branch_taken;
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(8)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(4)));
+  __ TestSmi(0, 1);
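+  // 8 & 4 == 0, so the Jump below is taken.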
+  __ Jump(&branch_taken);
+  __ PushConstant(Bool::True());
+  __ ReturnTOS();
+  __ Bind(&branch_taken);
+  __ PushConstant(Bool::False());
+  __ ReturnTOS();
+}
+
+
+ASSEMBLER_TEST_RUN(TestSmiFalse, test) {
+  EXPECT(!EXECUTE_TEST_CODE_BOOL(test->code()));
+}
+
+
 //  - CheckSmi rA
 //
 //    If FP[rA] is a Smi, then skip the next instruction.
@@ -1646,7 +1690,7 @@
 ASSEMBLER_TEST_GENERATE(CheckClassIdSmiPass, assembler) {
   __ Frame(1);
   __ LoadConstant(0, Smi::Handle(Smi::New(42)));
-  __ CheckClassId(0, __ AddConstant(Smi::Handle(Smi::New(kSmiCid))));
+  __ CheckClassId(0, kSmiCid);
   __ LoadConstant(0, Smi::Handle(Smi::New(-1)));
   __ Return(0);
 }
@@ -1660,7 +1704,7 @@
 ASSEMBLER_TEST_GENERATE(CheckClassIdNonSmiPass, assembler) {
   __ Frame(1);
   __ LoadConstant(0, Bool::True());
-  __ CheckClassId(0, __ AddConstant(Smi::Handle(Smi::New(kBoolCid))));
+  __ CheckClassId(0, kBoolCid);
   __ LoadConstant(0, Bool::False());
   __ Return(0);
 }
@@ -1674,7 +1718,7 @@
 ASSEMBLER_TEST_GENERATE(CheckClassIdFail, assembler) {
   __ Frame(1);
   __ LoadConstant(0, Smi::Handle(Smi::New(-1)));
-  __ CheckClassId(0, __ AddConstant(Smi::Handle(Smi::New(kBoolCid))));
+  __ CheckClassId(0, kBoolCid);
   __ LoadConstant(0, Smi::Handle(Smi::New(42)));
   __ Return(0);
 }
@@ -1684,6 +1728,70 @@
   EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
 }
 
+
+//  - If<Cond>Null rA
+//
+//    Cond is Eq or Ne. Skips the next instruction unless the given condition
+//    holds.
+ASSEMBLER_TEST_GENERATE(IfEqNullNotNull, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(-1)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(42)));
+  __ IfEqNull(0);
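+  // FP[0] is not null, so the LoadConstant below is skipped, leaving 42 in
+  // FP[1].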
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(IfEqNullNotNull, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(IfEqNullIsNull, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Object::null_object());
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ IfEqNull(0);
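+  // FP[0] is null, so execution falls through and FP[1] is set to 42.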
+  __ LoadConstant(1, Smi::Handle(Smi::New(42)));
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(IfEqNullIsNull, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(IfNeNullIsNull, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Object::null_object());
+  __ LoadConstant(1, Smi::Handle(Smi::New(42)));
+  __ IfNeNull(0);
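+  // FP[0] is null, so the LoadConstant below is skipped, leaving 42 in FP[1].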
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(IfNeNullIsNull, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(IfNeNullNotNull, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(-1)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ IfNeNull(0);
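+  // FP[0] is not null, so execution falls through and FP[1] is set to 42.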
+  __ LoadConstant(1, Smi::Handle(Smi::New(42)));
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(IfNeNullNotNull, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
 }  // namespace dart
 
 #endif  // defined(TARGET_ARCH_DBC)
diff --git a/runtime/vm/assembler_ia32.cc b/runtime/vm/assembler_ia32.cc
index e8851ac..1bc37d8 100644
--- a/runtime/vm/assembler_ia32.cc
+++ b/runtime/vm/assembler_ia32.cc
@@ -14,7 +14,6 @@
 #include "vm/runtime_entry.h"
 #include "vm/stack_frame.h"
 #include "vm/stub_code.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
@@ -1998,9 +1997,6 @@
 
 void Assembler::j(Condition condition, Label* label, bool near) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
-  if (VerifiedMemory::enabled()) {
-    near = Assembler::kFarJump;
-  }
   if (label->IsBound()) {
     static const int kShortSize = 2;
     static const int kLongSize = 6;
@@ -2043,9 +2039,6 @@
 
 void Assembler::jmp(Label* label, bool near) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
-  if (VerifiedMemory::enabled()) {
-    near = Assembler::kFarJump;
-  }
   if (label->IsBound()) {
     static const int kShortSize = 2;
     static const int kLongSize = 5;
@@ -2266,105 +2259,13 @@
 }
 
 
-void Assembler::VerifyHeapWord(const Address& address,
-                               FieldContent old_content) {
-#if defined(DEBUG)
-  switch (old_content) {
-    case kEmptyOrSmiOrNull:
-      VerifyUninitialized(address);
-      break;
-    case kHeapObjectOrSmi:
-      VerifyObjectOrSmi(address);
-      break;
-    case kOnlySmi:
-      VerifySmi(address);
-      break;
-  }
-#endif  // DEBUG
-  if (VerifiedMemory::enabled()) {
-    Register addr_reg = EDX;
-    Register value = EBX;
-    // Preserve registers.
-    pushl(addr_reg);
-    pushl(value);
-    leal(addr_reg, address);
-    // ASSERT(*address == *(address + offset))
-    movl(value, Address(addr_reg, 0));
-    cmpl(value, Address(addr_reg, VerifiedMemory::offset()));
-    Label ok;
-    j(EQUAL, &ok, Assembler::kNearJump);
-    Stop("Write barrier verification failed");
-    Bind(&ok);
-    popl(value);
-    popl(addr_reg);
-  }
-}
-
-
-void Assembler::VerifiedWrite(const Address& dest,
-                              Register value,
-                              FieldContent old_content) {
-  VerifyHeapWord(dest, old_content);
-  movl(dest, value);
-  if (VerifiedMemory::enabled()) {
-    Register temp = (value == EDX) ? ECX : EDX;
-    pushl(temp);
-    leal(temp, dest);
-    movl(Address(temp, VerifiedMemory::offset()), value);
-    popl(temp);
-  }
-}
-
-
-#if defined(DEBUG)
-void Assembler::VerifyObjectOrSmi(const Address& dest) {
-  Label ok;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  // Non-smi case: Verify object pointer is word-aligned when untagged.
-  COMPILE_ASSERT(kHeapObjectTag == 1);
-  testb(dest, Immediate((kWordSize - 1) - kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  Stop("Expected heap object or Smi");
-  Bind(&ok);
-}
-
-
-void Assembler::VerifyUninitialized(const Address& dest) {
-  Label ok;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  // Non-smi case: Check for the special zap word or null.
-#if defined(DEBUG)
-  cmpl(dest, Immediate(Heap::kZap32Bits));
-  j(EQUAL, &ok, Assembler::kNearJump);
-#else
-#error Only supported in DEBUG mode
-#endif
-  cmpl(dest, Immediate(reinterpret_cast<uint32_t>(Object::null())));
-  j(EQUAL, &ok, Assembler::kNearJump);
-  Stop("Expected zapped, Smi or null");
-  Bind(&ok);
-}
-
-
-void Assembler::VerifySmi(const Address& dest, const char* stop_msg) {
-  Label done;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &done, Assembler::kNearJump);
-  Stop(stop_msg);
-  Bind(&done);
-}
-#endif  // defined(DEBUG)
-
-
 // Destroys the value register.
 void Assembler::StoreIntoObject(Register object,
                                 const Address& dest,
                                 Register value,
                                 bool can_value_be_smi) {
   ASSERT(object != value);
-  VerifiedWrite(dest, value, kHeapObjectOrSmi);
+  movl(dest, value);
   Label done;
   if (can_value_be_smi) {
     StoreIntoObjectFilter(object, value, &done);
@@ -2388,9 +2289,8 @@
 
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         Register value,
-                                         FieldContent old_content) {
-  VerifiedWrite(dest, value, old_content);
+                                         Register value) {
+  movl(dest, value);
 #if defined(DEBUG)
   Label done;
   pushl(value);
@@ -2418,30 +2318,14 @@
 
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         const Object& value,
-                                         FieldContent old_content) {
+                                         const Object& value) {
   ASSERT(!value.IsICData() || ICData::Cast(value).IsOriginal());
   ASSERT(!value.IsField() || Field::Cast(value).IsOriginal());
-  VerifyHeapWord(dest, old_content);
   if (value.IsSmi() || value.InVMHeap()) {
     Immediate imm_value(reinterpret_cast<int32_t>(value.raw()));
     movl(dest, imm_value);
-    if (VerifiedMemory::enabled()) {
-      Register temp = ECX;
-      pushl(temp);
-      leal(temp, dest);
-      movl(Address(temp, VerifiedMemory::offset()), imm_value);
-      popl(temp);
-    }
   } else {
     UnverifiedStoreOldObject(dest, value);
-    if (VerifiedMemory::enabled()) {
-      Register temp = EDX;
-      pushl(temp);
-      leal(temp, dest);
-      UnverifiedStoreOldObject(Address(temp, VerifiedMemory::offset()), value);
-      popl(temp);
-    }
   }
   // No store buffer update.
 }
@@ -2455,37 +2339,21 @@
   Stop("New value must be Smi.");
   Bind(&done);
 #endif  // defined(DEBUG)
-  VerifiedWrite(dest, value, kOnlySmi);
+  movl(dest, value);
 }
 
 
 void Assembler::ZeroInitSmiField(const Address& dest) {
-  VerifyHeapWord(dest, kEmptyOrSmiOrNull);
   Immediate zero(Smi::RawValue(0));
   movl(dest, zero);
-  if (VerifiedMemory::enabled()) {
-    Register temp = ECX;
-    pushl(temp);
-    leal(temp, dest);
-    movl(Address(temp, VerifiedMemory::offset()), zero);
-    popl(temp);
-  }
 }
 
 
 void Assembler::IncrementSmiField(const Address& dest, int32_t increment) {
   // Note: FlowGraphCompiler::EdgeCounterIncrementSizeInBytes depends on
   // the length of this instruction sequence.
-  VerifyHeapWord(dest, kOnlySmi);
   Immediate inc_imm(Smi::RawValue(increment));
   addl(dest, inc_imm);
-  if (VerifiedMemory::enabled()) {
-    Register temp = ECX;
-    pushl(temp);
-    leal(temp, dest);
-    addl(Address(temp, VerifiedMemory::offset()), inc_imm);
-    popl(temp);
-  }
 }
 
 
@@ -2771,9 +2639,10 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cls.id(), temp_reg, failure, near_jump);
+    NOT_IN_PRODUCT(
+      MaybeTraceAllocation(cls.id(), temp_reg, failure, near_jump));
     const intptr_t instance_size = cls.instance_size();
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     movl(temp_reg, Address(THR, Thread::heap_offset()));
     movl(instance_reg, Address(temp_reg, Heap::TopOffset(space)));
     addl(instance_reg, Immediate(instance_size));
@@ -2783,7 +2652,7 @@
     // Successfully allocated the object, now update top to point to
     // next object start and store the class in the class field of object.
     movl(Address(temp_reg, Heap::TopOffset(space)), instance_reg);
-    UpdateAllocationStats(cls.id(), temp_reg, space);
+    NOT_IN_PRODUCT(UpdateAllocationStats(cls.id(), temp_reg, space));
     ASSERT(instance_size >= kHeapObjectTag);
     subl(instance_reg, Immediate(instance_size - kHeapObjectTag));
     uword tags = 0;
@@ -2810,8 +2679,8 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cid, temp_reg, failure, near_jump);
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, temp_reg, failure, near_jump));
+    Heap::Space space = Heap::kNew;
     movl(temp_reg, Address(THR, Thread::heap_offset()));
     movl(instance, Address(temp_reg, Heap::TopOffset(space)));
     movl(end_address, instance);
@@ -2829,7 +2698,8 @@
     // next object start and initialize the object.
     movl(Address(temp_reg, Heap::TopOffset(space)), end_address);
     addl(instance, Immediate(kHeapObjectTag));
-    UpdateAllocationStatsWithSize(cid, instance_size, temp_reg, space);
+    NOT_IN_PRODUCT(
+        UpdateAllocationStatsWithSize(cid, instance_size, temp_reg, space));
 
     // Initialize the tags.
     uword tags = 0;
diff --git a/runtime/vm/assembler_ia32.h b/runtime/vm/assembler_ia32.h
index 5753792..77f77a6 100644
--- a/runtime/vm/assembler_ia32.h
+++ b/runtime/vm/assembler_ia32.h
@@ -636,12 +636,10 @@
     return 0xCCCCCCCC;
   }
 
-  // Note: verified_mem mode forces far jumps.
   void j(Condition condition, Label* label, bool near = kFarJump);
   void j(Condition condition, const ExternalLabel* label);
 
   void jmp(Register reg);
-  // Note: verified_mem mode forces far jumps.
   void jmp(Label* label, bool near = kFarJump);
   void jmp(const ExternalLabel* label);
 
@@ -677,14 +675,6 @@
   void CompareObject(Register reg, const Object& object);
   void LoadDoubleConstant(XmmRegister dst, double value);
 
-  // When storing into a heap object field, knowledge of the previous content
-  // is expressed through these constants.
-  enum FieldContent {
-    kEmptyOrSmiOrNull,  // Empty = garbage/zapped in release/debug mode.
-    kHeapObjectOrSmi,
-    kOnlySmi,
-  };
-
   void StoreIntoObject(Register object,  // Object we are storing into.
                        const Address& dest,  // Where we are storing into.
                        Register value,  // Value we are storing.
@@ -692,22 +682,10 @@
 
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                Register value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void InitializeFieldNoBarrier(Register object,
-                                const Address& dest,
-                                Register value) {
-    return StoreIntoObjectNoBarrier(object, dest, value, kEmptyOrSmiOrNull);
-  }
+                                Register value);
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                const Object& value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void InitializeFieldNoBarrier(Register object,
-                                const Address& dest,
-                                const Object& value) {
-    return StoreIntoObjectNoBarrier(object, dest, value, kEmptyOrSmiOrNull);
-  }
+                                const Object& value);
 
   // Stores a Smi value into a heap object field that always contains a Smi.
   void StoreIntoSmiField(const Address& dest, Register value);
@@ -1009,17 +987,6 @@
   void StoreIntoObjectFilterNoSmi(Register object,
                                   Register value,
                                   Label* no_update);
-#if defined(DEBUG)
-  void VerifyUninitialized(const Address& address);
-  void VerifyObjectOrSmi(const Address& address);
-  void VerifySmi(const Address& address, const char* stop_msg = "Expected Smi");
-#endif  // DEBUG
-  // Like VerifiedMemory::Verify(address, kWordSize) and ::Write, but also,
-  // in DEBUG mode, verifies that 'address' has content of type 'old_content'.
-  void VerifyHeapWord(const Address& address, FieldContent old_content);
-  void VerifiedWrite(const Address& dest,
-                     Register value,
-                     FieldContent old_content);
   void UnverifiedStoreOldObject(const Address& dest, const Object& value);
 
   int32_t jit_cookie();
diff --git a/runtime/vm/assembler_mips.cc b/runtime/vm/assembler_mips.cc
index c9c6549..ffd927d 100644
--- a/runtime/vm/assembler_mips.cc
+++ b/runtime/vm/assembler_mips.cc
@@ -981,9 +981,9 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cls.id(), temp_reg, failure);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cls.id(), temp_reg, failure));
     const intptr_t instance_size = cls.instance_size();
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     lw(temp_reg, Address(THR, Thread::heap_offset()));
     lw(instance_reg, Address(temp_reg, Heap::TopOffset(space)));
     // TODO(koda): Protect against unsigned overflow here.
@@ -1000,7 +1000,7 @@
 
     ASSERT(instance_size >= kHeapObjectTag);
     AddImmediate(instance_reg, -instance_size + kHeapObjectTag);
-    UpdateAllocationStats(cls.id(), temp_reg, space);
+    NOT_IN_PRODUCT(UpdateAllocationStats(cls.id(), temp_reg, space));
     uword tags = 0;
     tags = RawObject::SizeTag::update(instance_size, tags);
     ASSERT(cls.id() != kIllegalCid);
@@ -1024,10 +1024,10 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cid, temp1, failure);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, temp1, failure));
     Isolate* isolate = Isolate::Current();
     Heap* heap = isolate->heap();
-    Heap::Space space = heap->SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     lw(temp1, Address(THR, Thread::heap_offset()));
     // Potential new object start.
     lw(instance, Address(temp1, heap->TopOffset(space)));
@@ -1047,7 +1047,7 @@
     sw(end_address, Address(temp1, Heap::TopOffset(space)));
     addiu(instance, instance, Immediate(kHeapObjectTag));
     LoadImmediate(temp1, instance_size);
-    UpdateAllocationStatsWithSize(cid, temp1, temp2, space);
+    NOT_IN_PRODUCT(UpdateAllocationStatsWithSize(cid, temp1, temp2, space));
 
     // Initialize the tags.
     // instance: new object start as a tagged pointer.
diff --git a/runtime/vm/assembler_x64.cc b/runtime/vm/assembler_x64.cc
index 67bc237..c266bf6 100644
--- a/runtime/vm/assembler_x64.cc
+++ b/runtime/vm/assembler_x64.cc
@@ -2489,9 +2489,6 @@
 
 void Assembler::j(Condition condition, Label* label, bool near) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
-  if (VerifiedMemory::enabled()) {
-    near = Assembler::kFarJump;
-  }
   if (label->IsBound()) {
     static const int kShortSize = 2;
     static const int kLongSize = 6;
@@ -2546,9 +2543,6 @@
 
 void Assembler::jmp(Label* label, bool near) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
-  if (VerifiedMemory::enabled()) {
-    near = Assembler::kFarJump;
-  }
   if (label->IsBound()) {
     static const int kShortSize = 2;
     static const int kLongSize = 5;
@@ -2944,109 +2938,12 @@
 }
 
 
-void Assembler::VerifyHeapWord(const Address& address,
-                               FieldContent old_content) {
-#if defined(DEBUG)
-  switch (old_content) {
-    case kEmptyOrSmiOrNull:
-      VerifyUninitialized(address);
-      break;
-    case kHeapObjectOrSmi:
-      VerifyObjectOrSmi(address);
-      break;
-    case kOnlySmi:
-      VerifySmi(address);
-      break;
-  }
-#endif  // DEBUG
-  if (VerifiedMemory::enabled()) {
-    Register addr_reg = RDX;
-    Register value = RBX;
-    // Preserve registers.
-    pushq(addr_reg);
-    pushq(value);
-    leaq(addr_reg, address);
-    // ASSERT(*address == *(address + offset))
-    movq(value, Address(addr_reg, 0));
-    cmpq(value, Address(addr_reg, VerifiedMemory::offset()));
-    Label ok;
-    j(EQUAL, &ok);
-    static const bool kFixedLengthEncoding = true;
-    Stop("Write barrier verification failed", kFixedLengthEncoding);
-    Bind(&ok);
-    popq(value);
-    popq(addr_reg);
-  }
-}
-
-
-void Assembler::VerifiedWrite(const Address& dest,
-                              Register value,
-                              FieldContent old_content) {
-  VerifyHeapWord(dest, old_content);
-  movq(dest, value);
-  if (VerifiedMemory::enabled()) {
-    Register temp = (value == RDX) ? RCX : RDX;
-    pushq(temp);
-    leaq(temp, dest);
-    movq(Address(temp, VerifiedMemory::offset()), value);
-    popq(temp);
-  }
-}
-
-
-#if defined(DEBUG)
-void Assembler::VerifyObjectOrSmi(const Address& dest) {
-  Label ok;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  // Non-smi case: Verify object pointer is word-aligned when untagged.
-  COMPILE_ASSERT(kHeapObjectTag == 1);
-  testb(dest, Immediate((kWordSize - 1) - kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  static const bool kFixedLengthEncoding = true;
-  Stop("Expected heap object or Smi", kFixedLengthEncoding);
-  Bind(&ok);
-}
-
-
-void Assembler::VerifyUninitialized(const Address& dest) {
-  Label ok;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  // Non-smi case: Check for the special zap word or null.
-#if defined(DEBUG)
-  cmpq(dest, Immediate(Heap::kZap64Bits));
-  j(EQUAL, &ok, Assembler::kNearJump);
-#else
-#error Only supported in DEBUG mode
-#endif
-  LoadObject(TMP, Object::null_object());
-  cmpq(dest, TMP);
-  j(EQUAL, &ok, Assembler::kNearJump);
-  static const bool kFixedLengthEncoding = true;
-  Stop("Expected zapped, Smi or null", kFixedLengthEncoding);
-  Bind(&ok);
-}
-
-
-void Assembler::VerifySmi(const Address& dest, const char* stop_msg) {
-  Label done;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &done, Assembler::kNearJump);
-  static const bool kFixedLengthEncoding = true;
-  Stop(stop_msg, kFixedLengthEncoding);
-  Bind(&done);
-}
-#endif  // defined(DEBUG)
-
-
 void Assembler::StoreIntoObject(Register object,
                                 const Address& dest,
                                 Register value,
                                 bool can_value_be_smi) {
   ASSERT(object != value);
-  VerifiedWrite(dest, value, kHeapObjectOrSmi);
+  movq(dest, value);
   Label done;
   if (can_value_be_smi) {
     StoreIntoObjectFilter(object, value, &done);
@@ -3071,9 +2968,8 @@
 
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         Register value,
-                                         FieldContent old_content) {
-  VerifiedWrite(dest, value, old_content);
+                                         Register value) {
+  movq(dest, value);
 #if defined(DEBUG)
   Label done;
   pushq(value);
@@ -3088,22 +2984,10 @@
 
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         const Object& value,
-                                         FieldContent old_content) {
+                                         const Object& value) {
   ASSERT(!value.IsICData() || ICData::Cast(value).IsOriginal());
   ASSERT(!value.IsField() || Field::Cast(value).IsOriginal());
-  VerifyHeapWord(dest, old_content);
-  if (VerifiedMemory::enabled()) {
-    const Register temp = RCX;
-    pushq(temp);
-    leaq(temp, dest);
-    StoreObject(Address(temp, 0), value);
-    StoreObject(Address(temp, VerifiedMemory::offset()), value);
-    popq(temp);
-  } else {
-    StoreObject(dest, value);
-  }
-  // TODO(koda): Use 'object', verify that generational barrier's not needed.
+  StoreObject(dest, value);
 }
 
 
@@ -3115,39 +2999,21 @@
   Stop("New value must be Smi.");
   Bind(&done);
 #endif  // defined(DEBUG)
-  VerifiedWrite(dest, value, kOnlySmi);
+  movq(dest, value);
 }
 
 
 void Assembler::ZeroInitSmiField(const Address& dest) {
-  // TODO(koda): Add VerifySmi once we distinguish initalization.
-  VerifyHeapWord(dest, kEmptyOrSmiOrNull);
   Immediate zero(Smi::RawValue(0));
   movq(dest, zero);
-  if (VerifiedMemory::enabled()) {
-    Register temp = RCX;
-    pushq(temp);
-    leaq(temp, dest);
-    movq(Address(temp, VerifiedMemory::offset()), zero);
-    popq(temp);
-  }
 }
 
 
 void Assembler::IncrementSmiField(const Address& dest, int64_t increment) {
   // Note: FlowGraphCompiler::EdgeCounterIncrementSizeInBytes depends on
   // the length of this instruction sequence.
-  // TODO(koda): Add VerifySmi once we distinguish initalization.
-  VerifyHeapWord(dest, kOnlySmi);
   Immediate inc_imm(Smi::RawValue(increment));
   addq(dest, inc_imm);
-  if (VerifiedMemory::enabled()) {
-    Register temp = RCX;
-    pushq(temp);
-    leaq(temp, dest);
-    addq(Address(temp, VerifiedMemory::offset()), inc_imm);
-    popq(temp);
-  }
 }
 
 
@@ -3521,9 +3387,9 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cls.id(), failure, near_jump);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cls.id(), failure, near_jump));
     const intptr_t instance_size = cls.instance_size();
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     movq(temp, Address(THR, Thread::heap_offset()));
     movq(instance_reg, Address(temp, Heap::TopOffset(space)));
     addq(instance_reg, Immediate(instance_size));
@@ -3533,7 +3399,7 @@
     // Successfully allocated the object, now update top to point to
     // next object start and store the class in the class field of object.
     movq(Address(temp, Heap::TopOffset(space)), instance_reg);
-    UpdateAllocationStats(cls.id(), space);
+    NOT_IN_PRODUCT(UpdateAllocationStats(cls.id(), space));
     ASSERT(instance_size >= kHeapObjectTag);
     AddImmediate(instance_reg, Immediate(kHeapObjectTag - instance_size));
     uword tags = 0;
@@ -3560,8 +3426,8 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cid, failure, near_jump);
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, failure, near_jump));
+    Heap::Space space = Heap::kNew;
     movq(temp, Address(THR, Thread::heap_offset()));
     movq(instance, Address(temp, Heap::TopOffset(space)));
     movq(end_address, instance);
@@ -3579,7 +3445,7 @@
     // next object start and initialize the object.
     movq(Address(temp, Heap::TopOffset(space)), end_address);
     addq(instance, Immediate(kHeapObjectTag));
-    UpdateAllocationStatsWithSize(cid, instance_size, space);
+    NOT_IN_PRODUCT(UpdateAllocationStatsWithSize(cid, instance_size, space));
 
     // Initialize the tags.
     // instance: new object start as a tagged pointer.
diff --git a/runtime/vm/assembler_x64.h b/runtime/vm/assembler_x64.h
index 5f5aea4..f31d37b 100644
--- a/runtime/vm/assembler_x64.h
+++ b/runtime/vm/assembler_x64.h
@@ -686,12 +686,10 @@
     return 0xCCCCCCCCCCCCCCCC;
   }
 
-  // Note: verified_mem mode forces far jumps.
   void j(Condition condition, Label* label, bool near = kFarJump);
 
   void jmp(Register reg);
   void jmp(const Address& address);
-  // Note: verified_mem mode forces far jumps.
   void jmp(Label* label, bool near = kFarJump);
   void jmp(const ExternalLabel* label);
   void jmp(const StubEntry& stub_entry);
@@ -784,14 +782,6 @@
   void PushObject(const Object& object);
   void CompareObject(Register reg, const Object& object);
 
-  // When storing into a heap object field, knowledge of the previous content
-  // is expressed through these constants.
-  enum FieldContent {
-    kEmptyOrSmiOrNull,  // Empty = garbage/zapped in release/debug mode.
-    kHeapObjectOrSmi,
-    kOnlySmi,
-  };
-
   // Destroys value.
   void StoreIntoObject(Register object,  // Object we are storing into.
                        const Address& dest,  // Where we are storing into.
@@ -800,22 +790,10 @@
 
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                Register value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void InitializeFieldNoBarrier(Register object,
-                                const Address& dest,
-                                Register value) {
-    return StoreIntoObjectNoBarrier(object, dest, value, kEmptyOrSmiOrNull);
-  }
+                                Register value);
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                const Object& value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void InitializeFieldNoBarrier(Register object,
-                                const Address& dest,
-                                const Object& value) {
-    return StoreIntoObjectNoBarrier(object, dest, value, kEmptyOrSmiOrNull);
-  }
+                                const Object& value);
 
   // Stores a Smi value into a heap object field that always contains a Smi.
   void StoreIntoSmiField(const Address& dest, Register value);
@@ -1107,17 +1085,6 @@
   void StoreIntoObjectFilterNoSmi(Register object,
                                   Register value,
                                   Label* no_update);
-#if defined(DEBUG)
-  void VerifyUninitialized(const Address& address);
-  void VerifyObjectOrSmi(const Address& address);
-  void VerifySmi(const Address& address, const char* stop_msg = "Expected Smi");
-#endif  // DEBUG
-  // Like VerifiedMemory::Verify(address, kWordSize) and ::Write, but also,
-  // in DEBUG mode, verifies that 'address' has content of type 'old_content'.
-  void VerifyHeapWord(const Address& address, FieldContent old_content);
-  void VerifiedWrite(const Address& dest,
-                     Register value,
-                     FieldContent old_content);
   // Unaware of write barrier (use StoreInto* methods for storing to objects).
   void MoveImmediate(const Address& dst, const Immediate& imm);
 
diff --git a/runtime/vm/ast.h b/runtime/vm/ast.h
index 48b4a22..972263c 100644
--- a/runtime/vm/ast.h
+++ b/runtime/vm/ast.h
@@ -1243,11 +1243,13 @@
   StoreInstanceFieldNode(TokenPosition token_pos,
                          AstNode* instance,
                          const Field& field,
-                         AstNode* value)
+                         AstNode* value,
+                         bool is_initializer)
       : AstNode(token_pos),
         instance_(instance),
         field_(*MayCloneField(field)),
-        value_(value) {
+        value_(value),
+        is_initializer_(is_initializer) {
     ASSERT(instance_ != NULL);
     ASSERT(field_.IsZoneHandle());
     ASSERT(value_ != NULL);
@@ -1256,6 +1258,7 @@
   AstNode* instance() const { return instance_; }
   const Field& field() const { return field_; }
   AstNode* value() const { return value_; }
+  bool is_initializer() const { return is_initializer_; }
 
   virtual void VisitChildren(AstNodeVisitor* visitor) const {
     instance()->Visit(visitor);
@@ -1268,6 +1271,7 @@
   AstNode* instance_;
   const Field& field_;
   AstNode* value_;
+  const bool is_initializer_;
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(StoreInstanceFieldNode);
 };
diff --git a/runtime/vm/atomic.h b/runtime/vm/atomic.h
index c182a84..b6e5f28 100644
--- a/runtime/vm/atomic.h
+++ b/runtime/vm/atomic.h
@@ -77,6 +77,8 @@
 
 #if defined(TARGET_OS_ANDROID)
 #include "vm/atomic_android.h"
+#elif defined(TARGET_OS_FUCHSIA)
+#include "vm/atomic_fuchsia.h"
 #elif defined(TARGET_OS_LINUX)
 #include "vm/atomic_linux.h"
 #elif defined(TARGET_OS_MACOS)
diff --git a/runtime/vm/atomic_fuchsia.h b/runtime/vm/atomic_fuchsia.h
new file mode 100644
index 0000000..d6c0f57
--- /dev/null
+++ b/runtime/vm/atomic_fuchsia.h
@@ -0,0 +1,66 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#ifndef VM_ATOMIC_FUCHSIA_H_
+#define VM_ATOMIC_FUCHSIA_H_
+
+#if !defined(VM_ATOMIC_H_)
+#error Do not include atomic_fuchsia.h directly. Use atomic.h instead.
+#endif
+
+#if !defined(TARGET_OS_FUCHSIA)
+#error This file should only be included on Fuchsia builds.
+#endif
+
+#include "platform/assert.h"
+
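+// Bring-up stubs for the Fuchsia port: every atomic operation below is
+// currently UNIMPLEMENTED().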
+namespace dart {
+
+inline uintptr_t AtomicOperations::FetchAndIncrement(uintptr_t* p) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline void AtomicOperations::IncrementBy(intptr_t* p, intptr_t value) {
+  UNIMPLEMENTED();
+}
+
+
+inline void AtomicOperations::IncrementInt64By(int64_t* p, int64_t value) {
+  UNIMPLEMENTED();
+}
+
+
+inline uintptr_t AtomicOperations::FetchAndDecrement(uintptr_t* p) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline void AtomicOperations::DecrementBy(intptr_t* p, intptr_t value) {
+  UNIMPLEMENTED();
+}
+
+
+#if !defined(USING_SIMULATOR_ATOMICS)
+inline uword AtomicOperations::CompareAndSwapWord(uword* ptr,
+                                                  uword old_value,
+                                                  uword new_value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint32_t AtomicOperations::CompareAndSwapUint32(uint32_t* ptr,
+                                                       uint32_t old_value,
+                                                       uint32_t new_value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+#endif  // !defined(USING_SIMULATOR_ATOMICS)
+
+}  // namespace dart
+
+#endif  // VM_ATOMIC_FUCHSIA_H_
diff --git a/runtime/vm/become.cc b/runtime/vm/become.cc
index 2a8e491..13b7439 100644
--- a/runtime/vm/become.cc
+++ b/runtime/vm/become.cc
@@ -17,9 +17,6 @@
 
 namespace dart {
 
-DECLARE_FLAG(bool, trace_reload);
-
-
 ForwardingCorpse* ForwardingCorpse::AsForwarder(uword addr, intptr_t size) {
   ASSERT(size >= kObjectAlignment);
   ASSERT(Utils::IsAligned(size, kObjectAlignment));
@@ -151,49 +148,14 @@
 };
 
 
-#if defined(DEBUG)
-class NoForwardingCorpseTargetsVisitor : public ObjectPointerVisitor {
- public:
-  explicit NoForwardingCorpseTargetsVisitor(Isolate* isolate)
-      : ObjectPointerVisitor(isolate) { }
-
-  virtual void VisitPointers(RawObject** first, RawObject** last) {
-    for (RawObject** p = first; p <= last; p++) {
-      RawObject* target = *p;
-      if (target->IsHeapObject()) {
-        ASSERT(!target->IsForwardingCorpse());
-      }
-    }
-  }
-
- private:
-  DISALLOW_COPY_AND_ASSIGN(NoForwardingCorpseTargetsVisitor);
-};
-#endif
-
-
 void Become::ElementsForwardIdentity(const Array& before, const Array& after) {
   Thread* thread = Thread::Current();
   Isolate* isolate = thread->isolate();
   Heap* heap = isolate->heap();
 
-  {
-    // TODO(rmacnak): Investigate why this is necessary.
-    heap->CollectGarbage(Heap::kNew);
-  }
-
   TIMELINE_FUNCTION_GC_DURATION(thread, "Become::ElementsForwardIdentity");
   HeapIterationScope his;
 
-#if defined(DEBUG)
-  {
-    // There should be no pointers to free list elements / forwarding corpses.
-    NoForwardingCorpseTargetsVisitor visitor(isolate);
-    isolate->VisitObjectPointers(&visitor, true);
-    heap->VisitObjectPointers(&visitor);
-  }
-#endif
-
   // Setup forwarding pointers.
   ASSERT(before.Length() == after.Length());
   for (intptr_t i = 0; i < before.Length(); i++) {
@@ -241,22 +203,18 @@
     heap->VisitObjects(&object_visitor);
     pointer_visitor.VisitingObject(NULL);
 
-    TIR_Print("Performed %" Pd " heap and %" Pd " handle replacements\n",
-              pointer_visitor.count(),
-              handle_visitor.count());
+#if !defined(PRODUCT)
+    tds.SetNumArguments(2);
+    tds.FormatArgument(0, "Remapped objects", "%" Pd, before.Length());
+    tds.FormatArgument(1, "Remapped references", "%" Pd,
+                       pointer_visitor.count() + handle_visitor.count());
+#endif
   }
 
 #if defined(DEBUG)
   for (intptr_t i = 0; i < before.Length(); i++) {
     ASSERT(before.At(i) == after.At(i));
   }
-
-  {
-    // There should be no pointers to forwarding corpses.
-    NoForwardingCorpseTargetsVisitor visitor(isolate);
-    isolate->VisitObjectPointers(&visitor, true);
-    heap->VisitObjectPointers(&visitor);
-  }
 #endif
 }
 
diff --git a/runtime/vm/benchmark_test.cc b/runtime/vm/benchmark_test.cc
index 1d56af7..b630698 100644
--- a/runtime/vm/benchmark_test.cc
+++ b/runtime/vm/benchmark_test.cc
@@ -11,6 +11,7 @@
 #include "platform/assert.h"
 #include "platform/globals.h"
 
+#include "vm/clustered_snapshot.h"
 #include "vm/compiler_stats.h"
 #include "vm/dart_api_impl.h"
 #include "vm/stack_frame.h"
diff --git a/runtime/vm/bootstrap_natives.h b/runtime/vm/bootstrap_natives.h
index d214b1b..4bb5e93 100644
--- a/runtime/vm/bootstrap_natives.h
+++ b/runtime/vm/bootstrap_natives.h
@@ -56,6 +56,7 @@
   V(SendPortImpl_get_id, 1)                                                    \
   V(SendPortImpl_get_hashcode, 1)                                              \
   V(SendPortImpl_sendInternal_, 2)                                             \
+  V(Smi_bitAndFromSmi, 2)                                                      \
   V(Smi_shlFromInt, 2)                                                         \
   V(Smi_shrFromInt, 2)                                                         \
   V(Smi_bitNegate, 1)                                                          \
diff --git a/runtime/vm/class_finalizer.cc b/runtime/vm/class_finalizer.cc
index 027072b..6554d23 100644
--- a/runtime/vm/class_finalizer.cc
+++ b/runtime/vm/class_finalizer.cc
@@ -12,6 +12,7 @@
 #include "vm/log.h"
 #include "vm/object_store.h"
 #include "vm/symbols.h"
+#include "vm/timeline.h"
 
 namespace dart {
 
@@ -118,6 +119,8 @@
 // b) after the user classes are loaded (dart_api).
 bool ClassFinalizer::ProcessPendingClasses() {
   Thread* thread = Thread::Current();
+  NOT_IN_PRODUCT(TimelineDurationScope tds(thread, Timeline::GetIsolateStream(),
+                                           "ProcessPendingClasses"));
   Isolate* isolate = thread->isolate();
   ASSERT(isolate != NULL);
   HANDLESCOPE(thread);
diff --git a/runtime/vm/class_table.cc b/runtime/vm/class_table.cc
index 9432a1b..4549fb8 100644
--- a/runtime/vm/class_table.cc
+++ b/runtime/vm/class_table.cc
@@ -2,8 +2,9 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
-#include "vm/atomic.h"
 #include "vm/class_table.h"
+
+#include "vm/atomic.h"
 #include "vm/flags.h"
 #include "vm/freelist.h"
 #include "vm/growable_array.h"
@@ -143,10 +144,7 @@
 }
 
 
-void ClassTable::RegisterAt(intptr_t index, const Class& cls) {
-  ASSERT(Thread::Current()->IsMutatorThread());
-  ASSERT(index != kIllegalCid);
-  ASSERT(index >= kNumPredefinedCids);
+void ClassTable::AllocateIndex(intptr_t index) {
   if (index >= capacity_) {
     // Grow the capacity of the class table.
     // TODO(koda): Add ClassTable::Grow to share code.
@@ -171,15 +169,24 @@
     class_heap_stats_table_ = new_stats_table;
     ASSERT(capacity_increment_ >= 1);
   }
+
   ASSERT(table_[index] == 0);
-  cls.set_id(index);
-  table_[index] = cls.raw();
   if (index >= top_) {
     top_ = index + 1;
   }
 }
 
 
+void ClassTable::RegisterAt(intptr_t index, const Class& cls) {
+  ASSERT(Thread::Current()->IsMutatorThread());
+  ASSERT(index != kIllegalCid);
+  ASSERT(index >= kNumPredefinedCids);
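+  // Index allocation is factored into AllocateIndex so that the snapshot
+  // deserializer can reserve a class-table slot for a raw class pointer
+  // without going through a handle.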
+  AllocateIndex(index);
+  cls.set_id(index);
+  table_[index] = cls.raw();
+}
+
+
 #if defined(DEBUG)
 void ClassTable::Unregister(intptr_t index) {
   table_[index] = 0;
@@ -228,6 +235,7 @@
 }
 
 
+#ifndef PRODUCT
 void ClassTable::PrintToJSONObject(JSONObject* object) {
   if (!FLAG_support_service) {
     return;
@@ -244,6 +252,7 @@
     }
   }
 }
+#endif  // PRODUCT
 
 
 void ClassHeapStats::Initialize() {
@@ -323,6 +332,7 @@
 }
 
 
+#ifndef PRODUCT
 void ClassHeapStats::PrintToJSONObject(const Class& cls,
                                        JSONObject* obj) const {
   if (!FLAG_support_service) {
@@ -359,6 +369,7 @@
   obj->AddProperty("promotedInstances", promoted_count);
   obj->AddProperty("promotedBytes", promoted_size);
 }
+#endif
 
 
 void ClassTable::UpdateAllocatedNew(intptr_t cid, intptr_t size) {
@@ -472,7 +483,7 @@
 
 
 intptr_t ClassTable::StateOffsetFor(intptr_t cid) {
-  return ClassOffsetFor(cid)+ ClassHeapStats::state_offset();
+  return ClassOffsetFor(cid) + ClassHeapStats::state_offset();
 }
 
 
@@ -485,6 +496,7 @@
 }
 
 
+#ifndef PRODUCT
 void ClassTable::AllocationProfilePrintJSON(JSONStream* stream) {
   if (!FLAG_support_service) {
     return;
@@ -526,6 +538,7 @@
     }
   }
 }
+#endif
 
 
 void ClassTable::ResetAllocationAccumulators() {
diff --git a/runtime/vm/class_table.h b/runtime/vm/class_table.h
index af774a0..9a12fc3e 100644
--- a/runtime/vm/class_table.h
+++ b/runtime/vm/class_table.h
@@ -116,7 +116,9 @@
   void ResetAccumulator();
   void UpdatePromotedAfterNewGC();
   void UpdateSize(intptr_t instance_size);
+#ifndef PRODUCT
   void PrintToJSONObject(const Class& cls, JSONObject* obj) const;
+#endif
   void Verify();
 
   bool trace_allocation() const {
@@ -179,6 +181,8 @@
 
   void Register(const Class& cls);
 
+  void AllocateIndex(intptr_t index);
+
   void RegisterAt(intptr_t index, const Class& cls);
 
 #if defined(DEBUG)
@@ -190,8 +194,9 @@
   void Validate();
 
   void Print();
-
+#ifndef PRODUCT
   void PrintToJSONObject(JSONObject* object);
+#endif
 
   // Used by the generated code.
   static intptr_t table_offset() {
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
new file mode 100644
index 0000000..e938fef
--- /dev/null
+++ b/runtime/vm/clustered_snapshot.cc
@@ -0,0 +1,4998 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/clustered_snapshot.h"
+
+#include "platform/assert.h"
+#include "vm/bootstrap.h"
+#include "vm/class_finalizer.h"
+#include "vm/dart.h"
+#include "vm/dart_entry.h"
+#include "vm/exceptions.h"
+#include "vm/heap.h"
+#include "vm/lockers.h"
+#include "vm/longjump.h"
+#include "vm/native_entry.h"
+#include "vm/object.h"
+#include "vm/object_store.h"
+#include "vm/stub_code.h"
+#include "vm/symbols.h"
+#include "vm/timeline.h"
+#include "vm/version.h"
+
+namespace dart {
+
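+// Bump-allocates raw storage in old space; the caller is responsible for
+// initializing the object's header and fields.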
+static RawObject* AllocateUninitialized(PageSpace* old_space, intptr_t size) {
+  ASSERT(Utils::IsAligned(size, kObjectAlignment));
+  uword address = old_space->TryAllocateDataBumpLocked(size,
+                                                       PageSpace::kForceGrowth);
+  if (address == 0) {
+    FATAL("Out of memory");
+  }
+  return reinterpret_cast<RawObject*>(address + kHeapObjectTag);
+}
+
+
+void Deserializer::InitializeHeader(RawObject* raw,
+                                    intptr_t class_id,
+                                    intptr_t size,
+                                    bool is_vm_isolate,
+                                    bool is_canonical) {
+  ASSERT(Utils::IsAligned(size, kObjectAlignment));
+  uword tags = 0;
+  tags = RawObject::ClassIdTag::update(class_id, tags);
+  tags = RawObject::SizeTag::update(size, tags);
+  tags = RawObject::VMHeapObjectTag::update(is_vm_isolate, tags);
+  tags = RawObject::CanonicalObjectTag::update(is_canonical, tags);
+  raw->ptr()->tags_ = tags;
+}
+
+
+class ClassSerializationCluster : public SerializationCluster {
+ public:
+  explicit ClassSerializationCluster(intptr_t num_cids) :
+      predefined_(kNumPredefinedCids), objects_(num_cids) { }
+  virtual ~ClassSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawClass* cls = Class::RawCast(object);
+    intptr_t class_id = cls->ptr()->id_;
+
+    if (class_id < kNumPredefinedCids) {
+      // These classes are allocated by Object::Init or Object::InitOnce, so the
+      // deserializer must find them in the class table instead of allocating
+      // them.
+      predefined_.Add(cls);
+    } else {
+      objects_.Add(cls);
+    }
+
+    RawObject** from = cls->from();
+    RawObject** to = cls->to_snapshot(s->kind());
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
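+  // Serialization is two-phase: WriteAlloc emits what the deserializer needs
+  // to allocate every object in the cluster and assign it a ref id, and
+  // WriteFill then writes the objects' contents in the same order.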
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kClassCid);
+    intptr_t count = predefined_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawClass* cls = predefined_[i];
+      intptr_t class_id = cls->ptr()->id_;
+      s->Write<intptr_t>(class_id);
+      s->AssignRef(cls);
+    }
+    count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawClass* cls = objects_[i];
+      s->AssignRef(cls);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = predefined_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      WriteClass(s, predefined_[i]);
+    }
+    count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      WriteClass(s, objects_[i]);
+    }
+  }
+
+  void WriteClass(Serializer* s, RawClass* cls) {
+    Snapshot::Kind kind = s->kind();
+    RawObject** from = cls->from();
+    RawObject** to = cls->to_snapshot(kind);
+    for (RawObject** p = from; p <= to; p++) {
+      s->WriteRef(*p);
+    }
+    intptr_t class_id = cls->ptr()->id_;
+    s->WriteCid(class_id);
+    s->Write<int32_t>(cls->ptr()->instance_size_in_words_);
+    s->Write<int32_t>(cls->ptr()->next_field_offset_in_words_);
+    s->Write<int32_t>(cls->ptr()->type_arguments_field_offset_in_words_);
+    s->Write<uint16_t>(cls->ptr()->num_type_arguments_);
+    s->Write<uint16_t>(cls->ptr()->num_own_type_arguments_);
+    s->Write<uint16_t>(cls->ptr()->num_native_fields_);
+    s->WriteTokenPosition(cls->ptr()->token_pos_);
+    s->Write<uint16_t>(cls->ptr()->state_bits_);
+  }
+
+ private:
+  GrowableArray<RawClass*> predefined_;
+  GrowableArray<RawClass*> objects_;
+};
+
+
+class ClassDeserializationCluster : public DeserializationCluster {
+ public:
+  ClassDeserializationCluster() { }
+  virtual ~ClassDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    predefined_start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    ClassTable* table = d->isolate()->class_table();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t class_id = d->Read<intptr_t>();
+      ASSERT(table->HasValidClassAt(class_id));
+      RawClass* cls = table->At(class_id);
+      ASSERT(cls != NULL);
+      d->AssignRef(cls);
+    }
+    predefined_stop_index_ = d->next_index();
+
+    start_index_ = d->next_index();
+    count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         Class::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    Snapshot::Kind kind = d->kind();
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+    ClassTable* table = d->isolate()->class_table();
+
+    for (intptr_t id = predefined_start_index_;
+         id < predefined_stop_index_;
+         id++) {
+      RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id));
+      RawObject** from = cls->from();
+      RawObject** to_snapshot = cls->to_snapshot(kind);
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+
+      intptr_t class_id = d->ReadCid();
+      cls->ptr()->id_ = class_id;
+      cls->ptr()->instance_size_in_words_ = d->Read<int32_t>();
+      cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>();
+      cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>();
+      cls->ptr()->num_type_arguments_ = d->Read<uint16_t>();
+      cls->ptr()->num_own_type_arguments_ = d->Read<uint16_t>();
+      cls->ptr()->num_native_fields_ = d->Read<uint16_t>();
+      cls->ptr()->token_pos_ = d->ReadTokenPosition();
+      cls->ptr()->state_bits_ = d->Read<uint16_t>();
+    }
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id));
+      Deserializer::InitializeHeader(cls, kClassCid, Class::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = cls->from();
+      RawObject** to_snapshot = cls->to_snapshot(kind);
+      RawObject** to = cls->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+
+      intptr_t class_id = d->ReadCid();
+
+      ASSERT(class_id >= kNumPredefinedCids);
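+      // The handle vtable is a C++ pointer and cannot come from the
+      // snapshot; reinitialize it from a stack-allocated Instance handle.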
+      Instance fake;
+      cls->ptr()->handle_vtable_ = fake.vtable();
+
+      cls->ptr()->id_ = class_id;
+      cls->ptr()->instance_size_in_words_ = d->Read<int32_t>();
+      cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>();
+      cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>();
+      cls->ptr()->num_type_arguments_ = d->Read<uint16_t>();
+      cls->ptr()->num_own_type_arguments_ = d->Read<uint16_t>();
+      cls->ptr()->num_native_fields_ = d->Read<uint16_t>();
+      cls->ptr()->token_pos_ = d->ReadTokenPosition();
+      cls->ptr()->state_bits_ = d->Read<uint16_t>();
+
+      table->AllocateIndex(class_id);
+      table->SetAt(class_id, cls);
+    }
+  }
+
+  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
+        Timeline::GetIsolateStream(), "PostLoadClass"));
+
+    Class& cls = Class::Handle(zone);
+    for (intptr_t i = predefined_start_index_;
+         i < predefined_stop_index_;
+         i++) {
+      cls ^= refs.At(i);
+      cls.RehashConstants(zone);
+    }
+    for (intptr_t i = start_index_; i < stop_index_; i++) {
+      cls ^= refs.At(i);
+      cls.RehashConstants(zone);
+    }
+  }
+
+ private:
+  intptr_t predefined_start_index_;
+  intptr_t predefined_stop_index_;
+};
+
+
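+// An UnresolvedClass stands in for a class reference that has not been
+// resolved yet; it consists only of object references plus a token position.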
+class UnresolvedClassSerializationCluster : public SerializationCluster {
+ public:
+  UnresolvedClassSerializationCluster() { }
+  virtual ~UnresolvedClassSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawUnresolvedClass* cls = UnresolvedClass::RawCast(object);
+    objects_.Add(cls);
+
+    RawObject** from = cls->from();
+    RawObject** to = cls->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kUnresolvedClassCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawUnresolvedClass* cls = objects_[i];
+      s->AssignRef(cls);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawUnresolvedClass* cls = objects_[i];
+      RawObject** from = cls->from();
+      RawObject** to = cls->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->WriteTokenPosition(cls->ptr()->token_pos_);
+    }
+  }
+
+ private:
+  GrowableArray<RawUnresolvedClass*> objects_;
+};
+
+
+class UnresolvedClassDeserializationCluster : public DeserializationCluster {
+ public:
+  UnresolvedClassDeserializationCluster() { }
+  virtual ~UnresolvedClassDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         UnresolvedClass::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawUnresolvedClass* cls =
+          reinterpret_cast<RawUnresolvedClass*>(d->Ref(id));
+      Deserializer::InitializeHeader(cls, kUnresolvedClassCid,
+                                     UnresolvedClass::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = cls->from();
+      RawObject** to = cls->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      cls->ptr()->token_pos_ = d->ReadTokenPosition();
+    }
+  }
+};
+
+
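+// TypeArguments vectors are variable-length. The length is written in the
+// alloc section so the deserializer can size each allocation, and written
+// again in the fill section because the two sections are consumed in
+// separate passes.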
+class TypeArgumentsSerializationCluster : public SerializationCluster {
+ public:
+  TypeArgumentsSerializationCluster() { }
+  virtual ~TypeArgumentsSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTypeArguments* type_args = TypeArguments::RawCast(object);
+    objects_.Add(type_args);
+
+    s->Push(type_args->ptr()->instantiations_);
+    intptr_t length = Smi::Value(type_args->ptr()->length_);
+    for (intptr_t i = 0; i < length; i++) {
+      s->Push(type_args->ptr()->types()[i]);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTypeArgumentsCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeArguments* type_args = objects_[i];
+      intptr_t length = Smi::Value(type_args->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->AssignRef(type_args);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeArguments* type_args = objects_[i];
+      intptr_t length = Smi::Value(type_args->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->Write<bool>(type_args->IsCanonical());
+      intptr_t hash = Smi::Value(type_args->ptr()->hash_);
+      s->Write<int32_t>(hash);
+      s->WriteRef(type_args->ptr()->instantiations_);
+      for (intptr_t j = 0; j < length; j++) {
+        s->WriteRef(type_args->ptr()->types()[j]);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawTypeArguments*> objects_;
+};
+
+
+class TypeArgumentsDeserializationCluster : public DeserializationCluster {
+ public:
+  TypeArgumentsDeserializationCluster() { }
+  virtual ~TypeArgumentsDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         TypeArguments::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTypeArguments* type_args =
+          reinterpret_cast<RawTypeArguments*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(type_args, kTypeArgumentsCid,
+                                     TypeArguments::InstanceSize(length),
+                                     is_vm_object, is_canonical);
+      type_args->ptr()->length_ = Smi::New(length);
+      type_args->ptr()->hash_ = Smi::New(d->Read<int32_t>());
+      type_args->ptr()->instantiations_ =
+          reinterpret_cast<RawArray*>(d->ReadRef());
+      for (intptr_t j = 0; j < length; j++) {
+        type_args->ptr()->types()[j] =
+            reinterpret_cast<RawAbstractType*>(d->ReadRef());
+      }
+    }
+  }
+};
+
+
+class PatchClassSerializationCluster : public SerializationCluster {
+ public:
+  PatchClassSerializationCluster() { }
+  virtual ~PatchClassSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawPatchClass* cls = PatchClass::RawCast(object);
+    objects_.Add(cls);
+
+    RawObject** from = cls->from();
+    RawObject** to = cls->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kPatchClassCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawPatchClass* cls = objects_[i];
+      s->AssignRef(cls);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawPatchClass* cls = objects_[i];
+      RawObject** from = cls->from();
+      RawObject** to = cls->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawPatchClass*> objects_;
+};
+
+
+class PatchClassDeserializationCluster : public DeserializationCluster {
+ public:
+  PatchClassDeserializationCluster() { }
+  virtual ~PatchClassDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         PatchClass::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawPatchClass* cls = reinterpret_cast<RawPatchClass*>(d->Ref(id));
+      Deserializer::InitializeHeader(cls, kPatchClassCid,
+                                     PatchClass::InstanceSize(), is_vm_object);
+      RawObject** from = cls->from();
+      RawObject** to = cls->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
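+// Which Function fields survive depends on the snapshot kind: kAppNoJIT
+// (precompiled) snapshots keep only the installed code_, while kAppWithJIT
+// snapshots keep unoptimized_code_ and ic_data_array_ so compilation state
+// can be rebuilt at load time. Fields past to_snapshot() are nulled out by
+// the deserializer.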
+class FunctionSerializationCluster : public SerializationCluster {
+ public:
+  FunctionSerializationCluster() { }
+  virtual ~FunctionSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawFunction* func = Function::RawCast(object);
+    objects_.Add(func);
+
+    RawObject** from = func->from();
+    RawObject** to = func->to_snapshot();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+    if (s->kind() == Snapshot::kAppNoJIT) {
+      s->Push(func->ptr()->code_);
+    } else if (s->kind() == Snapshot::kAppWithJIT) {
+      s->Push(func->ptr()->unoptimized_code_);
+      s->Push(func->ptr()->ic_data_array_);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kFunctionCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawFunction* func = objects_[i];
+      s->AssignRef(func);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    Snapshot::Kind kind = s->kind();
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawFunction* func = objects_[i];
+      RawObject** from = func->from();
+      RawObject** to = func->to_snapshot();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      if (kind == Snapshot::kAppNoJIT) {
+        s->WriteRef(func->ptr()->code_);
+      } else if (kind == Snapshot::kAppWithJIT) {
+        s->WriteRef(func->ptr()->unoptimized_code_);
+        s->WriteRef(func->ptr()->ic_data_array_);
+      }
+
+      s->WriteTokenPosition(func->ptr()->token_pos_);
+      s->WriteTokenPosition(func->ptr()->end_token_pos_);
+      s->Write<int16_t>(func->ptr()->num_fixed_parameters_);
+      s->Write<int16_t>(func->ptr()->num_optional_parameters_);
+      s->Write<uint32_t>(func->ptr()->kind_tag_);
+      if (kind == Snapshot::kAppNoJIT) {
+        // Omit fields used to support de/reoptimization.
+      } else {
+        bool is_optimized = Code::IsOptimized(func->ptr()->code_);
+        if (is_optimized) {
+          s->Write<int32_t>(FLAG_optimization_counter_threshold);
+        } else {
+          s->Write<int32_t>(0);
+        }
+        s->Write<int8_t>(func->ptr()->deoptimization_counter_);
+        s->Write<uint16_t>(func->ptr()->optimized_instruction_count_);
+        s->Write<uint16_t>(func->ptr()->optimized_call_site_count_);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawFunction*> objects_;
+};
+
+
+class FunctionDeserializationCluster : public DeserializationCluster {
+ public:
+  FunctionDeserializationCluster() { }
+  virtual ~FunctionDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         Function::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    Snapshot::Kind kind = d->kind();
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawFunction* func = reinterpret_cast<RawFunction*>(d->Ref(id));
+      Deserializer::InitializeHeader(func, kFunctionCid,
+                                     Function::InstanceSize(), is_vm_object);
+      RawObject** from = func->from();
+      RawObject** to_snapshot = func->to_snapshot();
+      RawObject** to = func->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+      if (kind == Snapshot::kAppNoJIT) {
+        func->ptr()->code_ = reinterpret_cast<RawCode*>(d->ReadRef());
+      } else if (kind == Snapshot::kAppWithJIT) {
+        func->ptr()->unoptimized_code_ =
+            reinterpret_cast<RawCode*>(d->ReadRef());
+        func->ptr()->ic_data_array_ = reinterpret_cast<RawArray*>(d->ReadRef());
+      }
+
+#if defined(DEBUG)
+      func->ptr()->entry_point_ = 0;
+#endif
+
+      func->ptr()->token_pos_ = d->ReadTokenPosition();
+      func->ptr()->end_token_pos_ = d->ReadTokenPosition();
+      func->ptr()->num_fixed_parameters_ = d->Read<int16_t>();
+      func->ptr()->num_optional_parameters_ = d->Read<int16_t>();
+      func->ptr()->kind_tag_ = d->Read<uint32_t>();
+      if (kind == Snapshot::kAppNoJIT) {
+        // Omit fields used to support de/reoptimization.
+      } else {
+        func->ptr()->usage_counter_ = d->Read<int32_t>();
+        func->ptr()->deoptimization_counter_ = d->Read<int8_t>();
+        func->ptr()->optimized_instruction_count_ = d->Read<uint16_t>();
+        func->ptr()->optimized_call_site_count_ = d->Read<uint16_t>();
+      }
+    }
+  }
+
+  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
+        Timeline::GetIsolateStream(), "PostLoadFunction"));
+
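+    // Reconnect functions to executable code. Precompiled snapshots cache
+    // the code's entry point directly in the function; JIT snapshots
+    // reinstall the unoptimized code when present; all other kinds start
+    // cold with cleared code and IC data.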
+    if (kind == Snapshot::kAppNoJIT) {
+      Function& func = Function::Handle(zone);
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        func ^= refs.At(i);
+        ASSERT(func.raw()->ptr()->code_->IsCode());
+        uword entry_point = func.raw()->ptr()->code_->ptr()->entry_point_;
+        ASSERT(entry_point != 0);
+        func.raw()->ptr()->entry_point_ = entry_point;
+      }
+    } else if (kind == Snapshot::kAppWithJIT) {
+      Function& func = Function::Handle(zone);
+      Code& code = Code::Handle(zone);
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        func ^= refs.At(i);
+        code ^= func.unoptimized_code();
+        if (!code.IsNull()) {
+          func.SetInstructions(code);
+          func.set_was_compiled(true);
+        } else {
+          func.ClearCode();
+          func.set_was_compiled(false);
+        }
+      }
+    } else {
+      Function& func = Function::Handle(zone);
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        func ^= refs.At(i);
+        func.ClearICDataArray();
+        func.ClearCode();
+        func.set_was_compiled(false);
+      }
+    }
+  }
+};
+
+
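+// ClosureData and RedirectionData consist entirely of object references, so
+// their clusters simply walk from() through to().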
+class ClosureDataSerializationCluster : public SerializationCluster {
+ public:
+  ClosureDataSerializationCluster() { }
+  virtual ~ClosureDataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawClosureData* data = ClosureData::RawCast(object);
+    objects_.Add(data);
+
+    RawObject** from = data->from();
+    RawObject** to = data->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kClosureDataCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawClosureData* data = objects_[i];
+      s->AssignRef(data);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawClosureData* data = objects_[i];
+      RawObject** from = data->from();
+      RawObject** to = data->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawClosureData*> objects_;
+};
+
+
+class ClosureDataDeserializationCluster : public DeserializationCluster {
+ public:
+  ClosureDataDeserializationCluster() { }
+  virtual ~ClosureDataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         ClosureData::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawClosureData* data = reinterpret_cast<RawClosureData*>(d->Ref(id));
+      Deserializer::InitializeHeader(data, kClosureDataCid,
+                                     ClosureData::InstanceSize(), is_vm_object);
+      RawObject** from = data->from();
+      RawObject** to = data->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class RedirectionDataSerializationCluster : public SerializationCluster {
+ public:
+  RedirectionDataSerializationCluster() { }
+  virtual ~RedirectionDataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawRedirectionData* data = RedirectionData::RawCast(object);
+    objects_.Add(data);
+
+    RawObject** from = data->from();
+    RawObject** to = data->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kRedirectionDataCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawRedirectionData* data = objects_[i];
+      s->AssignRef(data);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawRedirectionData* data = objects_[i];
+      RawObject** from = data->from();
+      RawObject** to = data->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawRedirectionData*> objects_;
+};
+
+
+class RedirectionDataDeserializationCluster : public DeserializationCluster {
+ public:
+  RedirectionDataDeserializationCluster() { }
+  virtual ~RedirectionDataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         RedirectionData::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawRedirectionData* data =
+          reinterpret_cast<RawRedirectionData*>(d->Ref(id));
+      Deserializer::InitializeHeader(data, kRedirectionDataCid,
+                                     RedirectionData::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = data->from();
+      RawObject** to = data->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
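+// A Field's value_ union holds either the static value (static fields) or
+// the in-instance offset (instance fields), and initializer_ holds either a
+// precompiled initializer function (kAppNoJIT) or the saved initial value,
+// so both Trace and WriteFill dispatch on kind_bits_ and the snapshot kind.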
+class FieldSerializationCluster : public SerializationCluster {
+ public:
+  FieldSerializationCluster() { }
+  virtual ~FieldSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawField* field = Field::RawCast(object);
+    objects_.Add(field);
+
+    Snapshot::Kind kind = s->kind();
+
+    s->Push(field->ptr()->name_);
+    s->Push(field->ptr()->owner_);
+    s->Push(field->ptr()->type_);
+    // Write out the initial static value or field offset.
+    if (Field::StaticBit::decode(field->ptr()->kind_bits_)) {
+      if (kind == Snapshot::kAppNoJIT) {
+        // For precompiled static fields, the value was already reset and
+        // initializer_ now contains a Function.
+        s->Push(field->ptr()->value_.static_value_);
+      } else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) {
+        // Do not reset const fields.
+        s->Push(field->ptr()->value_.static_value_);
+      } else {
+        // Otherwise, for static fields we write out the initial static value.
+        s->Push(field->ptr()->initializer_.saved_value_);
+      }
+    } else {
+      s->Push(field->ptr()->value_.offset_);
+    }
+    // Write out the initializer function or saved initial value.
+    if (kind == Snapshot::kAppNoJIT) {
+      s->Push(field->ptr()->initializer_.precompiled_);
+    } else {
+      s->Push(field->ptr()->initializer_.saved_value_);
+    }
+    if (kind != Snapshot::kAppNoJIT) {
+      // Write out the guarded list length.
+      s->Push(field->ptr()->guarded_list_length_);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kFieldCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawField* field = objects_[i];
+      s->AssignRef(field);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    Snapshot::Kind kind = s->kind();
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawField* field = objects_[i];
+
+      s->WriteRef(field->ptr()->name_);
+      s->WriteRef(field->ptr()->owner_);
+      s->WriteRef(field->ptr()->type_);
+      // Write out the initial static value or field offset.
+      if (Field::StaticBit::decode(field->ptr()->kind_bits_)) {
+        if (kind == Snapshot::kAppNoJIT) {
+          // For precompiled static fields, the value was already reset and
+          // initializer_ now contains a Function.
+          s->WriteRef(field->ptr()->value_.static_value_);
+        } else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) {
+          // Do not reset const fields.
+          s->WriteRef(field->ptr()->value_.static_value_);
+        } else {
+          // Otherwise, for static fields we write out the initial static value.
+          s->WriteRef(field->ptr()->initializer_.saved_value_);
+        }
+      } else {
+        s->WriteRef(field->ptr()->value_.offset_);
+      }
+      // Write out the initializer function or saved initial value.
+      if (kind == Snapshot::kAppNoJIT) {
+        s->WriteRef(field->ptr()->initializer_.precompiled_);
+      } else {
+        s->WriteRef(field->ptr()->initializer_.saved_value_);
+      }
+      if (kind != Snapshot::kAppNoJIT) {
+        // Write out the guarded list length.
+        s->WriteRef(field->ptr()->guarded_list_length_);
+        s->WriteTokenPosition(field->ptr()->token_pos_);
+        s->WriteCid(field->ptr()->guarded_cid_);
+        s->WriteCid(field->ptr()->is_nullable_);
+      }
+      s->Write<uint8_t>(field->ptr()->kind_bits_);
+    }
+  }
+
+ private:
+  GrowableArray<RawField*> objects_;
+};
+
+
+class FieldDeserializationCluster : public DeserializationCluster {
+ public:
+  FieldDeserializationCluster() { }
+  virtual ~FieldDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Field::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    Snapshot::Kind kind = d->kind();
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawField* field = reinterpret_cast<RawField*>(d->Ref(id));
+      Deserializer::InitializeHeader(field, kFieldCid,
+                                     Field::InstanceSize(), is_vm_object);
+      RawObject** from = field->from();
+      RawObject** to_snapshot = field->to_snapshot(kind);
+      RawObject** to = field->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+
+      if (kind != Snapshot::kAppNoJIT) {
+        field->ptr()->token_pos_ = d->ReadTokenPosition();
+        field->ptr()->guarded_cid_ = d->ReadCid();
+        field->ptr()->is_nullable_ = d->ReadCid();
+      }
+      field->ptr()->kind_bits_ = d->Read<uint8_t>();
+    }
+  }
+
+  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
+        Timeline::GetIsolateStream(), "PostLoadField"));
+
+    Field& field = Field::Handle(zone);
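+    // When field guards are disabled, widen every guard to admit any value;
+    // otherwise recompute the in-object offset used by list length guards.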
+    if (!FLAG_use_field_guards) {
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        field ^= refs.At(i);
+        field.set_guarded_cid(kDynamicCid);
+        field.set_is_nullable(true);
+        field.set_guarded_list_length(Field::kNoFixedLength);
+        field.set_guarded_list_length_in_object_offset(
+            Field::kUnknownLengthOffset);
+      }
+    } else {
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        field ^= refs.At(i);
+        field.InitializeGuardedListLengthInObjectOffset();
+      }
+    }
+  }
+};
+
+
+class LiteralTokenSerializationCluster : public SerializationCluster {
+ public:
+  LiteralTokenSerializationCluster() { }
+  virtual ~LiteralTokenSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawLiteralToken* token = LiteralToken::RawCast(object);
+    objects_.Add(token);
+
+    RawObject** from = token->from();
+    RawObject** to = token->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kLiteralTokenCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawLiteralToken* token = objects_[i];
+      s->AssignRef(token);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawLiteralToken* token = objects_[i];
+      RawObject** from = token->from();
+      RawObject** to = token->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->Write<int32_t>(token->ptr()->kind_);
+    }
+  }
+
+ private:
+  GrowableArray<RawLiteralToken*> objects_;
+};
+
+
+class LiteralTokenDeserializationCluster : public DeserializationCluster {
+ public:
+  LiteralTokenDeserializationCluster() { }
+  virtual ~LiteralTokenDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         LiteralToken::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawLiteralToken* token = reinterpret_cast<RawLiteralToken*>(d->Ref(id));
+      Deserializer::InitializeHeader(token, kLiteralTokenCid,
+                                     LiteralToken::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = token->from();
+      RawObject** to = token->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      token->ptr()->kind_ = static_cast<Token::Kind>(d->Read<int32_t>());
+    }
+  }
+};
+
+
+class TokenStreamSerializationCluster : public SerializationCluster {
+ public:
+  TokenStreamSerializationCluster() { }
+  virtual ~TokenStreamSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTokenStream* stream = TokenStream::RawCast(object);
+    objects_.Add(stream);
+
+    RawObject** from = stream->from();
+    RawObject** to = stream->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTokenStreamCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTokenStream* stream = objects_[i];
+      s->AssignRef(stream);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawTokenStream* stream = objects_[i];
+      RawObject** from = stream->from();
+      RawObject** to = stream->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawTokenStream*> objects_;
+};
+
+
+class TokenStreamDeserializationCluster : public DeserializationCluster {
+ public:
+  TokenStreamDeserializationCluster() { }
+  virtual ~TokenStreamDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         TokenStream::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTokenStream* stream = reinterpret_cast<RawTokenStream*>(d->Ref(id));
+      Deserializer::InitializeHeader(stream, kTokenStreamCid,
+                                     TokenStream::InstanceSize(), is_vm_object);
+      RawObject** from = stream->from();
+      RawObject** to = stream->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
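+// Scripts are cut off at to_snapshot(): the load timestamp is transient and
+// is reset to zero on the way in, while the line/column offsets and the
+// script kind are written as raw integers.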
+class ScriptSerializationCluster : public SerializationCluster {
+ public:
+  ScriptSerializationCluster() { }
+  virtual ~ScriptSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawScript* script = Script::RawCast(object);
+    objects_.Add(script);
+
+    RawObject** from = script->from();
+    RawObject** to = script->to_snapshot(s->kind());
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kScriptCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawScript* script = objects_[i];
+      s->AssignRef(script);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    Snapshot::Kind kind = s->kind();
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawScript* script = objects_[i];
+      RawObject** from = script->from();
+      RawObject** to = script->to_snapshot(kind);
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+
+      s->Write<int32_t>(script->ptr()->line_offset_);
+      s->Write<int32_t>(script->ptr()->col_offset_);
+      s->Write<int8_t>(script->ptr()->kind_);
+    }
+  }
+
+ private:
+  GrowableArray<RawScript*> objects_;
+};
+
+
+class ScriptDeserializationCluster : public DeserializationCluster {
+ public:
+  ScriptDeserializationCluster() { }
+  virtual ~ScriptDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Script::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    Snapshot::Kind kind = d->kind();
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawScript* script = reinterpret_cast<RawScript*>(d->Ref(id));
+      Deserializer::InitializeHeader(script, kScriptCid,
+                                     Script::InstanceSize(), is_vm_object);
+      RawObject** from = script->from();
+      RawObject** to_snapshot = script->to_snapshot(kind);
+      RawObject** to = script->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+
+      script->ptr()->line_offset_ = d->Read<int32_t>();
+      script->ptr()->col_offset_ = d->Read<int32_t>();
+      script->ptr()->kind_ = d->Read<int8_t>();
+      script->ptr()->load_timestamp_ = 0;
+    }
+  }
+};
+
+
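+// Libraries carry transient state (native entry resolvers, caches past
+// to_snapshot()) that is not serialized; the deserializer reinitializes it
+// explicitly and marks each library as being part of a full snapshot.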
+class LibrarySerializationCluster : public SerializationCluster {
+ public:
+  LibrarySerializationCluster() { }
+  virtual ~LibrarySerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawLibrary* lib = Library::RawCast(object);
+    objects_.Add(lib);
+
+    RawObject** from = lib->from();
+    RawObject** to = lib->to_snapshot();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kLibraryCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawLibrary* lib = objects_[i];
+      s->AssignRef(lib);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawLibrary* lib = objects_[i];
+      RawObject** from = lib->from();
+      RawObject** to = lib->to_snapshot();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+
+      s->Write<int32_t>(lib->ptr()->index_);
+      s->Write<uint16_t>(lib->ptr()->num_imports_);
+      s->Write<int8_t>(lib->ptr()->load_state_);
+      s->Write<bool>(lib->ptr()->corelib_imported_);
+      s->Write<bool>(lib->ptr()->is_dart_scheme_);
+      s->Write<bool>(lib->ptr()->debuggable_);
+    }
+  }
+
+ private:
+  GrowableArray<RawLibrary*> objects_;
+};
+
+
+class LibraryDeserializationCluster : public DeserializationCluster {
+ public:
+  LibraryDeserializationCluster() { }
+  virtual ~LibraryDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Library::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawLibrary* lib = reinterpret_cast<RawLibrary*>(d->Ref(id));
+      Deserializer::InitializeHeader(lib, kLibraryCid,
+                                     Library::InstanceSize(), is_vm_object);
+      RawObject** from = lib->from();
+      RawObject** to_snapshot = lib->to_snapshot();
+      RawObject** to = lib->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+
+      lib->ptr()->native_entry_resolver_ = NULL;
+      lib->ptr()->native_entry_symbol_resolver_ = NULL;
+      lib->ptr()->index_ = d->Read<int32_t>();
+      lib->ptr()->num_imports_ = d->Read<uint16_t>();
+      lib->ptr()->load_state_ = d->Read<int8_t>();
+      lib->ptr()->corelib_imported_ = d->Read<bool>();
+      lib->ptr()->is_dart_scheme_ = d->Read<bool>();
+      lib->ptr()->debuggable_ = d->Read<bool>();
+      lib->ptr()->is_in_fullsnapshot_ = true;
+    }
+  }
+
+  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
+    // TODO(rmacnak): This is surprisingly slow, roughly 20% of deserialization
+    // time for the JIT. Maybe make the lookups happy with a null?
+
+    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
+        Timeline::GetIsolateStream(), "PostLoadLibrary"));
+
+    Library& lib = Library::Handle(zone);
+    for (intptr_t i = start_index_; i < stop_index_; i++) {
+      lib ^= refs.At(i);
+      const intptr_t kInitialNameCacheSize = 64;
+      lib.InitResolvedNamesCache(kInitialNameCacheSize);
+    }
+  }
+};
+
+
+class NamespaceSerializationCluster : public SerializationCluster {
+ public:
+  NamespaceSerializationCluster() { }
+  virtual ~NamespaceSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawNamespace* ns = Namespace::RawCast(object);
+    objects_.Add(ns);
+
+    RawObject** from = ns->from();
+    RawObject** to = ns->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kNamespaceCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawNamespace* ns = objects_[i];
+      s->AssignRef(ns);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawNamespace* ns = objects_[i];
+      RawObject** from = ns->from();
+      RawObject** to = ns->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawNamespace*> objects_;
+};
+
+
+class NamespaceDeserializationCluster : public DeserializationCluster {
+ public:
+  NamespaceDeserializationCluster() { }
+  virtual ~NamespaceDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Namespace::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawNamespace* ns = reinterpret_cast<RawNamespace*>(d->Ref(id));
+      Deserializer::InitializeHeader(ns, kNamespaceCid,
+                                     Namespace::InstanceSize(), is_vm_object);
+      RawObject** from = ns->from();
+      RawObject** to = ns->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
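+// Code objects are written without their machine code: each Code refers to
+// its instructions by an offset into the snapshot's text section (see
+// GetTextOffset / GetInstructionsAt), so instructions are not part of the
+// clustered object stream.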
+class CodeSerializationCluster : public SerializationCluster {
+ public:
+  CodeSerializationCluster() { }
+  virtual ~CodeSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawCode* code = Code::RawCast(object);
+    objects_.Add(code);
+
+    s->Push(code->ptr()->object_pool_);
+    s->Push(code->ptr()->owner_);
+    s->Push(code->ptr()->exception_handlers_);
+    s->Push(code->ptr()->pc_descriptors_);
+    s->Push(code->ptr()->stackmaps_);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kCodeCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawCode* code = objects_[i];
+      s->AssignRef(code);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    Snapshot::Kind kind = s->kind();
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawCode* code = objects_[i];
+
+      intptr_t pointer_offsets_length =
+          Code::PtrOffBits::decode(code->ptr()->state_bits_);
+      if (pointer_offsets_length != 0) {
+        FATAL("Cannot serialize code with embedded pointers");
+      }
+      if (kind == Snapshot::kAppNoJIT) {
+        // No disabled code in precompilation.
+        ASSERT(code->ptr()->instructions_ == code->ptr()->active_instructions_);
+      } else {
+        ASSERT(kind == Snapshot::kAppWithJIT);
+        // We never include optimized code in JIT precompilation. Deoptimization
+        // requires code patching and we cannot patch code that is shared
+        // between isolates and should not mutate memory allocated by the
+        // embedder.
+        bool is_optimized = Code::IsOptimized(code);
+        if (is_optimized) {
+          FATAL("Cannot include optimized code in a JIT snapshot");
+        }
+      }
+
+      RawInstructions* instr = code->ptr()->instructions_;
+      int32_t text_offset = s->GetTextOffset(instr, code);
+      s->Write<int32_t>(text_offset);
+
+      s->WriteRef(code->ptr()->object_pool_);
+      s->WriteRef(code->ptr()->owner_);
+      s->WriteRef(code->ptr()->exception_handlers_);
+      s->WriteRef(code->ptr()->pc_descriptors_);
+      s->WriteRef(code->ptr()->stackmaps_);
+
+      s->Write<int32_t>(code->ptr()->state_bits_);
+    }
+  }
+
+ private:
+  GrowableArray<RawCode*> objects_;
+};
+
+
+class CodeDeserializationCluster : public DeserializationCluster {
+ public:
+  CodeDeserializationCluster() { }
+  virtual ~CodeDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Code::InstanceSize(0)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawCode* code = reinterpret_cast<RawCode*>(d->Ref(id));
+      Deserializer::InitializeHeader(code, kCodeCid,
+                                     Code::InstanceSize(0), is_vm_object);
+
+      int32_t text_offset = d->Read<int32_t>();
+      RawInstructions* instr = reinterpret_cast<RawInstructions*>(
+          d->GetInstructionsAt(text_offset) + kHeapObjectTag);
+      uword entry_point = Instructions::EntryPoint(instr);
+
+      code->ptr()->entry_point_ = entry_point;
+      code->ptr()->active_instructions_ = instr;
+      code->ptr()->instructions_ = instr;
+      code->ptr()->object_pool_ =
+          reinterpret_cast<RawObjectPool*>(d->ReadRef());
+      code->ptr()->owner_ = d->ReadRef();
+      code->ptr()->exception_handlers_ =
+          reinterpret_cast<RawExceptionHandlers*>(d->ReadRef());
+      code->ptr()->pc_descriptors_ =
+          reinterpret_cast<RawPcDescriptors*>(d->ReadRef());
+      code->ptr()->stackmaps_ =
+          reinterpret_cast<RawArray*>(d->ReadRef());
+
+      code->ptr()->deopt_info_array_ = Array::null();
+      code->ptr()->static_calls_target_table_ = Array::null();
+      code->ptr()->var_descriptors_ = LocalVarDescriptors::null();
+      code->ptr()->inlined_metadata_ = Array::null();
+      code->ptr()->code_source_map_ = CodeSourceMap::null();
+      code->ptr()->comments_ = Array::null();
+      code->ptr()->return_address_metadata_ = Object::null();
+
+      code->ptr()->compile_timestamp_ = 0;
+      code->ptr()->state_bits_ = d->Read<int32_t>();
+      code->ptr()->lazy_deopt_pc_offset_ = -1;
+    }
+  }
+};
+
+
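+// Each object pool slot is typed by a parallel info array: tagged object,
+// immediate, or native entry. Only tagged-object slots are traced as
+// references; immediates are written raw, and native entries are left to be
+// relinked lazily at load time (except on DBC, which does not support lazy
+// native call linking).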
+class ObjectPoolSerializationCluster : public SerializationCluster {
+ public:
+  ObjectPoolSerializationCluster() { }
+  virtual ~ObjectPoolSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawObjectPool* pool = ObjectPool::RawCast(object);
+    objects_.Add(pool);
+
+    intptr_t length = pool->ptr()->length_;
+    RawTypedData* info_array = pool->ptr()->info_array_;
+
+    for (intptr_t i = 0; i < length; i++) {
+      ObjectPool::EntryType entry_type =
+          static_cast<ObjectPool::EntryType>(info_array->ptr()->data()[i]);
+      if (entry_type == ObjectPool::kTaggedObject) {
+        s->Push(pool->ptr()->data()[i].raw_obj_);
+      }
+    }
+
+    // TODO(rmacnak): Allocate the object pool and its info array together.
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kObjectPoolCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawObjectPool* pool = objects_[i];
+      intptr_t length = pool->ptr()->length_;
+      s->Write<intptr_t>(length);
+      s->AssignRef(pool);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawObjectPool* pool = objects_[i];
+      RawTypedData* info_array = pool->ptr()->info_array_;
+      intptr_t length = pool->ptr()->length_;
+      s->Write<intptr_t>(length);
+      for (intptr_t j = 0; j < length; j++) {
+        ObjectPool::EntryType entry_type =
+            static_cast<ObjectPool::EntryType>(info_array->ptr()->data()[j]);
+        s->Write<int8_t>(entry_type);
+        RawObjectPool::Entry& entry = pool->ptr()->data()[j];
+        switch (entry_type) {
+          case ObjectPool::kTaggedObject: {
+#if !defined(TARGET_ARCH_DBC)
+            if (entry.raw_obj_ ==
+                StubCode::CallNativeCFunction_entry()->code()) {
+              // Natives can run while precompiling, becoming linked and
+              // switching their stub. Reset to the initial stub used for
+              // lazy-linking.
+              s->WriteRef(StubCode::CallBootstrapCFunction_entry()->code());
+              break;
+            }
+#endif
+            s->WriteRef(entry.raw_obj_);
+            break;
+          }
+          case ObjectPool::kImmediate: {
+            s->Write<intptr_t>(entry.raw_value_);
+            break;
+          }
+          case ObjectPool::kNativeEntry: {
+            // Write nothing. Will initialize with the lazy link entry.
+#if defined(TARGET_ARCH_DBC)
+            UNREACHABLE();  // DBC does not support lazy native call linking.
+#endif
+            break;
+          }
+          default:
+            UNREACHABLE();
+        }
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawObjectPool*> objects_;
+};
+
+
+class ObjectPoolDeserializationCluster : public DeserializationCluster {
+ public:
+  ObjectPoolDeserializationCluster() { }
+  virtual ~ObjectPoolDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         ObjectPool::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+    PageSpace* old_space = d->heap()->old_space();
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      intptr_t length = d->Read<intptr_t>();
+      RawTypedData* info_array = reinterpret_cast<RawTypedData*>(
+          AllocateUninitialized(old_space, TypedData::InstanceSize(length)));
+      Deserializer::InitializeHeader(info_array, kTypedDataUint8ArrayCid,
+                                     TypedData::InstanceSize(length),
+                                     is_vm_object);
+      info_array->ptr()->length_ = Smi::New(length);
+      RawObjectPool* pool = reinterpret_cast<RawObjectPool*>(d->Ref(id));
+      Deserializer::InitializeHeader(pool, kObjectPoolCid,
+                                     ObjectPool::InstanceSize(length),
+                                     is_vm_object);
+      pool->ptr()->length_ = length;
+      pool->ptr()->info_array_ = info_array;
+      for (intptr_t j = 0; j < length; j++) {
+        ObjectPool::EntryType entry_type =
+            static_cast<ObjectPool::EntryType>(d->Read<int8_t>());
+        info_array->ptr()->data()[j] = entry_type;
+        RawObjectPool::Entry& entry = pool->ptr()->data()[j];
+        switch (entry_type) {
+          case ObjectPool::kTaggedObject:
+            entry.raw_obj_ = d->ReadRef();
+            break;
+          case ObjectPool::kImmediate:
+            entry.raw_value_ = d->Read<intptr_t>();
+            break;
+          case ObjectPool::kNativeEntry: {
+#if !defined(TARGET_ARCH_DBC)
+            // Read nothing. Initialize with the lazy link entry.
+            uword new_entry = NativeEntry::LinkNativeCallEntry();
+            entry.raw_value_ = static_cast<intptr_t>(new_entry);
+#else
+            UNREACHABLE();  // DBC does not support lazy native call linking.
+#endif
+            break;
+          }
+          default:
+            UNREACHABLE();
+        }
+      }
+    }
+  }
+};
+
+
+// PcDescriptor, Stackmap, OneByteString, TwoByteString
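+// These objects live in the read-only data section of the snapshot and, like
+// instructions, are referenced by offset rather than written into the
+// clustered stream, which is why WriteFill and ReadFill are no-ops.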
+class RODataSerializationCluster : public SerializationCluster {
+ public:
+  explicit RODataSerializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~RODataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    objects_.Add(object);
+
+    // A string's hash must already be computed when we write it because it
+    // will be loaded into read-only memory.
+    if (cid_ == kOneByteStringCid) {
+      RawOneByteString* str = static_cast<RawOneByteString*>(object);
+      if (str->ptr()->hash_ == Smi::New(0)) {
+        intptr_t hash = String::Hash(str->ptr()->data(),
+                                     Smi::Value(str->ptr()->length_));
+        str->ptr()->hash_ = Smi::New(hash);
+      }
+      ASSERT(str->ptr()->hash_ != Smi::New(0));
+    } else if (cid_ == kTwoByteStringCid) {
+      RawTwoByteString* str = static_cast<RawTwoByteString*>(object);
+      if (str->ptr()->hash_ == Smi::New(0)) {
+        intptr_t hash = String::Hash(str->ptr()->data(),
+                                     Smi::Value(str->ptr()->length_) * 2);
+        str->ptr()->hash_ = Smi::New(hash);
+      }
+      ASSERT(str->ptr()->hash_ != Smi::New(0));
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(cid_);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawObject* object = objects_[i];
+      int32_t rodata_offset = s->GetRODataOffset(object);
+      s->Write<int32_t>(rodata_offset);
+      s->AssignRef(object);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    // No-op.
+  }
+
+ private:
+  const intptr_t cid_;
+  GrowableArray<RawObject*> objects_;
+};
+
+
+class RODataDeserializationCluster : public DeserializationCluster {
+ public:
+  RODataDeserializationCluster() { }
+  virtual ~RODataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      int32_t rodata_offset = d->Read<int32_t>();
+      d->AssignRef(d->GetObjectAt(rodata_offset));
+    }
+  }
+
+  void ReadFill(Deserializer* d) {
+    // No-op.
+  }
+};
+
+
+class LocalVarDescriptorsSerializationCluster : public SerializationCluster {
+ public:
+  LocalVarDescriptorsSerializationCluster() { }
+  virtual ~LocalVarDescriptorsSerializationCluster() { }
+
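+  // LocalVarDescriptors are not written to snapshots; Code objects have
+  // their var_descriptors_ cleared on load (see
+  // CodeDeserializationCluster::ReadFill).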
+  void Trace(Serializer* s, RawObject* object) { UNIMPLEMENTED(); }
+  void WriteAlloc(Serializer* s) { }
+  void WriteFill(Serializer* s) { }
+
+ private:
+  GrowableArray<RawLocalVarDescriptors*> objects_;
+};
+
+
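+// An exception handler table is a reference to its handled-types array plus
+// a flat run of HandlerInfo structs, which are copied as raw bytes.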
+class ExceptionHandlersSerializationCluster : public SerializationCluster {
+ public:
+  ExceptionHandlersSerializationCluster() { }
+  virtual ~ExceptionHandlersSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawExceptionHandlers* handlers = ExceptionHandlers::RawCast(object);
+    objects_.Add(handlers);
+
+    s->Push(handlers->ptr()->handled_types_data_);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kExceptionHandlersCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawExceptionHandlers* handlers = objects_[i];
+      intptr_t length = handlers->ptr()->num_entries_;
+      s->Write<intptr_t>(length);
+      s->AssignRef(handlers);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawExceptionHandlers* handlers = objects_[i];
+      intptr_t length = handlers->ptr()->num_entries_;
+      s->Write<intptr_t>(length);
+      s->WriteRef(handlers->ptr()->handled_types_data_);
+
+      uint8_t* data = reinterpret_cast<uint8_t*>(handlers->ptr()->data());
+      intptr_t length_in_bytes =
+          length * sizeof(RawExceptionHandlers::HandlerInfo);
+      s->WriteBytes(data, length_in_bytes);
+    }
+  }
+
+ private:
+  GrowableArray<RawExceptionHandlers*> objects_;
+};
+
+
+class ExceptionHandlersDeserializationCluster : public DeserializationCluster {
+ public:
+  ExceptionHandlersDeserializationCluster() { }
+  virtual ~ExceptionHandlersDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+          ExceptionHandlers::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawExceptionHandlers* handlers =
+          reinterpret_cast<RawExceptionHandlers*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      Deserializer::InitializeHeader(handlers, kExceptionHandlersCid,
+                                     ExceptionHandlers::InstanceSize(length),
+                                     is_vm_object);
+      handlers->ptr()->num_entries_ = length;
+      handlers->ptr()->handled_types_data_ =
+          reinterpret_cast<RawArray*>(d->ReadRef());
+
+      uint8_t* data = reinterpret_cast<uint8_t*>(handlers->ptr()->data());
+      intptr_t length_in_bytes =
+          length * sizeof(RawExceptionHandlers::HandlerInfo);
+      d->ReadBytes(data, length_in_bytes);
+    }
+  }
+};
+
+
+class ContextSerializationCluster : public SerializationCluster {
+ public:
+  ContextSerializationCluster() { }
+  virtual ~ContextSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawContext* context = Context::RawCast(object);
+    objects_.Add(context);
+
+    s->Push(context->ptr()->parent_);
+    intptr_t length = context->ptr()->num_variables_;
+    for (intptr_t i = 0; i < length; i++) {
+      s->Push(context->ptr()->data()[i]);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kContextCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawContext* context = objects_[i];
+      intptr_t length = context->ptr()->num_variables_;
+      s->Write<intptr_t>(length);
+      s->AssignRef(context);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawContext* context = objects_[i];
+      intptr_t length = context->ptr()->num_variables_;
+      s->Write<intptr_t>(length);
+      s->WriteRef(context->ptr()->parent_);
+      for (intptr_t j = 0; j < length; j++) {
+        s->WriteRef(context->ptr()->data()[j]);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawContext*> objects_;
+};
+
+
+class ContextDeserializationCluster : public DeserializationCluster {
+ public:
+  ContextDeserializationCluster() { }
+  virtual ~ContextDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         Context::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawContext* context = reinterpret_cast<RawContext*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      Deserializer::InitializeHeader(context, kContextCid,
+                                     Context::InstanceSize(length),
+                                     is_vm_object);
+      context->ptr()->num_variables_ = length;
+      context->ptr()->parent_ = reinterpret_cast<RawContext*>(d->ReadRef());
+      for (intptr_t j = 0; j < length; j++) {
+        context->ptr()->data()[j] = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class ContextScopeSerializationCluster : public SerializationCluster {
+ public:
+  ContextScopeSerializationCluster() { }
+  virtual ~ContextScopeSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawContextScope* scope = ContextScope::RawCast(object);
+    objects_.Add(scope);
+
+    intptr_t length = scope->ptr()->num_variables_;
+    RawObject** from = scope->from();
+    RawObject** to = scope->to(length);
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kContextScopeCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawContextScope* scope = objects_[i];
+      intptr_t length = scope->ptr()->num_variables_;
+      s->Write<intptr_t>(length);
+      s->AssignRef(scope);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawContextScope* scope = objects_[i];
+      intptr_t length = scope->ptr()->num_variables_;
+      s->Write<intptr_t>(length);
+      s->Write<bool>(scope->ptr()->is_implicit_);
+      RawObject** from = scope->from();
+      RawObject** to = scope->to(length);
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawContextScope*> objects_;
+};
+
+
+class ContextScopeDeserializationCluster : public DeserializationCluster {
+ public:
+  ContextScopeDeserializationCluster() { }
+  virtual ~ContextScopeDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         ContextScope::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawContextScope* scope = reinterpret_cast<RawContextScope*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      Deserializer::InitializeHeader(scope, kContextScopeCid,
+                                     ContextScope::InstanceSize(length),
+                                     is_vm_object);
+      scope->ptr()->num_variables_ = length;
+      scope->ptr()->is_implicit_ = d->Read<bool>();
+      RawObject** from = scope->from();
+      RawObject** to = scope->to(length);
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class ICDataSerializationCluster : public SerializationCluster {
+ public:
+  ICDataSerializationCluster() { }
+  virtual ~ICDataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawICData* ic = ICData::RawCast(object);
+    objects_.Add(ic);
+
+    RawObject** from = ic->from();
+    RawObject** to = ic->to_snapshot(s->kind());
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kICDataCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawICData* ic = objects_[i];
+      s->AssignRef(ic);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    Snapshot::Kind kind = s->kind();
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawICData* ic = objects_[i];
+      RawObject** from = ic->from();
+      RawObject** to = ic->to_snapshot(kind);
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->Write<int32_t>(ic->ptr()->deopt_id_);
+      s->Write<uint32_t>(ic->ptr()->state_bits_);
+#if defined(TAG_IC_DATA)
+      s->Write<intptr_t>(ic->ptr()->tag_);
+#endif
+    }
+  }
+
+ private:
+  GrowableArray<RawICData*> objects_;
+};
+
+
+class ICDataDeserializationCluster : public DeserializationCluster {
+ public:
+  ICDataDeserializationCluster() { }
+  virtual ~ICDataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, ICData::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    Snapshot::Kind kind = d->kind();
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawICData* ic = reinterpret_cast<RawICData*>(d->Ref(id));
+      Deserializer::InitializeHeader(ic, kICDataCid,
+                                     ICData::InstanceSize(), is_vm_object);
+      RawObject** from = ic->from();
+      RawObject** to_snapshot = ic->to_snapshot(kind);
+      RawObject** to = ic->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
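+      // Pointer fields beyond the subset written for this snapshot kind are
+      // not in the stream; clear them to null instead.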
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+      ic->ptr()->deopt_id_ = d->Read<int32_t>();
+      ic->ptr()->state_bits_ = d->Read<uint32_t>();
+#if defined(TAG_IC_DATA)
+      ic->ptr()->tag_ = d->Read<intptr_t>();
+#endif
+    }
+  }
+
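+  // In precompiled (kAppNoJIT) snapshots, cache each Code target's entry
+  // point in the IC as a Smi so dispatch can avoid loading the Code object.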
+  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
+        Timeline::GetIsolateStream(), "PostLoadICData"));
+
+    if (kind == Snapshot::kAppNoJIT) {
+      ICData& ic = ICData::Handle(zone);
+      Object& func_or_code = Object::Handle(zone);
+      Code& code = Code::Handle(zone);
+      Smi& entry_point = Smi::Handle(zone);
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        ic ^= refs.At(i);
+        for (intptr_t j = 0; j < ic.NumberOfChecks(); j++) {
+          func_or_code = ic.GetTargetOrCodeAt(j);
+          if (func_or_code.IsCode()) {
+            code ^= func_or_code.raw();
+            entry_point = Smi::FromAlignedAddress(code.EntryPoint());
+            ic.SetEntryPointAt(j, entry_point);
+          }
+        }
+      }
+    }
+  }
+};
+
+
+class MegamorphicCacheSerializationCluster : public SerializationCluster {
+ public:
+  MegamorphicCacheSerializationCluster() { }
+  virtual ~MegamorphicCacheSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawMegamorphicCache* cache = MegamorphicCache::RawCast(object);
+    objects_.Add(cache);
+
+    RawObject** from = cache->from();
+    RawObject** to = cache->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kMegamorphicCacheCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawMegamorphicCache* cache = objects_[i];
+      s->AssignRef(cache);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawMegamorphicCache* cache = objects_[i];
+      RawObject** from = cache->from();
+      RawObject** to = cache->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->Write<int32_t>(cache->ptr()->filled_entry_count_);
+    }
+  }
+
+ private:
+  GrowableArray<RawMegamorphicCache*> objects_;
+};
+
+
+class MegamorphicCacheDeserializationCluster : public DeserializationCluster {
+ public:
+  MegamorphicCacheDeserializationCluster() { }
+  virtual ~MegamorphicCacheDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         MegamorphicCache::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawMegamorphicCache* cache =
+            reinterpret_cast<RawMegamorphicCache*>(d->Ref(id));
+      Deserializer::InitializeHeader(cache, kMegamorphicCacheCid,
+                                     MegamorphicCache::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = cache->from();
+      RawObject** to = cache->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      cache->ptr()->filled_entry_count_ = d->Read<int32_t>();
+    }
+  }
+};
+
+
+class SubtypeTestCacheSerializationCluster : public SerializationCluster {
+ public:
+  SubtypeTestCacheSerializationCluster() { }
+  virtual ~SubtypeTestCacheSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawSubtypeTestCache* cache = SubtypeTestCache::RawCast(object);
+    objects_.Add(cache);
+    s->Push(cache->ptr()->cache_);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kSubtypeTestCacheCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawSubtypeTestCache* cache = objects_[i];
+      s->AssignRef(cache);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawSubtypeTestCache* cache = objects_[i];
+      s->WriteRef(cache->ptr()->cache_);
+    }
+  }
+
+ private:
+  GrowableArray<RawSubtypeTestCache*> objects_;
+};
+
+
+class SubtypeTestCacheDeserializationCluster : public DeserializationCluster {
+ public:
+  SubtypeTestCacheDeserializationCluster() { }
+  virtual ~SubtypeTestCacheDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         SubtypeTestCache::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawSubtypeTestCache* cache =
+          reinterpret_cast<RawSubtypeTestCache*>(d->Ref(id));
+      Deserializer::InitializeHeader(cache, kSubtypeTestCacheCid,
+                                     SubtypeTestCache::InstanceSize(),
+                                     is_vm_object);
+      cache->ptr()->cache_ = reinterpret_cast<RawArray*>(d->ReadRef());
+    }
+  }
+};
+
+
+class LanguageErrorSerializationCluster : public SerializationCluster {
+ public:
+  LanguageErrorSerializationCluster() { }
+  virtual ~LanguageErrorSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawLanguageError* error = LanguageError::RawCast(object);
+    objects_.Add(error);
+
+    RawObject** from = error->from();
+    RawObject** to = error->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kLanguageErrorCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawLanguageError* error = objects_[i];
+      s->AssignRef(error);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawLanguageError* error = objects_[i];
+      RawObject** from = error->from();
+      RawObject** to = error->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->WriteTokenPosition(error->ptr()->token_pos_);
+      s->Write<bool>(error->ptr()->report_after_token_);
+      s->Write<int8_t>(error->ptr()->kind_);
+    }
+  }
+
+ private:
+  GrowableArray<RawLanguageError*> objects_;
+};
+
+
+class LanguageErrorDeserializationCluster : public DeserializationCluster {
+ public:
+  LanguageErrorDeserializationCluster() { }
+  virtual ~LanguageErrorDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         LanguageError::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawLanguageError* error = reinterpret_cast<RawLanguageError*>(d->Ref(id));
+      Deserializer::InitializeHeader(error, kLanguageErrorCid,
+                                     LanguageError::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = error->from();
+      RawObject** to = error->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      error->ptr()->token_pos_ = d->ReadTokenPosition();
+      error->ptr()->report_after_token_ = d->Read<bool>();
+      error->ptr()->kind_ = d->Read<int8_t>();
+    }
+  }
+};
+
+
+class UnhandledExceptionSerializationCluster : public SerializationCluster {
+ public:
+  UnhandledExceptionSerializationCluster() { }
+  virtual ~UnhandledExceptionSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawUnhandledException* exception = UnhandledException::RawCast(object);
+    objects_.Add(exception);
+
+    RawObject** from = exception->from();
+    RawObject** to = exception->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kUnhandledExceptionCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawUnhandledException* exception = objects_[i];
+      s->AssignRef(exception);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawUnhandledException* exception = objects_[i];
+      RawObject** from = exception->from();
+      RawObject** to = exception->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawUnhandledException*> objects_;
+};
+
+
+class UnhandledExceptionDeserializationCluster : public DeserializationCluster {
+ public:
+  UnhandledExceptionDeserializationCluster() { }
+  virtual ~UnhandledExceptionDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         UnhandledException::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawUnhandledException* exception =
+          reinterpret_cast<RawUnhandledException*>(d->Ref(id));
+      Deserializer::InitializeHeader(exception, kUnhandledExceptionCid,
+                                     UnhandledException::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = exception->from();
+      RawObject** to = exception->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
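+// Generic cluster for plain instances of a single class id. Only the word
+// range between Instance::NextFieldOffset() and the class's next field
+// offset is serialized, and the allocation size travels with the cluster so
+// the deserializer can allocate without consulting the class table.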
+class InstanceSerializationCluster : public SerializationCluster {
+ public:
+  explicit InstanceSerializationCluster(intptr_t cid) : cid_(cid) {
+    RawClass* cls = Isolate::Current()->class_table()->At(cid);
+    next_field_offset_ =
+        cls->ptr()->next_field_offset_in_words_ << kWordSizeLog2;
+    instance_size_in_words_ = cls->ptr()->instance_size_in_words_;
+    ASSERT(next_field_offset_ > 0);
+    ASSERT(instance_size_in_words_ > 0);
+  }
+  virtual ~InstanceSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawInstance* instance = Instance::RawCast(object);
+    objects_.Add(instance);
+
+    intptr_t offset = Instance::NextFieldOffset();
+    while (offset < next_field_offset_) {
+      RawObject* raw_obj = *reinterpret_cast<RawObject**>(
+          reinterpret_cast<uword>(instance->ptr()) + offset);
+      s->Push(raw_obj);
+      offset += kWordSize;
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->Write<intptr_t>(cid_);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+
+    s->Write<intptr_t>(next_field_offset_);
+    s->Write<intptr_t>(instance_size_in_words_);
+
+    for (intptr_t i = 0; i < count; i++) {
+      RawInstance* instance = objects_[i];
+      s->AssignRef(instance);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawInstance* instance = objects_[i];
+      s->Write<bool>(instance->IsCanonical());
+      intptr_t offset = Instance::NextFieldOffset();
+      while (offset < next_field_offset_) {
+        RawObject* raw_obj = *reinterpret_cast<RawObject**>(
+            reinterpret_cast<uword>(instance->ptr()) + offset);
+        s->WriteRef(raw_obj);
+        offset += kWordSize;
+      }
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+  intptr_t next_field_offset_;
+  intptr_t instance_size_in_words_;
+  GrowableArray<RawInstance*> objects_;
+};
+
+
+class InstanceDeserializationCluster : public DeserializationCluster {
+ public:
+  explicit InstanceDeserializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~InstanceDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    next_field_offset_ = d->Read<intptr_t>();
+    instance_size_in_words_ = d->Read<intptr_t>();
+    intptr_t instance_size =
+        Object::RoundedAllocationSize(instance_size_in_words_ * kWordSize);
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, instance_size));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    intptr_t instance_size =
+        Object::RoundedAllocationSize(instance_size_in_words_ * kWordSize);
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawInstance* instance = reinterpret_cast<RawInstance*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(instance, cid_,
+                                     instance_size,
+                                     is_vm_object, is_canonical);
+      intptr_t offset = Instance::NextFieldOffset();
+      while (offset < next_field_offset_) {
+        RawObject** p = reinterpret_cast<RawObject**>(
+            reinterpret_cast<uword>(instance->ptr()) + offset);
+        *p = d->ReadRef();
+        offset += kWordSize;
+      }
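+      // Rounding the allocation size up can leave one padding word past the
+      // last field; initialize it so the object is fully defined.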
+      if (offset < instance_size) {
+        RawObject** p = reinterpret_cast<RawObject**>(
+            reinterpret_cast<uword>(instance->ptr()) + offset);
+        *p = Object::null();
+        offset += kWordSize;
+      }
+      ASSERT(offset == instance_size);
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+  intptr_t next_field_offset_;
+  intptr_t instance_size_in_words_;
+};
+
+
+class LibraryPrefixSerializationCluster : public SerializationCluster {
+ public:
+  LibraryPrefixSerializationCluster() { }
+  virtual ~LibraryPrefixSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawLibraryPrefix* prefix = LibraryPrefix::RawCast(object);
+    objects_.Add(prefix);
+
+    RawObject** from = prefix->from();
+    RawObject** to = prefix->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kLibraryPrefixCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawLibraryPrefix* prefix = objects_[i];
+      s->AssignRef(prefix);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawLibraryPrefix* prefix = objects_[i];
+      RawObject** from = prefix->from();
+      RawObject** to = prefix->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->Write<uint16_t>(prefix->ptr()->num_imports_);
+      s->Write<bool>(prefix->ptr()->is_deferred_load_);
+      s->Write<bool>(prefix->ptr()->is_loaded_);
+    }
+  }
+
+ private:
+  GrowableArray<RawLibraryPrefix*> objects_;
+};
+
+
+class LibraryPrefixDeserializationCluster : public DeserializationCluster {
+ public:
+  LibraryPrefixDeserializationCluster() { }
+  virtual ~LibraryPrefixDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         LibraryPrefix::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawLibraryPrefix* prefix =
+          reinterpret_cast<RawLibraryPrefix*>(d->Ref(id));
+      Deserializer::InitializeHeader(prefix, kLibraryPrefixCid,
+                                     LibraryPrefix::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = prefix->from();
+      RawObject** to = prefix->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      prefix->ptr()->num_imports_ = d->Read<uint16_t>();
+      prefix->ptr()->is_deferred_load_ = d->Read<bool>();
+      prefix->ptr()->is_loaded_ = d->Read<bool>();
+    }
+  }
+};
+
+
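+// Canonical and non-canonical types are written as two separate groups so
+// the deserializer can set the canonical bit per group rather than reading
+// a per-object flag. The type's class is pushed during tracing because only
+// its class id (a Smi) is stored in the object itself.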
+class TypeSerializationCluster : public SerializationCluster {
+ public:
+  TypeSerializationCluster() { }
+  virtual ~TypeSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawType* type = Type::RawCast(object);
+    if (type->IsCanonical()) {
+      canonical_objects_.Add(type);
+    } else {
+      objects_.Add(type);
+    }
+
+    RawObject** from = type->from();
+    RawObject** to = type->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+
+    RawSmi* raw_type_class_id = Smi::RawCast(type->ptr()->type_class_id_);
+    RawClass* type_class =
+        s->isolate()->class_table()->At(Smi::Value(raw_type_class_id));
+    s->Push(type_class);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTypeCid);
+    intptr_t count = canonical_objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawType* type = canonical_objects_[i];
+      s->AssignRef(type);
+    }
+    count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawType* type = objects_[i];
+      s->AssignRef(type);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = canonical_objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawType* type = canonical_objects_[i];
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->WriteTokenPosition(type->ptr()->token_pos_);
+      s->Write<int8_t>(type->ptr()->type_state_);
+    }
+    count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawType* type = objects_[i];
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->WriteTokenPosition(type->ptr()->token_pos_);
+      s->Write<int8_t>(type->ptr()->type_state_);
+    }
+  }
+
+ private:
+  GrowableArray<RawType*> canonical_objects_;
+  GrowableArray<RawType*> objects_;
+};
+
+
+class TypeDeserializationCluster : public DeserializationCluster {
+ public:
+  TypeDeserializationCluster() { }
+  virtual ~TypeDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    canonical_start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Type::InstanceSize()));
+    }
+    canonical_stop_index_ = d->next_index();
+
+    start_index_ = d->next_index();
+    count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Type::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = canonical_start_index_;
+         id < canonical_stop_index_;
+         id++) {
+      RawType* type = reinterpret_cast<RawType*>(d->Ref(id));
+      Deserializer::InitializeHeader(type, kTypeCid,
+                                     Type::InstanceSize(), is_vm_object, true);
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      type->ptr()->token_pos_ = d->ReadTokenPosition();
+      type->ptr()->type_state_ = d->Read<int8_t>();
+    }
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawType* type = reinterpret_cast<RawType*>(d->Ref(id));
+      Deserializer::InitializeHeader(type, kTypeCid,
+                                     Type::InstanceSize(), is_vm_object);
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      type->ptr()->token_pos_ = d->ReadTokenPosition();
+      type->ptr()->type_state_ = d->Read<int8_t>();
+    }
+  }
+
+ private:
+  intptr_t canonical_start_index_;
+  intptr_t canonical_stop_index_;
+};
+
+
+class TypeRefSerializationCluster : public SerializationCluster {
+ public:
+  TypeRefSerializationCluster() { }
+  virtual ~TypeRefSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTypeRef* type = TypeRef::RawCast(object);
+    objects_.Add(type);
+
+    RawObject** from = type->from();
+    RawObject** to = type->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTypeRefCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeRef* type = objects_[i];
+      s->AssignRef(type);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeRef* type = objects_[i];
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawTypeRef*> objects_;
+};
+
+
+class TypeRefDeserializationCluster : public DeserializationCluster {
+ public:
+  TypeRefDeserializationCluster() { }
+  virtual ~TypeRefDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, TypeRef::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTypeRef* type = reinterpret_cast<RawTypeRef*>(d->Ref(id));
+      Deserializer::InitializeHeader(type, kTypeRefCid,
+                                     TypeRef::InstanceSize(), is_vm_object);
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class TypeParameterSerializationCluster : public SerializationCluster {
+ public:
+  TypeParameterSerializationCluster() { }
+  virtual ~TypeParameterSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTypeParameter* type = TypeParameter::RawCast(object);
+    objects_.Add(type);
+    ASSERT(!type->IsCanonical());
+
+    RawObject** from = type->from();
+    RawObject** to = type->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTypeParameterCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeParameter* type = objects_[i];
+      s->AssignRef(type);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeParameter* type = objects_[i];
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->Write<intptr_t>(type->ptr()->parameterized_class_id_);
+      s->WriteTokenPosition(type->ptr()->token_pos_);
+      s->Write<int16_t>(type->ptr()->index_);
+      s->Write<int8_t>(type->ptr()->type_state_);
+    }
+  }
+
+ private:
+  GrowableArray<RawTypeParameter*> objects_;
+};
+
+
+class TypeParameterDeserializationCluster : public DeserializationCluster {
+ public:
+  TypeParameterDeserializationCluster() { }
+  virtual ~TypeParameterDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         TypeParameter::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTypeParameter* type = reinterpret_cast<RawTypeParameter*>(d->Ref(id));
+      Deserializer::InitializeHeader(type, kTypeParameterCid,
+                                     TypeParameter::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      type->ptr()->parameterized_class_id_ = d->Read<intptr_t>();
+      type->ptr()->token_pos_ = d->ReadTokenPosition();
+      type->ptr()->index_ = d->Read<int16_t>();
+      type->ptr()->type_state_ = d->Read<int8_t>();
+    }
+  }
+};
+
+
+class BoundedTypeSerializationCluster : public SerializationCluster {
+ public:
+  BoundedTypeSerializationCluster() { }
+  virtual ~BoundedTypeSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawBoundedType* type = BoundedType::RawCast(object);
+    objects_.Add(type);
+
+    RawObject** from = type->from();
+    RawObject** to = type->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kBoundedTypeCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawBoundedType* type = objects_[i];
+      s->AssignRef(type);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawBoundedType* type = objects_[i];
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawBoundedType*> objects_;
+};
+
+
+class BoundedTypeDeserializationCluster : public DeserializationCluster {
+ public:
+  BoundedTypeDeserializationCluster() { }
+  virtual ~BoundedTypeDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         BoundedType::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawBoundedType* type = reinterpret_cast<RawBoundedType*>(d->Ref(id));
+      Deserializer::InitializeHeader(type, kBoundedTypeCid,
+                                     BoundedType::InstanceSize(), is_vm_object);
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class ClosureSerializationCluster : public SerializationCluster {
+ public:
+  ClosureSerializationCluster() { }
+  virtual ~ClosureSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawClosure* closure = Closure::RawCast(object);
+    objects_.Add(closure);
+
+    RawObject** from = closure->from();
+    RawObject** to = closure->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kClosureCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawClosure* closure = objects_[i];
+      s->AssignRef(closure);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawClosure* closure = objects_[i];
+      s->Write<bool>(closure->IsCanonical());
+      RawObject** from = closure->from();
+      RawObject** to = closure->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawClosure*> objects_;
+};
+
+
+class ClosureDeserializationCluster : public DeserializationCluster {
+ public:
+  ClosureDeserializationCluster() { }
+  virtual ~ClosureDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Closure::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawClosure* closure = reinterpret_cast<RawClosure*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(closure, kClosureCid,
+                                     Closure::InstanceSize(),
+                                     is_vm_object, is_canonical);
+      RawObject** from = closure->from();
+      RawObject** to = closure->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class MintSerializationCluster : public SerializationCluster {
+ public:
+  MintSerializationCluster() { }
+  virtual ~MintSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawMint* mint = Mint::RawCast(object);
+    objects_.Add(mint);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kMintCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawMint* mint = objects_[i];
+      s->AssignRef(mint);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawMint* mint = objects_[i];
+      s->Write<bool>(mint->IsCanonical());
+      s->Write<int64_t>(mint->ptr()->value_);
+    }
+  }
+
+ private:
+  GrowableArray<RawMint*> objects_;
+};
+
+
+class MintDeserializationCluster : public DeserializationCluster {
+ public:
+  MintDeserializationCluster() { }
+  virtual ~MintDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Mint::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawMint* mint = reinterpret_cast<RawMint*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(mint, kMintCid,
+                                     Mint::InstanceSize(),
+                                     is_vm_object, is_canonical);
+      mint->ptr()->value_ = d->Read<int64_t>();
+    }
+  }
+};
+
+
+class BigintSerializationCluster : public SerializationCluster {
+ public:
+  BigintSerializationCluster() { }
+  virtual ~BigintSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawBigint* bigint = Bigint::RawCast(object);
+    objects_.Add(bigint);
+
+    RawObject** from = bigint->from();
+    RawObject** to = bigint->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kBigintCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawBigint* bigint = objects_[i];
+      s->AssignRef(bigint);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawBigint* bigint = objects_[i];
+      s->Write<bool>(bigint->IsCanonical());
+      RawObject** from = bigint->from();
+      RawObject** to = bigint->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawBigint*> objects_;
+};
+
+
+class BigintDeserializationCluster : public DeserializationCluster {
+ public:
+  BigintDeserializationCluster() { }
+  virtual ~BigintDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Bigint::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawBigint* bigint = reinterpret_cast<RawBigint*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(bigint, kBigintCid,
+                                     Bigint::InstanceSize(),
+                                     is_vm_object, is_canonical);
+      RawObject** from = bigint->from();
+      RawObject** to = bigint->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class DoubleSerializationCluster : public SerializationCluster {
+ public:
+  DoubleSerializationCluster() { }
+  virtual ~DoubleSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawDouble* dbl = Double::RawCast(object);
+    objects_.Add(dbl);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kDoubleCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawDouble* dbl = objects_[i];
+      s->AssignRef(dbl);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawDouble* dbl = objects_[i];
+      s->Write<bool>(dbl->IsCanonical());
+      s->Write<double>(dbl->ptr()->value_);
+    }
+  }
+
+ private:
+  GrowableArray<RawDouble*> objects_;
+};
+
+
+class DoubleDeserializationCluster : public DeserializationCluster {
+ public:
+  DoubleDeserializationCluster() { }
+  virtual ~DoubleDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Double::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawDouble* dbl = reinterpret_cast<RawDouble*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(dbl, kDoubleCid,
+                                     Double::InstanceSize(),
+                                     is_vm_object, is_canonical);
+      dbl->ptr()->value_ = d->Read<double>();
+    }
+  }
+};
+
+
+class GrowableObjectArraySerializationCluster : public SerializationCluster {
+ public:
+  GrowableObjectArraySerializationCluster() { }
+  virtual ~GrowableObjectArraySerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawGrowableObjectArray* array = GrowableObjectArray::RawCast(object);
+    objects_.Add(array);
+
+    RawObject** from = array->from();
+    RawObject** to = array->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kGrowableObjectArrayCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawGrowableObjectArray* array = objects_[i];
+      s->AssignRef(array);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawGrowableObjectArray* array = objects_[i];
+      s->Write<bool>(array->IsCanonical());
+      RawObject** from = array->from();
+      RawObject** to = array->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawGrowableObjectArray*> objects_;
+};
+
+
+class GrowableObjectArrayDeserializationCluster
+    : public DeserializationCluster {
+ public:
+  GrowableObjectArrayDeserializationCluster() { }
+  virtual ~GrowableObjectArrayDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         GrowableObjectArray::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawGrowableObjectArray* list =
+          reinterpret_cast<RawGrowableObjectArray*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(list, kGrowableObjectArrayCid,
+                                     GrowableObjectArray::InstanceSize(),
+                                     is_vm_object, is_canonical);
+      RawObject** from = list->from();
+      RawObject** to = list->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
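+// TypedData payloads are copied into the stream as raw bytes; the element
+// count is written in both phases so ReadAlloc can size the allocation.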
+class TypedDataSerializationCluster : public SerializationCluster {
+ public:
+  explicit TypedDataSerializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~TypedDataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTypedData* data = TypedData::RawCast(object);
+    objects_.Add(data);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->Write<intptr_t>(cid_);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypedData* data = objects_[i];
+      intptr_t length = Smi::Value(data->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->AssignRef(data);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    intptr_t element_size = TypedData::ElementSizeInBytes(cid_);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypedData* data = objects_[i];
+      intptr_t length = Smi::Value(data->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->Write<bool>(data->IsCanonical());
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data());
+      s->WriteBytes(cdata, length * element_size);
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+  GrowableArray<RawTypedData*> objects_;
+};
+
+
+class TypedDataDeserializationCluster : public DeserializationCluster {
+ public:
+  explicit TypedDataDeserializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~TypedDataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    intptr_t element_size = TypedData::ElementSizeInBytes(cid_);
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+          TypedData::InstanceSize(length * element_size)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+    intptr_t element_size = TypedData::ElementSizeInBytes(cid_);
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTypedData* data = reinterpret_cast<RawTypedData*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      bool is_canonical = d->Read<bool>();
+      intptr_t length_in_bytes = length * element_size;
+      Deserializer::InitializeHeader(data, cid_,
+                                     TypedData::InstanceSize(length_in_bytes),
+                                     is_vm_object, is_canonical);
+      data->ptr()->length_ = Smi::New(length);
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data());
+      d->ReadBytes(cdata, length_in_bytes);
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+};
+
+
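+// External typed data is never canonical (see the ASSERT in Trace), so no
+// canonical bit is written for this cluster.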
+class ExternalTypedDataSerializationCluster : public SerializationCluster {
+ public:
+  explicit ExternalTypedDataSerializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~ExternalTypedDataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawExternalTypedData* data = ExternalTypedData::RawCast(object);
+    objects_.Add(data);
+    ASSERT(!data->IsCanonical());
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->Write<intptr_t>(cid_);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawExternalTypedData* data = objects_[i];
+      s->AssignRef(data);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    intptr_t element_size = ExternalTypedData::ElementSizeInBytes(cid_);
+    for (intptr_t i = 0; i < count; i++) {
+      RawExternalTypedData* data = objects_[i];
+      intptr_t length = Smi::Value(data->ptr()->length_);
+      s->Write<intptr_t>(length);
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data_);
+      s->WriteBytes(cdata, length * element_size);
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+  GrowableArray<RawExternalTypedData*> objects_;
+};
+
+
+class ExternalTypedDataDeserializationCluster : public DeserializationCluster {
+ public:
+  explicit ExternalTypedDataDeserializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~ExternalTypedDataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         ExternalTypedData::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+    intptr_t element_size = ExternalTypedData::ElementSizeInBytes(cid_);
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawExternalTypedData* data =
+          reinterpret_cast<RawExternalTypedData*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      Deserializer::InitializeHeader(data, cid_,
+                                     ExternalTypedData::InstanceSize(),
+                                     is_vm_object);
+      data->ptr()->length_ = Smi::New(length);
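+      // The payload is not copied: data_ points directly into the snapshot
+      // buffer, so the buffer must outlive the isolate's use of this data.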
+      data->ptr()->data_ = const_cast<uint8_t*>(d->CurrentBufferAddress());
+      d->Advance(length * element_size);
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+};
+
+
+class StacktraceSerializationCluster : public SerializationCluster {
+ public:
+  StacktraceSerializationCluster() { }
+  virtual ~StacktraceSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawStacktrace* trace = Stacktrace::RawCast(object);
+    objects_.Add(trace);
+
+    RawObject** from = trace->from();
+    RawObject** to = trace->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kStacktraceCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawStacktrace* trace = objects_[i];
+      s->AssignRef(trace);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawStacktrace* trace = objects_[i];
+      RawObject** from = trace->from();
+      RawObject** to = trace->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawStacktrace*> objects_;
+};
+
+
+class StacktraceDeserializationCluster : public DeserializationCluster {
+ public:
+  StacktraceDeserializationCluster() { }
+  virtual ~StacktraceDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         Stacktrace::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawStacktrace* trace = reinterpret_cast<RawStacktrace*>(d->Ref(id));
+      Deserializer::InitializeHeader(trace, kStacktraceCid,
+                                     Stacktrace::InstanceSize(), is_vm_object);
+      RawObject** from = trace->from();
+      RawObject** to = trace->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class RegExpSerializationCluster : public SerializationCluster {
+ public:
+  RegExpSerializationCluster() { }
+  virtual ~RegExpSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawRegExp* regexp = RegExp::RawCast(object);
+    objects_.Add(regexp);
+
+    RawObject** from = regexp->from();
+    RawObject** to = regexp->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kRegExpCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawRegExp* regexp = objects_[i];
+      s->AssignRef(regexp);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawRegExp* regexp = objects_[i];
+      RawObject** from = regexp->from();
+      RawObject** to = regexp->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+
+      s->Write<intptr_t>(regexp->ptr()->num_registers_);
+      s->Write<int8_t>(regexp->ptr()->type_flags_);
+    }
+  }
+
+ private:
+  GrowableArray<RawRegExp*> objects_;
+};
+
+
+class RegExpDeserializationCluster : public DeserializationCluster {
+ public:
+  RegExpDeserializationCluster() { }
+  virtual ~RegExpDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         RegExp::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawRegExp* regexp = reinterpret_cast<RawRegExp*>(d->Ref(id));
+      Deserializer::InitializeHeader(regexp, kRegExpCid,
+                                     RegExp::InstanceSize(), is_vm_object);
+      RawObject** from = regexp->from();
+      RawObject** to = regexp->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+
+      regexp->ptr()->num_registers_ = d->Read<intptr_t>();
+      regexp->ptr()->type_flags_ = d->Read<int8_t>();
+    }
+  }
+};
+
+
+class LinkedHashMapSerializationCluster : public SerializationCluster {
+ public:
+  LinkedHashMapSerializationCluster() { }
+  virtual ~LinkedHashMapSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawLinkedHashMap* map = LinkedHashMap::RawCast(object);
+    objects_.Add(map);
+
+    s->Push(map->ptr()->type_arguments_);
+
+    intptr_t used_data = Smi::Value(map->ptr()->used_data_);
+    RawArray* data_array = map->ptr()->data_;
+    RawObject** data_elements = data_array->ptr()->data();
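+    // A deleted pair has the backing array itself stored as its key, so
+    // comparing each key against data_array skips deleted entries.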
+    for (intptr_t i = 0; i < used_data; i += 2) {
+      RawObject* key = data_elements[i];
+      if (key != data_array) {
+        RawObject* value = data_elements[i + 1];
+        s->Push(key);
+        s->Push(value);
+      }
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kLinkedHashMapCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawLinkedHashMap* map = objects_[i];
+      s->AssignRef(map);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawLinkedHashMap* map = objects_[i];
+      s->Write<bool>(map->IsCanonical());
+
+      s->WriteRef(map->ptr()->type_arguments_);
+
+      const intptr_t used_data = Smi::Value(map->ptr()->used_data_);
+      ASSERT((used_data & 1) == 0);  // Keys + values, so must be even.
+      const intptr_t deleted_keys = Smi::Value(map->ptr()->deleted_keys_);
+
+      // Write out the number of (not deleted) key/value pairs that will
+      // follow.
+      s->Write<intptr_t>((used_data >> 1) - deleted_keys);
+
+      RawArray* data_array = map->ptr()->data_;
+      RawObject** data_elements = data_array->ptr()->data();
+      for (intptr_t i = 0; i < used_data; i += 2) {
+        RawObject* key = data_elements[i];
+        if (key != data_array) {
+          RawObject* value = data_elements[i + 1];
+          s->WriteRef(key);
+          s->WriteRef(value);
+        }
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawLinkedHashMap*> objects_;
+};
+
+
+class LinkedHashMapDeserializationCluster : public DeserializationCluster {
+ public:
+  LinkedHashMapDeserializationCluster() { }
+  virtual ~LinkedHashMapDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         LinkedHashMap::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+    PageSpace* old_space = d->heap()->old_space();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawLinkedHashMap* map = reinterpret_cast<RawLinkedHashMap*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(map, kLinkedHashMapCid,
+                                     LinkedHashMap::InstanceSize(),
+                                     is_vm_object, is_canonical);
+
+      map->ptr()->type_arguments_ =
+          reinterpret_cast<RawTypeArguments*>(d->ReadRef());
+
+      // TODO(rmacnak): Reserve ref ids and co-allocate in ReadAlloc.
+      intptr_t pairs = d->Read<intptr_t>();
+      intptr_t used_data = pairs << 1;
+      intptr_t data_size = Utils::Maximum(
+          Utils::RoundUpToPowerOfTwo(used_data),
+          static_cast<uintptr_t>(LinkedHashMap::kInitialIndexSize));
+
+      RawArray* data = reinterpret_cast<RawArray*>(
+          AllocateUninitialized(old_space, Array::InstanceSize(data_size)));
+      data->ptr()->type_arguments_ = TypeArguments::null();
+      data->ptr()->length_ = Smi::New(data_size);
+      intptr_t i;
+      for (i = 0; i < used_data; i++) {
+        data->ptr()->data()[i] = d->ReadRef();
+      }
+      for (; i < data_size; i++) {
+        data->ptr()->data()[i] = Object::null();
+      }
+
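+      // The hash index is not serialized; leave it cleared and expect the
+      // runtime to rebuild it from the data array when the map is next used.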
+      map->ptr()->index_ = TypedData::null();
+      map->ptr()->hash_mask_ = Smi::New(0);
+      map->ptr()->data_ = data;
+      map->ptr()->used_data_ = Smi::New(used_data);
+      map->ptr()->deleted_keys_ = Smi::New(0);
+    }
+  }
+};
+
+
+class ArraySerializationCluster : public SerializationCluster {
+ public:
+  explicit ArraySerializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~ArraySerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawArray* array = Array::RawCast(object);
+    objects_.Add(array);
+
+    s->Push(array->ptr()->type_arguments_);
+    intptr_t length = Smi::Value(array->ptr()->length_);
+    for (intptr_t i = 0; i < length; i++) {
+      s->Push(array->ptr()->data()[i]);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(cid_);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawArray* array = objects_[i];
+      intptr_t length = Smi::Value(array->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->AssignRef(array);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawArray* array = objects_[i];
+      intptr_t length = Smi::Value(array->ptr()->length_);
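+      // The length is repeated in the fill section so that ReadFill does not
+      // have to recover it from the allocation section.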
+      s->Write<intptr_t>(length);
+      s->Write<bool>(array->IsCanonical());
+      s->WriteRef(array->ptr()->type_arguments_);
+      for (intptr_t j = 0; j < length; j++) {
+        s->WriteRef(array->ptr()->data()[j]);
+      }
+    }
+  }
+
+ private:
+  intptr_t cid_;
+  GrowableArray<RawArray*> objects_;
+};
+
+
+class ArrayDeserializationCluster : public DeserializationCluster {
+ public:
+  explicit ArrayDeserializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~ArrayDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         Array::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawArray* array = reinterpret_cast<RawArray*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(array, cid_,
+                                     Array::InstanceSize(length),
+                                     is_vm_object, is_canonical);
+      array->ptr()->type_arguments_ =
+          reinterpret_cast<RawTypeArguments*>(d->ReadRef());
+      array->ptr()->length_ = Smi::New(length);
+      for (intptr_t j = 0; j < length; j++) {
+        array->ptr()->data()[j] = d->ReadRef();
+      }
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+};
+
+
+class OneByteStringSerializationCluster : public SerializationCluster {
+ public:
+  OneByteStringSerializationCluster() { }
+  virtual ~OneByteStringSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawOneByteString* str = reinterpret_cast<RawOneByteString*>(object);
+    objects_.Add(str);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kOneByteStringCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawOneByteString* str = objects_[i];
+      intptr_t length = Smi::Value(str->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->AssignRef(str);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawOneByteString* str = objects_[i];
+      intptr_t length = Smi::Value(str->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->Write<bool>(str->IsCanonical());
+      intptr_t hash = Smi::Value(str->ptr()->hash_);
+      s->Write<int32_t>(hash);
+      s->WriteBytes(str->ptr()->data(), length);
+    }
+  }
+
+ private:
+  GrowableArray<RawOneByteString*> objects_;
+};
+
+
+class OneByteStringDeserializationCluster : public DeserializationCluster {
+ public:
+  OneByteStringDeserializationCluster() { }
+  virtual ~OneByteStringDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         OneByteString::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawOneByteString* str = reinterpret_cast<RawOneByteString*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(str, kOneByteStringCid,
+                                     OneByteString::InstanceSize(length),
+                                     is_vm_object, is_canonical);
+      str->ptr()->length_ = Smi::New(length);
+      str->ptr()->hash_ = Smi::New(d->Read<int32_t>());
+      for (intptr_t j = 0; j < length; j++) {
+        str->ptr()->data()[j] = d->Read<uint8_t>();
+      }
+    }
+  }
+};
+
+
+class TwoByteStringSerializationCluster : public SerializationCluster {
+ public:
+  TwoByteStringSerializationCluster() { }
+  virtual ~TwoByteStringSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTwoByteString* str = reinterpret_cast<RawTwoByteString*>(object);
+    objects_.Add(str);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTwoByteStringCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTwoByteString* str = objects_[i];
+      intptr_t length = Smi::Value(str->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->AssignRef(str);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawTwoByteString* str = objects_[i];
+      intptr_t length = Smi::Value(str->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->Write<bool>(str->IsCanonical());
+      intptr_t hash = Smi::Value(str->ptr()->hash_);
+      s->Write<int32_t>(hash);
+      s->WriteBytes(reinterpret_cast<uint8_t*>(str->ptr()->data()), length * 2);
+    }
+  }
+
+ private:
+  GrowableArray<RawTwoByteString*> objects_;
+};
+
+
+class TwoByteStringDeserializationCluster : public DeserializationCluster {
+ public:
+  TwoByteStringDeserializationCluster() { }
+  virtual ~TwoByteStringDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         TwoByteString::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTwoByteString* str =
+          reinterpret_cast<RawTwoByteString*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(str, kTwoByteStringCid,
+                                     TwoByteString::InstanceSize(length),
+                                     is_vm_object, is_canonical);
+      str->ptr()->length_ = Smi::New(length);
+      str->ptr()->hash_ = Smi::New(d->Read<int32_t>());
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(str->ptr()->data());
+      d->ReadBytes(cdata, length * 2);
+    }
+  }
+};
+
+
+Serializer::Serializer(Thread* thread,
+                       Snapshot::Kind kind,
+                       uint8_t** buffer,
+                       ReAlloc alloc,
+                       intptr_t initial_size,
+                       InstructionsWriter* instructions_writer)
+    : StackResource(thread),
+      heap_(thread->isolate()->heap()),
+      zone_(thread->zone()),
+      kind_(kind),
+      stream_(buffer, alloc, initial_size),
+      instructions_writer_(instructions_writer),
+      clusters_by_cid_(NULL),
+      stack_(),
+      num_cids_(0),
+      num_base_objects_(0),
+      num_written_objects_(0),
+      next_ref_index_(1) {
+  num_cids_ = thread->isolate()->class_table()->NumCids();
+  clusters_by_cid_ = new SerializationCluster*[num_cids_];
+  for (intptr_t i = 0; i < num_cids_; i++) {
+    clusters_by_cid_[i] = NULL;
+  }
+}
+
+
+Serializer::~Serializer() {
+  delete[] clusters_by_cid_;
+}
+
+
+SerializationCluster* Serializer::NewClusterForClass(intptr_t cid) {
+  Zone* Z = zone_;
+  if ((cid > kNumPredefinedCids) ||
+      (cid == kInstanceCid) ||
+      RawObject::IsTypedDataViewClassId(cid)) {
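+    // Also push the class object itself so it ends up in the snapshot;
+    // instances of non-predefined cids need their class to be available on
+    // the reading side.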
+    Push(isolate()->class_table()->At(cid));
+    return new (Z) InstanceSerializationCluster(cid);
+  }
+  if (RawObject::IsExternalTypedDataClassId(cid)) {
+    return new (Z) ExternalTypedDataSerializationCluster(cid);
+  }
+  if (RawObject::IsTypedDataClassId(cid)) {
+    return new (Z) TypedDataSerializationCluster(cid);
+  }
+
+  switch (cid) {
+    case kClassCid: return new (Z) ClassSerializationCluster(num_cids_);
+    case kUnresolvedClassCid:
+      return new (Z) UnresolvedClassSerializationCluster();
+    case kTypeArgumentsCid: return new (Z) TypeArgumentsSerializationCluster();
+    case kPatchClassCid: return new (Z) PatchClassSerializationCluster();
+    case kFunctionCid: return new (Z) FunctionSerializationCluster();
+    case kClosureDataCid: return new (Z) ClosureDataSerializationCluster();
+    case kRedirectionDataCid:
+      return new (Z) RedirectionDataSerializationCluster();
+    case kFieldCid: return new (Z) FieldSerializationCluster();
+    case kLiteralTokenCid: return new (Z) LiteralTokenSerializationCluster();
+    case kTokenStreamCid: return new (Z) TokenStreamSerializationCluster();
+    case kScriptCid: return new (Z) ScriptSerializationCluster();
+    case kLibraryCid: return new (Z) LibrarySerializationCluster();
+    case kNamespaceCid: return new (Z) NamespaceSerializationCluster();
+    case kCodeCid: return new (Z) CodeSerializationCluster();
+    case kObjectPoolCid: return new (Z) ObjectPoolSerializationCluster();
+    case kPcDescriptorsCid:
+      return new (Z) RODataSerializationCluster(kPcDescriptorsCid);
+    case kStackmapCid:
+      return new (Z) RODataSerializationCluster(kStackmapCid);
+    case kExceptionHandlersCid:
+      return new (Z) ExceptionHandlersSerializationCluster();
+    case kContextCid: return new (Z) ContextSerializationCluster();
+    case kContextScopeCid: return new (Z) ContextScopeSerializationCluster();
+    case kICDataCid: return new (Z) ICDataSerializationCluster();
+    case kMegamorphicCacheCid:
+      return new (Z) MegamorphicCacheSerializationCluster();
+    case kSubtypeTestCacheCid:
+      return new (Z) SubtypeTestCacheSerializationCluster();
+    case kLanguageErrorCid:
+      return new (Z) LanguageErrorSerializationCluster();
+    case kUnhandledExceptionCid:
+      return new (Z) UnhandledExceptionSerializationCluster();
+    case kLibraryPrefixCid: return new (Z) LibraryPrefixSerializationCluster();
+    case kTypeCid: return new (Z) TypeSerializationCluster();
+    case kTypeRefCid: return new (Z) TypeRefSerializationCluster();
+    case kTypeParameterCid: return new (Z) TypeParameterSerializationCluster();
+    case kBoundedTypeCid: return new (Z) BoundedTypeSerializationCluster();
+    case kClosureCid: return new (Z) ClosureSerializationCluster();
+    case kMintCid: return new (Z) MintSerializationCluster();
+    case kBigintCid: return new (Z) BigintSerializationCluster();
+    case kDoubleCid: return new (Z) DoubleSerializationCluster();
+    case kGrowableObjectArrayCid:
+      return new (Z) GrowableObjectArraySerializationCluster();
+    case kStacktraceCid: return new (Z) StacktraceSerializationCluster();
+    case kRegExpCid: return new (Z) RegExpSerializationCluster();
+    case kLinkedHashMapCid: return new (Z) LinkedHashMapSerializationCluster();
+    case kArrayCid:
+      return new (Z) ArraySerializationCluster(kArrayCid);
+    case kImmutableArrayCid:
+      return new (Z) ArraySerializationCluster(kImmutableArrayCid);
+    case kOneByteStringCid: {
+      if (Snapshot::IncludesCode(kind_)) {
+        return new (Z) RODataSerializationCluster(kOneByteStringCid);
+      } else {
+        return new (Z) OneByteStringSerializationCluster();
+      }
+    }
+    case kTwoByteStringCid: {
+      if (Snapshot::IncludesCode(kind_)) {
+        return new (Z) RODataSerializationCluster(kTwoByteStringCid);
+      } else {
+        return new (Z) TwoByteStringSerializationCluster();
+      }
+    }
+    default: break;
+  }
+
+  FATAL1("No cluster defined for cid %" Pd, cid);
+  return NULL;
+}
+
+
+void Serializer::Trace(RawObject* object) {
+  intptr_t cid;
+  if (!object->IsHeapObject()) {
+    cid = kSmiCid;
+  } else {
+    cid = object->GetClassId();
+  }
+
+  SerializationCluster* cluster = clusters_by_cid_[cid];
+  if (cluster == NULL) {
+    cluster = NewClusterForClass(cid);
+    clusters_by_cid_[cid] = cluster;
+  }
+  ASSERT(cluster != NULL);
+  cluster->Trace(this, object);
+}
+
+
+void Serializer::WriteVersionAndFeatures() {
+  const char* expected_version = Version::SnapshotString();
+  ASSERT(expected_version != NULL);
+  const intptr_t version_len = strlen(expected_version);
+  WriteBytes(reinterpret_cast<const uint8_t*>(expected_version), version_len);
+
+  const char* expected_features = Dart::FeaturesString(kind_);
+  ASSERT(expected_features != NULL);
+  const intptr_t features_len = strlen(expected_features);
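+  // Write the terminating NUL as well so the reader can find the end of the
+  // features string.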
+  WriteBytes(reinterpret_cast<const uint8_t*>(expected_features),
+             features_len + 1);
+  free(const_cast<char*>(expected_features));
+}
+
+
+#if defined(DEBUG)
+static const intptr_t kSectionMarker = 0xABAB;
+#endif
+
+void Serializer::Serialize() {
+  while (stack_.length() > 0) {
+    Trace(stack_.RemoveLast());
+  }
+
+  intptr_t num_clusters = 0;
+  for (intptr_t cid = 1; cid < num_cids_; cid++) {
+    SerializationCluster* cluster = clusters_by_cid_[cid];
+    if (cluster != NULL) {
+      num_clusters++;
+    }
+  }
+
+  intptr_t num_objects = num_base_objects_ + num_written_objects_;
+
+  Write<int32_t>(num_objects);
+  Write<int32_t>(num_clusters);
+
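+  // First pass: write each cluster's allocation section, assigning a ref id
+  // to every object in the process.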
+  for (intptr_t cid = 1; cid < num_cids_; cid++) {
+    SerializationCluster* cluster = clusters_by_cid_[cid];
+    if (cluster != NULL) {
+      cluster->WriteAlloc(this);
+#if defined(DEBUG)
+      Write<intptr_t>(next_ref_index_);
+#endif
+    }
+  }
+
+  // We should have assigned a ref to every object we pushed.
+  ASSERT((next_ref_index_ - 1) == num_objects);
+
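+  // Second pass: write each cluster's fill section, emitting object contents
+  // as bytes and refs.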
+  for (intptr_t cid = 1; cid < num_cids_; cid++) {
+    SerializationCluster* cluster = clusters_by_cid_[cid];
+    if (cluster != NULL) {
+      cluster->WriteFill(this);
+#if defined(DEBUG)
+      Write<intptr_t>(kSectionMarker);
+#endif
+    }
+  }
+}
+
+
+void Serializer::AddVMIsolateBaseObjects() {
+  // These objects are always allocated by Object::InitOnce, so they are not
+  // written into the snapshot.
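+  // The order here must exactly match Deserializer::AddVMIsolateBaseObjects,
+  // since base objects are identified only by their position in the ref
+  // array.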
+
+  AddBaseObject(Object::null());
+  AddBaseObject(Object::sentinel().raw());
+  AddBaseObject(Object::transition_sentinel().raw());
+  AddBaseObject(Object::empty_array().raw());
+  AddBaseObject(Object::zero_array().raw());
+  AddBaseObject(Object::dynamic_type().raw());
+  AddBaseObject(Object::void_type().raw());
+  AddBaseObject(Bool::True().raw());
+  AddBaseObject(Bool::False().raw());
+  AddBaseObject(Object::extractor_parameter_types().raw());
+  AddBaseObject(Object::extractor_parameter_names().raw());
+  AddBaseObject(Object::empty_context_scope().raw());
+  AddBaseObject(Object::empty_descriptors().raw());
+  AddBaseObject(Object::empty_var_descriptors().raw());
+  AddBaseObject(Object::empty_exception_handlers().raw());
+
+  for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) {
+    AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[i]);
+  }
+  for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) {
+    AddBaseObject(ICData::cached_icdata_arrays_[i]);
+  }
+
+  ClassTable* table = isolate()->class_table();
+  for (intptr_t cid = kClassCid; cid <= kUnwindErrorCid; cid++) {
+    // Error has no class object.
+    if (cid != kErrorCid) {
+      ASSERT(table->HasValidClassAt(cid));
+      AddBaseObject(table->At(cid));
+    }
+  }
+  AddBaseObject(table->At(kDynamicCid));
+  AddBaseObject(table->At(kVoidCid));
+}
+
+
+intptr_t Serializer::WriteVMSnapshot(const Array& symbols,
+                                     const Array& scripts) {
+  NoSafepointScope no_safepoint;
+
+  AddVMIsolateBaseObjects();
+
+  // Push roots.
+  Push(symbols.raw());
+  Push(scripts.raw());
+  if (Snapshot::IncludesCode(kind_)) {
+    StubCode::Push(this);
+  }
+
+  Serialize();
+
+  // Write roots.
+  WriteRef(symbols.raw());
+  WriteRef(scripts.raw());
+  if (Snapshot::IncludesCode(kind_)) {
+    StubCode::WriteRef(this);
+  }
+
+#if defined(DEBUG)
+  Write<intptr_t>(kSectionMarker);
+#endif
+
+  // Note we are not clearing the object id table. The full ref table
+  // of the vm isolate snapshot serves as the base objects for the
+  // regular isolate snapshot.
+
+  // Return the number of objects; the -1 accounts for the unused ref 0.
+  return next_ref_index_ - 1;
+}
+
+
+void Serializer::WriteFullSnapshot(intptr_t num_base_objects,
+                                   ObjectStore* object_store) {
+  NoSafepointScope no_safepoint;
+
+  if (num_base_objects == 0) {
+    // Unit tests that are not writing a new vm isolate.
+    const Array& base_objects = Object::vm_isolate_snapshot_object_table();
+    for (intptr_t i = 1; i < base_objects.Length(); i++) {
+      AddBaseObject(base_objects.At(i));
+    }
+  } else {
+    // Base objects carried over from WriteVMSnapshot.
+    num_base_objects_ += num_base_objects;
+    next_ref_index_ += num_base_objects;
+  }
+
+  // Push roots.
+  RawObject** from = object_store->from();
+  RawObject** to = object_store->to_snapshot(kind_);
+  for (RawObject** p = from; p <= to; p++) {
+    Push(*p);
+  }
+
+  Serialize();
+
+  // Write roots.
+  for (RawObject** p = from; p <= to; p++) {
+    WriteRef(*p);
+  }
+
+#if defined(DEBUG)
+  Write<intptr_t>(kSectionMarker);
+#endif
+
+  heap_->ResetObjectIdTable();
+}
+
+
+Deserializer::Deserializer(Thread* thread,
+                           Snapshot::Kind kind,
+                           const uint8_t* buffer,
+                           intptr_t size,
+                           const uint8_t* instructions_buffer,
+                           const uint8_t* data_buffer)
+    : StackResource(thread),
+      heap_(thread->isolate()->heap()),
+      zone_(thread->zone()),
+      kind_(kind),
+      stream_(buffer, size),
+      instructions_reader_(NULL),
+      refs_(NULL),
+      next_ref_index_(1),
+      clusters_(NULL) {
+  if (Snapshot::IncludesCode(kind)) {
+    ASSERT(instructions_buffer != NULL);
+  }
+  if (instructions_buffer != NULL) {
+    instructions_reader_ =
+        new (zone_) InstructionsReader(instructions_buffer, data_buffer);
+  }
+}
+
+
+Deserializer::~Deserializer() {
+  delete[] clusters_;
+}
+
+
+DeserializationCluster* Deserializer::ReadCluster() {
+  intptr_t cid = ReadCid();
+
+  Zone* Z = zone_;
+  if ((cid > kNumPredefinedCids) ||
+      (cid == kInstanceCid) ||
+      RawObject::IsTypedDataViewClassId(cid)) {
+    return new (Z) InstanceDeserializationCluster(cid);
+  }
+  if (RawObject::IsExternalTypedDataClassId(cid)) {
+    return new (Z) ExternalTypedDataDeserializationCluster(cid);
+  }
+  if (RawObject::IsTypedDataClassId(cid)) {
+    return new (Z) TypedDataDeserializationCluster(cid);
+  }
+
+  switch (cid) {
+    case kClassCid: return new (Z) ClassDeserializationCluster();
+    case kUnresolvedClassCid:
+      return new (Z) UnresolvedClassDeserializationCluster();
+    case kTypeArgumentsCid:
+      return new (Z) TypeArgumentsDeserializationCluster();
+    case kPatchClassCid: return new (Z) PatchClassDeserializationCluster();
+    case kFunctionCid: return new (Z) FunctionDeserializationCluster();
+    case kClosureDataCid: return new (Z) ClosureDataDeserializationCluster();
+    case kRedirectionDataCid:
+      return new (Z) RedirectionDataDeserializationCluster();
+    case kFieldCid: return new (Z) FieldDeserializationCluster();
+    case kLiteralTokenCid: return new (Z) LiteralTokenDeserializationCluster();
+    case kTokenStreamCid: return new (Z) TokenStreamDeserializationCluster();
+    case kScriptCid: return new (Z) ScriptDeserializationCluster();
+    case kLibraryCid: return new (Z) LibraryDeserializationCluster();
+    case kNamespaceCid: return new (Z) NamespaceDeserializationCluster();
+    case kCodeCid: return new (Z) CodeDeserializationCluster();
+    case kObjectPoolCid: return new (Z) ObjectPoolDeserializationCluster();
+    case kPcDescriptorsCid:
+    case kStackmapCid:
+      return new (Z) RODataDeserializationCluster();
+    case kExceptionHandlersCid:
+      return new (Z) ExceptionHandlersDeserializationCluster();
+    case kContextCid: return new (Z) ContextDeserializationCluster();
+    case kContextScopeCid: return new (Z) ContextScopeDeserializationCluster();
+    case kICDataCid: return new (Z) ICDataDeserializationCluster();
+    case kMegamorphicCacheCid:
+      return new (Z) MegamorphicCacheDeserializationCluster();
+    case kSubtypeTestCacheCid:
+      return new (Z) SubtypeTestCacheDeserializationCluster();
+    case kLanguageErrorCid:
+      return new (Z) LanguageErrorDeserializationCluster();
+    case kUnhandledExceptionCid:
+      return new (Z) UnhandledExceptionDeserializationCluster();
+    case kLibraryPrefixCid:
+      return new (Z) LibraryPrefixDeserializationCluster();
+    case kTypeCid: return new (Z) TypeDeserializationCluster();
+    case kTypeRefCid: return new (Z) TypeRefDeserializationCluster();
+    case kTypeParameterCid:
+      return new (Z) TypeParameterDeserializationCluster();
+    case kBoundedTypeCid: return new (Z) BoundedTypeDeserializationCluster();
+    case kClosureCid: return new (Z) ClosureDeserializationCluster();
+    case kMintCid: return new (Z) MintDeserializationCluster();
+    case kBigintCid: return new (Z) BigintDeserializationCluster();
+    case kDoubleCid: return new (Z) DoubleDeserializationCluster();
+    case kGrowableObjectArrayCid:
+      return new (Z) GrowableObjectArrayDeserializationCluster();
+    case kStacktraceCid: return new (Z) StacktraceDeserializationCluster();
+    case kRegExpCid: return new (Z) RegExpDeserializationCluster();
+    case kLinkedHashMapCid:
+      return new (Z) LinkedHashMapDeserializationCluster();
+    case kArrayCid:
+      return new (Z) ArrayDeserializationCluster(kArrayCid);
+    case kImmutableArrayCid:
+      return new (Z) ArrayDeserializationCluster(kImmutableArrayCid);
+    case kOneByteStringCid: {
+      if (Snapshot::IncludesCode(kind_)) {
+        return new (Z) RODataDeserializationCluster();
+      } else {
+        return new (Z) OneByteStringDeserializationCluster();
+      }
+    }
+    case kTwoByteStringCid: {
+      if (Snapshot::IncludesCode(kind_)) {
+        return new (Z) RODataDeserializationCluster();
+      } else {
+        return new (Z) TwoByteStringDeserializationCluster();
+      }
+    }
+    default: break;
+  }
+  FATAL1("No cluster defined for cid %" Pd, cid);
+  return NULL;
+}
+
+
+RawApiError* Deserializer::VerifyVersionAndFeatures() {
+  // If the version string doesn't match, return an error.
+  // Note: New things are allocated only if we're going to return an error.
+
+  const char* expected_version = Version::SnapshotString();
+  ASSERT(expected_version != NULL);
+  const intptr_t version_len = strlen(expected_version);
+  if (PendingBytes() < version_len) {
+    const intptr_t kMessageBufferSize = 128;
+    char message_buffer[kMessageBufferSize];
+    OS::SNPrint(message_buffer,
+                kMessageBufferSize,
+                "No full snapshot version found, expected '%s'",
+                expected_version);
+    // This can also fail while bringing up the VM isolate, so make sure to
+    // allocate the error message in old space.
+    const String& msg = String::Handle(String::New(message_buffer, Heap::kOld));
+    return ApiError::New(msg, Heap::kOld);
+  }
+
+  const char* version = reinterpret_cast<const char*>(CurrentBufferAddress());
+  ASSERT(version != NULL);
+  if (strncmp(version, expected_version, version_len)) {
+    const intptr_t kMessageBufferSize = 256;
+    char message_buffer[kMessageBufferSize];
+    char* actual_version = OS::StrNDup(version, version_len);
+    OS::SNPrint(message_buffer,
+                kMessageBufferSize,
+                "Wrong %s snapshot version, expected '%s' found '%s'",
+                (Snapshot::IsFull(kind_)) ? "full" : "script",
+                expected_version,
+                actual_version);
+    free(actual_version);
+    // This can also fail while bringing up the VM isolate, so make sure to
+    // allocate the error message in old space.
+    const String& msg = String::Handle(String::New(message_buffer, Heap::kOld));
+    return ApiError::New(msg, Heap::kOld);
+  }
+  Advance(version_len);
+
+  const char* expected_features = Dart::FeaturesString(kind_);
+  ASSERT(expected_features != NULL);
+  const intptr_t expected_len = strlen(expected_features);
+
+  const char* features = reinterpret_cast<const char*>(CurrentBufferAddress());
+  ASSERT(features != NULL);
+  intptr_t buffer_len = OS::StrNLen(features, PendingBytes());
+  if ((buffer_len != expected_len) ||
+      strncmp(features, expected_features, expected_len)) {
+    const intptr_t kMessageBufferSize = 256;
+    char message_buffer[kMessageBufferSize];
+    char* actual_features = OS::StrNDup(features, buffer_len < 128 ? buffer_len
+                                                                   : 128);
+    OS::SNPrint(message_buffer,
+                kMessageBufferSize,
+                "Wrong features in snapshot, expected '%s' found '%s'",
+                expected_features,
+                actual_features);
+    free(const_cast<char*>(expected_features));
+    free(actual_features);
+    // This can also fail while bringing up the VM isolate, so make sure to
+    // allocate the error message in old space.
+    const String& msg = String::Handle(String::New(message_buffer, Heap::kOld));
+    return ApiError::New(msg, Heap::kOld);
+  }
+  free(const_cast<char*>(expected_features));
+  Advance(expected_len + 1);
+  return ApiError::null();
+}
+
+
+void Deserializer::Prepare() {
+  num_objects_ = Read<int32_t>();
+  num_clusters_ = Read<int32_t>();
+
+  clusters_ = new DeserializationCluster*[num_clusters_];
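+  // One extra slot because ref ids start at 1; index 0 is reserved as an
+  // illegal ref.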
+  refs_ = Array::New(num_objects_ + 1, Heap::kOld);
+}
+
+
+void Deserializer::Deserialize() {
+  // TODO(rmacnak): Verify num of base objects.
+
+  {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+        Timeline::GetIsolateStream(), "ReadAlloc"));
+    for (intptr_t i = 0; i < num_clusters_; i++) {
+      clusters_[i] = ReadCluster();
+      clusters_[i]->ReadAlloc(this);
+#if defined(DEBUG)
+      intptr_t serializers_next_ref_index_ = Read<intptr_t>();
+      ASSERT(serializers_next_ref_index_ == next_ref_index_);
+#endif
+    }
+  }
+
+  // We should have completely filled the ref array.
+  ASSERT((next_ref_index_ - 1) == num_objects_);
+
+  {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+        Timeline::GetIsolateStream(), "ReadFill"));
+    for (intptr_t i = 0; i < num_clusters_; i++) {
+      clusters_[i]->ReadFill(this);
+#if defined(DEBUG)
+      intptr_t section_marker = Read<intptr_t>();
+      ASSERT(section_marker == kSectionMarker);
+#endif
+    }
+  }
+}
+
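+
+// Scope that holds the old-space data lock for the duration of
+// deserialization, so that clusters can allocate uninitialized objects
+// without re-acquiring the lock for every allocation.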
+class HeapLocker : public StackResource {
+ public:
+  HeapLocker(Thread* thread, PageSpace* page_space)
+      : StackResource(thread), page_space_(page_space) {
+        page_space_->AcquireDataLock();
+  }
+  ~HeapLocker() {
+    page_space_->ReleaseDataLock();
+  }
+
+ private:
+  PageSpace* page_space_;
+};
+
+
+void Deserializer::AddVMIsolateBaseObjects() {
+  // These objects are always allocated by Object::InitOnce, so they are not
+  // written into the snapshot.
+
+  AddBaseObject(Object::null());
+  AddBaseObject(Object::sentinel().raw());
+  AddBaseObject(Object::transition_sentinel().raw());
+  AddBaseObject(Object::empty_array().raw());
+  AddBaseObject(Object::zero_array().raw());
+  AddBaseObject(Object::dynamic_type().raw());
+  AddBaseObject(Object::void_type().raw());
+  AddBaseObject(Bool::True().raw());
+  AddBaseObject(Bool::False().raw());
+  AddBaseObject(Object::extractor_parameter_types().raw());
+  AddBaseObject(Object::extractor_parameter_names().raw());
+  AddBaseObject(Object::empty_context_scope().raw());
+  AddBaseObject(Object::empty_descriptors().raw());
+  AddBaseObject(Object::empty_var_descriptors().raw());
+  AddBaseObject(Object::empty_exception_handlers().raw());
+
+  for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) {
+    AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[i]);
+  }
+  for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) {
+    AddBaseObject(ICData::cached_icdata_arrays_[i]);
+  }
+
+  ClassTable* table = isolate()->class_table();
+  for (intptr_t cid = kClassCid; cid <= kUnwindErrorCid; cid++) {
+    // Error has no class object.
+    if (cid != kErrorCid) {
+      ASSERT(table->HasValidClassAt(cid));
+      AddBaseObject(table->At(cid));
+    }
+  }
+  AddBaseObject(table->At(kDynamicCid));
+  AddBaseObject(table->At(kVoidCid));
+}
+
+
+void Deserializer::ReadVMSnapshot() {
+  Array& symbol_table = Array::Handle(zone_);
+  Array& refs = Array::Handle(zone_);
+  Prepare();
+
+  {
+    NoSafepointScope no_safepoint;
+    HeapLocker hl(thread(), heap_->old_space());
+
+    AddVMIsolateBaseObjects();
+
+    Deserialize();
+
+    // Read roots.
+    symbol_table ^= ReadRef();
+    isolate()->object_store()->set_symbol_table(symbol_table);
+    ReadRef();  // Script list.
+    if (Snapshot::IncludesCode(kind_)) {
+      StubCode::ReadRef(this);
+    }
+
+#if defined(DEBUG)
+    intptr_t section_marker = Read<intptr_t>();
+    ASSERT(section_marker == kSectionMarker);
+#endif
+
+    refs = refs_;
+    refs_ = NULL;
+  }
+
+  Symbols::InitOnceFromSnapshot(isolate());
+
+  Object::set_vm_isolate_snapshot_object_table(refs);
+
+#if defined(DEBUG)
+  isolate()->ValidateClassTable();
+#endif
+}
+
+void Deserializer::ReadFullSnapshot(ObjectStore* object_store) {
+  Array& refs = Array::Handle();
+  Prepare();
+
+  {
+    NoSafepointScope no_safepoint;
+    HeapLocker hl(thread(), heap_->old_space());
+
+    // N.B.: Skipping index 0 because ref 0 is illegal.
+    const Array& base_objects = Object::vm_isolate_snapshot_object_table();
+    for (intptr_t i = 1; i < base_objects.Length(); i++) {
+      AddBaseObject(base_objects.At(i));
+    }
+
+    Deserialize();
+
+    // Read roots.
+    RawObject** from = object_store->from();
+    RawObject** to = object_store->to_snapshot(kind_);
+    for (RawObject** p = from; p <= to; p++) {
+      *p = ReadRef();
+    }
+
+#if defined(DEBUG)
+    intptr_t section_marker = Read<intptr_t>();
+    ASSERT(section_marker == kSectionMarker);
+#endif
+
+    refs = refs_;
+    refs_ = NULL;
+  }
+
+#if defined(DEBUG)
+  Isolate* isolate = thread()->isolate();
+  isolate->ValidateClassTable();
+  isolate->heap()->Verify();
+#endif
+
+  {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+        Timeline::GetIsolateStream(), "PostLoad"));
+    for (intptr_t i = 0; i < num_clusters_; i++) {
+      clusters_[i]->PostLoad(refs, kind_, zone_);
+    }
+  }
+
+  // Set up the native resolver for the bootstrap implementation.
+  Bootstrap::SetupNativeResolver();
+}
+
+
+// An object visitor which will iterate over all the script objects in the heap
+// and either count them or collect them into an array. This is used during
+// full snapshot generation of the VM isolate to write out all script
+// objects and their accompanying token streams.
+class ScriptVisitor : public ObjectVisitor {
+ public:
+  explicit ScriptVisitor(Thread* thread) :
+      objHandle_(Object::Handle(thread->zone())),
+      count_(0),
+      scripts_(NULL) {}
+
+  ScriptVisitor(Thread* thread, const Array* scripts) :
+      objHandle_(Object::Handle(thread->zone())),
+      count_(0),
+      scripts_(scripts) {}
+
+  void VisitObject(RawObject* obj) {
+    if (obj->IsScript()) {
+      if (scripts_ != NULL) {
+        objHandle_ = obj;
+        scripts_->SetAt(count_, objHandle_);
+      }
+      count_ += 1;
+    }
+  }
+
+  intptr_t count() const { return count_; }
+
+ private:
+  Object& objHandle_;
+  intptr_t count_;
+  const Array* scripts_;
+};
+
+
+FullSnapshotWriter::FullSnapshotWriter(Snapshot::Kind kind,
+                                       uint8_t** vm_isolate_snapshot_buffer,
+                                       uint8_t** isolate_snapshot_buffer,
+                                       ReAlloc alloc,
+                                       InstructionsWriter* instructions_writer)
+    : thread_(Thread::Current()),
+      kind_(kind),
+      vm_isolate_snapshot_buffer_(vm_isolate_snapshot_buffer),
+      isolate_snapshot_buffer_(isolate_snapshot_buffer),
+      alloc_(alloc),
+      vm_isolate_snapshot_size_(0),
+      isolate_snapshot_size_(0),
+      instructions_writer_(instructions_writer),
+      scripts_(Array::Handle(zone())),
+      saved_symbol_table_(Array::Handle(zone())),
+      new_vm_symbol_table_(Array::Handle(zone())) {
+  ASSERT(isolate_snapshot_buffer_ != NULL);
+  ASSERT(alloc_ != NULL);
+  ASSERT(isolate() != NULL);
+  ASSERT(ClassFinalizer::AllClassesFinalized());
+  ASSERT(isolate() != NULL);
+  ASSERT(heap() != NULL);
+  ObjectStore* object_store = isolate()->object_store();
+  ASSERT(object_store != NULL);
+
+#if defined(DEBUG)
+  // Ensure the class table is valid.
+  isolate()->ValidateClassTable();
+#endif
+  // Can't have any mutation happening while we're serializing.
+  ASSERT(isolate()->background_compiler() == NULL);
+
+  if (vm_isolate_snapshot_buffer != NULL) {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+        Timeline::GetIsolateStream(), "PrepareNewVMIsolate"));
+
+    // Collect all the script objects and their accompanying token stream
+    // objects into an array so that we can write it out as part of the VM
+    // isolate snapshot. We first count the number of script objects, allocate
+    // an array and then fill it up with the script objects.
+    ScriptVisitor scripts_counter(thread());
+    heap()->IterateOldObjects(&scripts_counter);
+    Dart::vm_isolate()->heap()->IterateOldObjects(&scripts_counter);
+    intptr_t count = scripts_counter.count();
+    scripts_ = Array::New(count, Heap::kOld);
+    ScriptVisitor script_visitor(thread(), &scripts_);
+    heap()->IterateOldObjects(&script_visitor);
+    Dart::vm_isolate()->heap()->IterateOldObjects(&script_visitor);
+    ASSERT(script_visitor.count() == count);
+
+    // Tuck away the current symbol table.
+    saved_symbol_table_ = object_store->symbol_table();
+
+    // Create a unified symbol table that will be written as the vm isolate's
+    // symbol table.
+    new_vm_symbol_table_ = Symbols::UnifiedSymbolTable();
+
+    // Create an empty symbol table that will be written as the isolate's symbol
+    // table.
+    Symbols::SetupSymbolTable(isolate());
+  } else {
+    // Reuse the current vm isolate.
+  }
+}
+
+FullSnapshotWriter::~FullSnapshotWriter() {
+  // We may run Dart code afterwards, restore the symbol table if needed.
+  if (!saved_symbol_table_.IsNull()) {
+    isolate()->object_store()->set_symbol_table(saved_symbol_table_);
+    saved_symbol_table_ = Array::null();
+  }
+  new_vm_symbol_table_ = Array::null();
+  scripts_ = Array::null();
+}
+
+
+intptr_t FullSnapshotWriter::WriteVmIsolateSnapshot() {
+  NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+      Timeline::GetIsolateStream(), "WriteVmIsolateSnapshot"));
+
+  ASSERT(vm_isolate_snapshot_buffer_ != NULL);
+  Serializer serializer(thread(),
+                        kind_,
+                        vm_isolate_snapshot_buffer_,
+                        alloc_,
+                        kInitialSize,
+                        instructions_writer_);
+
+  serializer.ReserveHeader();
+  serializer.WriteVersionAndFeatures();
+  /*
+   * Now write out the following:
+   * - the symbol table
+   * - all the scripts and token streams for these scripts
+   * - the stub code (precompiled snapshots only)
+   */
+  intptr_t num_objects = serializer.WriteVMSnapshot(new_vm_symbol_table_,
+                                                    scripts_);
+  serializer.FillHeader(serializer.kind());
+
+  vm_isolate_snapshot_size_ = serializer.bytes_written();
+  return num_objects;
+}
+
+
+void FullSnapshotWriter::WriteIsolateFullSnapshot(
+    intptr_t num_base_objects) {
+  NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+      Timeline::GetIsolateStream(), "WriteIsolateFullSnapshot"));
+
+  Serializer serializer(thread(),
+                        kind_,
+                        isolate_snapshot_buffer_,
+                        alloc_,
+                        kInitialSize,
+                        instructions_writer_);
+  ObjectStore* object_store = isolate()->object_store();
+  ASSERT(object_store != NULL);
+
+  serializer.ReserveHeader();
+  serializer.WriteVersionAndFeatures();
+  serializer.WriteFullSnapshot(num_base_objects, object_store);
+  serializer.FillHeader(serializer.kind());
+
+  isolate_snapshot_size_ = serializer.bytes_written();
+}
+
+
+void FullSnapshotWriter::WriteFullSnapshot() {
+  intptr_t num_base_objects;
+  if (vm_isolate_snapshot_buffer() != NULL) {
+    num_base_objects = WriteVmIsolateSnapshot();
+    ASSERT(num_base_objects != 0);
+  } else {
+    num_base_objects = 0;
+  }
+
+  WriteIsolateFullSnapshot(num_base_objects);
+
+  if (Snapshot::IncludesCode(kind_)) {
+    instructions_writer_->Write();
+
+    OS::Print("VMIsolate(CodeSize): %" Pd "\n", VmIsolateSnapshotSize());
+    OS::Print("Isolate(CodeSize): %" Pd "\n", IsolateSnapshotSize());
+    OS::Print("Instructions(CodeSize): %" Pd "\n",
+              instructions_writer_->binary_size());
+    intptr_t total = VmIsolateSnapshotSize() +
+                     IsolateSnapshotSize() +
+                     instructions_writer_->binary_size();
+    OS::Print("Total(CodeSize): %" Pd "\n", total);
+  }
+}
+
+
+RawApiError* IsolateSnapshotReader::ReadFullSnapshot() {
+  Deserializer deserializer(thread_,
+                            kind_,
+                            buffer_,
+                            size_,
+                            instructions_buffer_,
+                            data_buffer_);
+
+  RawApiError* error = deserializer.VerifyVersionAndFeatures();
+  if (error != ApiError::null()) {
+    return error;
+  }
+
+  deserializer.ReadFullSnapshot(thread_->isolate()->object_store());
+
+  return ApiError::null();
+}
+
+
+RawApiError* VmIsolateSnapshotReader::ReadVmIsolateSnapshot() {
+  Deserializer deserializer(thread_,
+                            kind_,
+                            buffer_,
+                            size_,
+                            instructions_buffer_,
+                            data_buffer_);
+
+  RawApiError* error = deserializer.VerifyVersionAndFeatures();
+  if (error != ApiError::null()) {
+    return error;
+  }
+
+  deserializer.ReadVMSnapshot();
+
+  Dart::set_instructions_snapshot_buffer(instructions_buffer_);
+  Dart::set_data_snapshot_buffer(data_buffer_);
+
+  return ApiError::null();
+}
+
+}  // namespace dart
diff --git a/runtime/vm/clustered_snapshot.h b/runtime/vm/clustered_snapshot.h
new file mode 100644
index 0000000..b2bdd0c
--- /dev/null
+++ b/runtime/vm/clustered_snapshot.h
@@ -0,0 +1,478 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#ifndef VM_CLUSTERED_SNAPSHOT_H_
+#define VM_CLUSTERED_SNAPSHOT_H_
+
+#include "platform/assert.h"
+#include "vm/allocation.h"
+#include "vm/bitfield.h"
+#include "vm/datastream.h"
+#include "vm/exceptions.h"
+#include "vm/globals.h"
+#include "vm/growable_array.h"
+#include "vm/heap.h"
+#include "vm/isolate.h"
+#include "vm/object.h"
+#include "vm/snapshot.h"
+#include "vm/version.h"
+#include "vm/visitor.h"
+
+namespace dart {
+
+// Forward declarations.
+class Serializer;
+class Deserializer;
+class ObjectStore;
+
+// For full snapshots, we use a clustered snapshot format that trades longer
+// serialization time for faster deserialization time and smaller snapshots.
+// Objects are clustered by class to allow writing type information once per
+// class instead of once per object, and to allow filling the objects in a
+// tight loop. The snapshot has two major sections: the first describes how
+// to allocate the objects and the second describes how to initialize them.
+// Deserialization starts by allocating a reference array large enough to
+// hold the base objects (objects already available to both the serializer
+// and deserializer) and the objects written in the snapshot. The allocation
+// section is then read for each cluster, filling the reference array. Then
+// the initialization/fill section is read for each cluster, using the
+// indices into the reference array to fill pointers. At this point, every
+// object has been touched exactly once and in order, making this approach
+// very cache friendly. Finally, each cluster is given an opportunity to
+// perform fix-ups that require the graph to be fully loaded, such as
+// rehashing, though most clusters do not require any.
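+//
+// Schematically, after the snapshot header and the version/features string,
+// the stream reads as (an illustrative layout; debug builds also interleave
+// extra markers):
+//
+//   num_objects  num_clusters
+//   cluster[0] alloc section ... cluster[n-1] alloc section
+//   cluster[0] fill section  ... cluster[n-1] fill section
+//   roots (written as refs)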
+
+class SerializationCluster : public ZoneAllocated {
+ public:
+  virtual ~SerializationCluster() { }
+
+  // Add [object] to the cluster and push its outgoing references.
+  virtual void Trace(Serializer* serializer, RawObject* object) = 0;
+
+  // Write the cluster type and the information needed to allocate the
+  // cluster's objects. For fixed-size objects, this is just the object
+  // count. For variable-size objects, it is the object count plus the length
+  // of each object.
+  virtual void WriteAlloc(Serializer* serializer) = 0;
+
+  // Write the byte and reference data of the cluster's objects.
+  virtual void WriteFill(Serializer* serializer) = 0;
+};
+
+
+class DeserializationCluster : public ZoneAllocated {
+ public:
+  DeserializationCluster() : start_index_(-1), stop_index_(-1) { }
+  virtual ~DeserializationCluster() { }
+
+  // Allocate memory for all objects in the cluster and write their addresses
+  // into the ref array. Do not touch this memory.
+  virtual void ReadAlloc(Deserializer* deserializer) = 0;
+
+  // Initialize the cluster's objects. Do not touch the memory of other objects.
+  virtual void ReadFill(Deserializer* deserializer) = 0;
+
+  // Complete any action that requires the full graph to be deserialized, such
+  // as rehashing.
+  virtual void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { }
+
+ protected:
+  // The range of the ref array that belongs to this cluster.
+  intptr_t start_index_;
+  intptr_t stop_index_;
+};
+
+
+enum {
+  kRefTagSize = 1,
+  kRefTagShift = 1,
+  kRefTagMask = 1,
+  kSmiRefTag = 0x0,
+  kHeapRefTag = 0x1,
+};
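+
+// A reference is written as a single tagged value: a Smi is written as its
+// raw tagged value (low bit 0), while a heap object is written as
+// (ref_index << kRefTagShift) | kHeapRefTag. For example, the Smi 3 is
+// written as 0x6 and the object with ref index 3 as 0x7; ReadRef undoes the
+// encoding by dispatching on the low bit.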
+
+
+class Serializer : public StackResource {
+ public:
+  Serializer(Thread* thread,
+             Snapshot::Kind kind,
+             uint8_t** buffer,
+             ReAlloc alloc,
+             intptr_t initial_size,
+             InstructionsWriter* instructions_writer);
+  ~Serializer();
+
+  intptr_t WriteVMSnapshot(const Array& symbols, const Array& scripts);
+  void WriteFullSnapshot(intptr_t num_base_objects, ObjectStore* object_store);
+
+  void AddVMIsolateBaseObjects();
+
+  void AddBaseObject(RawObject* base_object) {
+    AssignRef(base_object);
+    num_base_objects_++;
+  }
+
+  void AssignRef(RawObject* object) {
+    ASSERT(next_ref_index_ != 0);
+    heap_->SetObjectId(object, next_ref_index_);
+    ASSERT(heap_->GetObjectId(object) == next_ref_index_);
+    next_ref_index_++;
+  }
+
+  void Push(RawObject* object) {
+    if (!object->IsHeapObject()) {
+      return;
+    }
+
+    if (object->IsCode() && !Snapshot::IncludesCode(kind_)) {
+      return;  // Do not trace, will write null.
+    }
+
+    if (object->IsSendPort()) {
+      // TODO(rmacnak): Do a better job of resetting fields in precompilation
+      // and assert this is unreachable.
+      return;  // Do not trace, will write null.
+    }
+
+    intptr_t id = heap_->GetObjectId(object);
+    if (id == 0) {
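+      // Not yet seen: mark the object with a placeholder non-zero id so it
+      // is enqueued only once. Its real ref id is assigned later, during
+      // WriteAlloc.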
+      heap_->SetObjectId(object, 1);
+      ASSERT(heap_->GetObjectId(object) != 0);
+      stack_.Add(object);
+      num_written_objects_++;
+    }
+  }
+
+  void AddUntracedRef() {
+    num_written_objects_++;
+  }
+
+  void Trace(RawObject* object);
+
+  SerializationCluster* NewClusterForClass(intptr_t cid);
+
+  void ReserveHeader() {
+    // Make room for recording snapshot buffer size.
+    stream_.set_current(stream_.buffer() + Snapshot::kHeaderSize);
+  }
+
+  void FillHeader(Snapshot::Kind kind) {
+    int64_t* data = reinterpret_cast<int64_t*>(stream_.buffer());
+    data[Snapshot::kLengthIndex] = stream_.bytes_written();
+    data[Snapshot::kSnapshotFlagIndex] = kind;
+  }
+
+  void WriteVersionAndFeatures();
+
+  void Serialize();
+  intptr_t bytes_written() { return stream_.bytes_written(); }
+
+  // Writes raw data to the stream (basic type).
+  // sizeof(T) must be in {1,2,4,8}.
+  template <typename T>
+  void Write(T value) {
+    WriteStream::Raw<sizeof(T), T>::Write(&stream_, value);
+  }
+
+  void WriteBytes(const uint8_t* addr, intptr_t len) {
+    stream_.WriteBytes(addr, len);
+  }
+
+  void WriteRef(RawObject* object) {
+    if (!object->IsHeapObject()) {
+      ASSERT(static_cast<intptr_t>(kSmiRefTag) ==
+             static_cast<intptr_t>(kSmiTag));
+      Write<intptr_t>(reinterpret_cast<intptr_t>(object));
+      return;
+    }
+
+    intptr_t id = heap_->GetObjectId(object);
+    if (id == 0) {
+      if (object->IsCode() && !Snapshot::IncludesCode(kind_)) {
+        WriteRef(Object::null());
+        return;
+      }
+      if (object->IsSendPort()) {
+        // TODO(rmacnak): Do a better job of resetting fields in precompilation
+        // and assert this is unreachable.
+        WriteRef(Object::null());
+        return;
+      }
+      FATAL("Missing ref");
+    }
+    Write<intptr_t>((id << kRefTagShift) | kHeapRefTag);
+  }
+
+  void WriteTokenPosition(TokenPosition pos) {
+    Write<int32_t>(pos.SnapshotEncode());
+  }
+
+  void WriteCid(intptr_t cid) {
+    COMPILE_ASSERT(RawObject::kClassIdTagSize <= 32);
+    Write<int32_t>(cid);
+  }
+
+  int32_t GetTextOffset(RawInstructions* instr, RawCode* code) {
+    return instructions_writer_->GetOffsetFor(instr, code);
+  }
+
+  int32_t GetRODataOffset(RawObject* object) {
+    return instructions_writer_->GetObjectOffsetFor(object);
+  }
+
+  Snapshot::Kind kind() const { return kind_; }
+
+ private:
+  Heap* heap_;
+  Zone* zone_;
+  Snapshot::Kind kind_;
+  WriteStream stream_;
+  InstructionsWriter* instructions_writer_;
+  SerializationCluster** clusters_by_cid_;
+  GrowableArray<RawObject*> stack_;
+  intptr_t num_cids_;
+  intptr_t num_base_objects_;
+  intptr_t num_written_objects_;
+  intptr_t next_ref_index_;
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(Serializer);
+};
+
+
+class Deserializer : public StackResource {
+ public:
+  Deserializer(Thread* thread,
+               Snapshot::Kind kind,
+               const uint8_t* buffer,
+               intptr_t size,
+               const uint8_t* instructions_buffer,
+               const uint8_t* data_buffer);
+  ~Deserializer();
+
+  void ReadFullSnapshot(ObjectStore* object_store);
+  void ReadVMSnapshot();
+
+  void AddVMIsolateBaseObjects();
+
+  static void InitializeHeader(RawObject* raw,
+                               intptr_t cid,
+                               intptr_t size,
+                               bool is_vm_isolate,
+                               bool is_canonical = false);
+
+  // Reads raw data (for basic types).
+  // sizeof(T) must be in {1,2,4,8}.
+  template <typename T>
+  T Read() {
+    return ReadStream::Raw<sizeof(T), T>::Read(&stream_);
+  }
+
+  void ReadBytes(uint8_t* addr, intptr_t len) {
+    stream_.ReadBytes(addr, len);
+  }
+
+  const uint8_t* CurrentBufferAddress() const {
+    return stream_.AddressOfCurrentPosition();
+  }
+
+  void Advance(intptr_t value) {
+    stream_.Advance(value);
+  }
+
+  intptr_t PendingBytes() const {
+    return stream_.PendingBytes();
+  }
+
+  void AddBaseObject(RawObject* base_object) {
+    AssignRef(base_object);
+  }
+
+  void AssignRef(RawObject* object) {
+    ASSERT(next_ref_index_ <= num_objects_);
+    refs_->ptr()->data()[next_ref_index_] = object;
+    next_ref_index_++;
+  }
+
+  RawObject* Ref(intptr_t index) const {
+    ASSERT(index > 0);
+    ASSERT(index <= num_objects_);
+    return refs_->ptr()->data()[index];
+  }
+
+  RawObject* ReadRef() {
+    intptr_t index = Read<intptr_t>();
+    if ((index & kRefTagMask) == kSmiRefTag) {
+      ASSERT(static_cast<intptr_t>(kSmiRefTag) ==
+             static_cast<intptr_t>(kSmiTag));
+      return reinterpret_cast<RawSmi*>(index);
+    }
+    return Ref(index >> kRefTagShift);
+  }
+
+  TokenPosition ReadTokenPosition() {
+    return TokenPosition::SnapshotDecode(Read<int32_t>());
+  }
+
+  intptr_t ReadCid() {
+    COMPILE_ASSERT(RawObject::kClassIdTagSize <= 32);
+    return Read<int32_t>();
+  }
+
+  uword GetInstructionsAt(int32_t offset) {
+    return instructions_reader_->GetInstructionsAt(offset);
+  }
+
+  RawObject* GetObjectAt(int32_t offset) {
+    return instructions_reader_->GetObjectAt(offset);
+  }
+
+  RawApiError* VerifyVersionAndFeatures();
+
+  void Prepare();
+  void Deserialize();
+
+  DeserializationCluster* ReadCluster();
+
+  intptr_t next_index() const { return next_ref_index_; }
+  Heap* heap() const { return heap_; }
+  Snapshot::Kind kind() const { return kind_; }
+
+ private:
+  Heap* heap_;
+  Zone* zone_;
+  Snapshot::Kind kind_;
+  ReadStream stream_;
+  InstructionsReader* instructions_reader_;
+  intptr_t num_objects_;
+  intptr_t num_clusters_;
+  RawArray* refs_;
+  intptr_t next_ref_index_;
+  DeserializationCluster** clusters_;
+};
+
+
+class FullSnapshotWriter {
+ public:
+  static const intptr_t kInitialSize = 64 * KB;
+  FullSnapshotWriter(Snapshot::Kind kind,
+                     uint8_t** vm_isolate_snapshot_buffer,
+                     uint8_t** isolate_snapshot_buffer,
+                     ReAlloc alloc,
+                     InstructionsWriter* instructions_writer);
+  ~FullSnapshotWriter();
+
+  uint8_t** vm_isolate_snapshot_buffer() const {
+    return vm_isolate_snapshot_buffer_;
+  }
+
+  uint8_t** isolate_snapshot_buffer() const {
+    return isolate_snapshot_buffer_;
+  }
+
+  Thread* thread() const { return thread_; }
+  Zone* zone() const { return thread_->zone(); }
+  Isolate* isolate() const { return thread_->isolate(); }
+  Heap* heap() const { return isolate()->heap(); }
+
+  // Writes a full snapshot of the Isolate.
+  void WriteFullSnapshot();
+
+  intptr_t VmIsolateSnapshotSize() const {
+    return vm_isolate_snapshot_size_;
+  }
+  intptr_t IsolateSnapshotSize() const {
+    return isolate_snapshot_size_;
+  }
+
+ private:
+  // Writes a snapshot of the VM Isolate.
+  intptr_t WriteVmIsolateSnapshot();
+
+  // Writes a full snapshot of a regular Dart Isolate.
+  void WriteIsolateFullSnapshot(intptr_t num_base_objects);
+
+  Thread* thread_;
+  Snapshot::Kind kind_;
+  uint8_t** vm_isolate_snapshot_buffer_;
+  uint8_t** isolate_snapshot_buffer_;
+  ReAlloc alloc_;
+  intptr_t vm_isolate_snapshot_size_;
+  intptr_t isolate_snapshot_size_;
+  ForwardList* forward_list_;
+  InstructionsWriter* instructions_writer_;
+  Array& scripts_;
+  Array& saved_symbol_table_;
+  Array& new_vm_symbol_table_;
+
+  DISALLOW_COPY_AND_ASSIGN(FullSnapshotWriter);
+};
+
+
+class VmIsolateSnapshotReader {
+ public:
+  VmIsolateSnapshotReader(Snapshot::Kind kind,
+                          const uint8_t* buffer,
+                          intptr_t size,
+                          const uint8_t* instructions_buffer,
+                          const uint8_t* data_buffer,
+                          Thread* thread) :
+      kind_(kind),
+      thread_(thread),
+      buffer_(buffer),
+      size_(size),
+      instructions_buffer_(instructions_buffer),
+      data_buffer_(data_buffer) {
+    thread->isolate()->set_compilation_allowed(kind != Snapshot::kAppNoJIT);
+  }
+
+  ~VmIsolateSnapshotReader() { }
+
+  RawApiError* ReadVmIsolateSnapshot();
+
+ private:
+  Snapshot::Kind kind_;
+  Thread* thread_;
+  const uint8_t* buffer_;
+  intptr_t size_;
+  const uint8_t* instructions_buffer_;
+  const uint8_t* data_buffer_;
+
+  DISALLOW_COPY_AND_ASSIGN(VmIsolateSnapshotReader);
+};
+
+
+class IsolateSnapshotReader {
+ public:
+  IsolateSnapshotReader(Snapshot::Kind kind,
+                        const uint8_t* buffer,
+                        intptr_t size,
+                        const uint8_t* instructions_buffer,
+                        const uint8_t* data_buffer,
+                        Thread* thread) :
+      kind_(kind),
+      thread_(thread),
+      buffer_(buffer),
+      size_(size),
+      instructions_buffer_(instructions_buffer),
+      data_buffer_(data_buffer) {
+    thread->isolate()->set_compilation_allowed(kind != Snapshot::kAppNoJIT);
+  }
+
+  ~IsolateSnapshotReader() {}
+
+  RawApiError* ReadFullSnapshot();
+
+ private:
+  Snapshot::Kind kind_;
+  Thread* thread_;
+  const uint8_t* buffer_;
+  intptr_t size_;
+  const uint8_t* instructions_buffer_;
+  const uint8_t* data_buffer_;
+
+  DISALLOW_COPY_AND_ASSIGN(IsolateSnapshotReader);
+};
+
+}  // namespace dart
+
+#endif  // VM_CLUSTERED_SNAPSHOT_H_
diff --git a/runtime/vm/code_generator.cc b/runtime/vm/code_generator.cc
index 683834b..4b572a7 100644
--- a/runtime/vm/code_generator.cc
+++ b/runtime/vm/code_generator.cc
@@ -63,6 +63,7 @@
 
 DECLARE_FLAG(int, reload_every);
 DECLARE_FLAG(bool, reload_every_optimized);
+DECLARE_FLAG(bool, reload_every_back_off);
 
 #ifdef DEBUG
 DEFINE_FLAG(charp, gc_at_instance_allocation, NULL,
@@ -109,8 +110,7 @@
   if (length.IsSmi()) {
     const intptr_t len = Smi::Cast(length).Value();
     if ((len >= 0) && (len <= Array::kMaxElements)) {
-      Heap::Space space = isolate->heap()->SpaceForAllocation(kArrayCid);
-      const Array& array = Array::Handle(Array::New(len, space));
+      const Array& array = Array::Handle(Array::New(len, Heap::kNew));
       arguments.SetReturn(array);
       TypeArguments& element_type =
           TypeArguments::CheckedHandle(arguments.ArgAt(1));
@@ -159,7 +159,7 @@
     }
   }
 #endif
-  Heap::Space space = isolate->heap()->SpaceForAllocation(cls.id());
+  Heap::Space space = Heap::kNew;
   const Instance& instance = Instance::Handle(Instance::New(cls, space));
 
   arguments.SetReturn(instance);
@@ -1297,6 +1297,9 @@
     DeoptimizeFunctionsOnStack();
   }
   if (do_reload) {
+    if (FLAG_reload_every_back_off) {
+      FLAG_reload_every *= 2;
+    }
     NOT_IN_PRODUCT(isolate->ReloadSources();)
   }
   if (FLAG_support_debugger && do_stacktrace) {
diff --git a/runtime/vm/constants_dbc.h b/runtime/vm/constants_dbc.h
index 02d86888..5d901b8 100644
--- a/runtime/vm/constants_dbc.h
+++ b/runtime/vm/constants_dbc.h
@@ -83,9 +83,10 @@
 //
 //    Unreachable instruction.
 //
-//  - Nop
+//  - Nop D
 //
-//    This instuction does nothing.
+//    This instruction does nothing. It may refer to an object in the
+//    constant pool that other instructions decode.
 //
 //  - Compile
 //
@@ -222,6 +223,11 @@
 //        IfNeStrictTOS
 //        Jump T         ;; jump if not equal
 //
+//  - If<Cond>Null rA
+//
+//    Cond is Eq or Ne. Skips the next instruction unless the given condition
+//    holds.
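+//
+//    For example (an illustrative, non-normative sequence):
+//
+//        IfEqNull rA
+//        Jump T         ;; taken only if FP[rA] is null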
+//
 //  - CreateArrayTOS
 //
 //    Allocate array of length SP[0] with type arguments SP[-1].
@@ -365,14 +371,32 @@
 //
 //    Assert that TOS is a boolean (A = 1) or that TOS is not null (A = 0).
 //
+//  - TestSmi rA, rD
+//
+//    If FP[rA] & FP[rD] != 0, then skip the next instruction. FP[rA] and FP[rD]
+//    must be Smis.
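+//
+//    For example (illustrative only):
+//
+//        TestSmi rA, rD
+//        Jump T         ;; taken only if FP[rA] & FP[rD] == 0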
+//
 //  - CheckSmi rA
 //
 //    If FP[rA] is a Smi, then skip the next instruction.
 //
 //  - CheckClassId rA, D
 //
-//    If the object at FP[rA]'s class id matches hthe class id in PP[D], then
-//    skip the following instruction.
+//    If the object at FP[rA]'s class id matches the class id D, then skip the
+//    following instruction.
+//
+//  - CheckDenseSwitch rA, D
+//
+//    Skips the next 3 instructions if the object at FP[rA] has a cid that is
+//    valid for a dense switch whose low cid is encoded in the following Nop
+//    instruction and whose cid mask is encoded in the Nop instruction after
+//    that, or if D == 1 and FP[rA] is a Smi. Otherwise skips only 2
+//    instructions.
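+//
+//    A sketch of the expected layout (labels and operands illustrative):
+//
+//        CheckDenseSwitch rA, 1
+//        Nop low-cid
+//        Nop D          ;; D indexes the cid mask (a Smi) in the constant pool
+//        Jump Tfail     ;; executed only when the check fails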
+//
+//  - CheckCids rA, rB, rC
+//
+//    Skips rC + 1 instructions if the object at FP[rA] is a Smi and
+//    rB == 1, or if FP[rA]'s cid is found in the array of cids encoded by the
+//    following rC Nop instructions. Otherwise skips only rC instructions.
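+//
+//    A sketch with rC = 2 (labels and operands illustrative):
+//
+//        CheckCids rA, 1, 2
+//        Nop cid1
+//        Nop cid2
+//        Jump Tfail     ;; executed only when no cid matches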
 //
 //  - CheckStack
 //
@@ -436,7 +460,7 @@
 //
 #define BYTECODES_LIST(V)                              \
   V(Trap,                            0, ___, ___, ___) \
-  V(Nop,                             0, ___, ___, ___) \
+  V(Nop,                             D, lit, ___, ___) \
   V(Compile,                         0, ___, ___, ___) \
   V(HotCheck,                      A_D, num, num, ___) \
   V(Intrinsic,                       A, num, ___, ___) \
@@ -495,6 +519,8 @@
   V(IfEqStrict,                    A_D, reg, reg, ___) \
   V(IfNeStrictNum,                 A_D, reg, reg, ___) \
   V(IfEqStrictNum,                 A_D, reg, reg, ___) \
+  V(IfEqNull,                        A, reg, ___, ___) \
+  V(IfNeNull,                        A, reg, ___, ___) \
   V(CreateArrayTOS,                  0, ___, ___, ___) \
   V(Allocate,                        D, lit, ___, ___) \
   V(AllocateT,                       0, ___, ___, ___) \
@@ -520,8 +546,11 @@
   V(InstanceOf,                      A, num, ___, ___) \
   V(AssertAssignable,                D, num, lit, ___) \
   V(AssertBoolean,                   A, num, ___, ___) \
+  V(TestSmi,                       A_D, reg, reg, ___) \
   V(CheckSmi,                        A, reg, ___, ___) \
-  V(CheckClassId,                  A_D, reg, lit, ___) \
+  V(CheckClassId,                  A_D, reg, num, ___) \
+  V(CheckDenseSwitch,              A_D, reg, num, ___) \
+  V(CheckCids,                   A_B_C, reg, num, ___) \
   V(CheckStack,                      0, ___, ___, ___) \
   V(DebugStep,                       0, ___, ___, ___) \
   V(DebugBreak,                      A, num, ___, ___) \
@@ -639,7 +668,11 @@
 const FpuRegister FpuTMP = kFakeFpuRegister;
 const intptr_t kNumberOfFpuRegisters = 1;
 
-enum Condition { EQ, NE };
+// After a comparison, the condition NEXT_IS_TRUE means the following
+// instruction is executed if the comparison is true and skipped over
+// otherwise. Condition NEXT_IS_FALSE means the following instruction is
+// executed if the comparison is false and skipped over otherwise.
+enum Condition { NEXT_IS_TRUE, NEXT_IS_FALSE };
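+// For example, a strict-equality comparison that yields NEXT_IS_TRUE executes
+// a following Jump (to the true successor) only when its operands are equal.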
 
 }  // namespace dart
 
diff --git a/runtime/vm/cpuinfo_fuchsia.cc b/runtime/vm/cpuinfo_fuchsia.cc
new file mode 100644
index 0000000..04fe196
--- /dev/null
+++ b/runtime/vm/cpuinfo_fuchsia.cc
@@ -0,0 +1,48 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/cpuinfo.h"
+
+#include "platform/assert.h"
+
+// TODO(zra): Use "vm/cpuid.h"
+
+namespace dart {
+
+CpuInfoMethod CpuInfo::method_ = kCpuInfoDefault;
+const char* CpuInfo::fields_[kCpuInfoMax] = {0};
+
+void CpuInfo::InitOnce() {
+  UNIMPLEMENTED();
+}
+
+
+void CpuInfo::Cleanup() {
+  UNIMPLEMENTED();
+}
+
+
+bool CpuInfo::FieldContains(CpuInfoIndices idx, const char* search_string) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+
+const char* CpuInfo::ExtractField(CpuInfoIndices idx) {
+  UNIMPLEMENTED();
+  return "<undefined>";
+}
+
+
+bool CpuInfo::HasField(const char* field) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/dart.cc b/runtime/vm/dart.cc
index 93cac99..62c7226 100644
--- a/runtime/vm/dart.cc
+++ b/runtime/vm/dart.cc
@@ -5,6 +5,7 @@
 #include "vm/dart.h"
 
 #include "vm/become.h"
+#include "vm/clustered_snapshot.h"
 #include "vm/code_observers.h"
 #include "vm/cpu.h"
 #include "vm/dart_api_state.h"
@@ -673,6 +674,12 @@
 #elif defined(TARGET_ARCH_DBC64)
     buffer.AddString(" dbc64");
 #endif
+  } else if (Snapshot::IsFull(kind)) {
+#if defined(ARCH_IS_32BIT)
+    buffer.AddString(" 32");
+#else
+    buffer.AddString(" 64");
+#endif
   }
 
   return buffer.Steal();
diff --git a/runtime/vm/dart_api_impl.cc b/runtime/vm/dart_api_impl.cc
index 597bae2..c9942da 100644
--- a/runtime/vm/dart_api_impl.cc
+++ b/runtime/vm/dart_api_impl.cc
@@ -9,6 +9,7 @@
 #include "platform/assert.h"
 #include "lib/stacktrace.h"
 #include "vm/class_finalizer.h"
+#include "vm/clustered_snapshot.h"
 #include "vm/compiler.h"
 #include "vm/dart.h"
 #include "vm/dart_api_impl.h"
@@ -16,7 +17,6 @@
 #include "vm/dart_api_state.h"
 #include "vm/dart_entry.h"
 #include "vm/debugger.h"
-#include "vm/dev_fs.h"
 #include "vm/exceptions.h"
 #include "vm/flags.h"
 #include "vm/growable_array.h"
diff --git a/runtime/vm/dart_api_impl_test.cc b/runtime/vm/dart_api_impl_test.cc
index 1b5a537..53d7755 100644
--- a/runtime/vm/dart_api_impl_test.cc
+++ b/runtime/vm/dart_api_impl_test.cc
@@ -14,7 +14,6 @@
 #include "vm/class_finalizer.h"
 #include "vm/dart_api_impl.h"
 #include "vm/dart_api_state.h"
-#include "vm/dev_fs.h"
 #include "vm/lockers.h"
 #include "vm/timeline.h"
 #include "vm/unit_test.h"
@@ -3036,8 +3035,6 @@
   // Expect small garbage to be collected.
   EXPECT_EQ(kHugeExternalSize,
             isolate->heap()->ExternalInWords(Heap::kOld) * kWordSize);
-  Dart_DeleteWeakPersistentHandle(reinterpret_cast<Dart_Isolate>(isolate),
-                                  weak);
   Dart_ExitScope();
 }
 
diff --git a/runtime/vm/dart_entry.h b/runtime/vm/dart_entry.h
index 8c530ea..e52961b 100644
--- a/runtime/vm/dart_entry.h
+++ b/runtime/vm/dart_entry.h
@@ -106,6 +106,8 @@
 
   friend class SnapshotReader;
   friend class SnapshotWriter;
+  friend class Serializer;
+  friend class Deserializer;
   friend class Simulator;
   DISALLOW_COPY_AND_ASSIGN(ArgumentsDescriptor);
 };
diff --git a/runtime/vm/deferred_objects.h b/runtime/vm/deferred_objects.h
index 35556f8..de2262c 100644
--- a/runtime/vm/deferred_objects.h
+++ b/runtime/vm/deferred_objects.h
@@ -231,20 +231,28 @@
   // a graph which can contain cycles.
   void Create();
 
+  RawObject* GetArg(intptr_t index) const {
+#if !defined(TARGET_ARCH_DBC)
+    return args_[index];
+#else
+    return args_[-index];
+#endif
+  }
+
   RawObject* GetClass() const {
-    return args_[kClassIndex];
+    return GetArg(kClassIndex);
   }
 
   RawObject* GetLength() const {
-    return args_[kLengthIndex];
+    return GetArg(kLengthIndex);
   }
 
   RawObject* GetFieldOffset(intptr_t index) const {
-    return args_[kFieldsStartIndex + kFieldEntrySize * index + kOffsetIndex];
+    return GetArg(kFieldsStartIndex + kFieldEntrySize * index + kOffsetIndex);
   }
 
   RawObject* GetValue(intptr_t index) const {
-    return args_[kFieldsStartIndex + kFieldEntrySize * index + kValueIndex];
+    return GetArg(kFieldsStartIndex + kFieldEntrySize * index + kValueIndex);
   }
 
   // Amount of fields that have to be initialized.
diff --git a/runtime/vm/deopt_instructions.cc b/runtime/vm/deopt_instructions.cc
index ce668f4..f31a897 100644
--- a/runtime/vm/deopt_instructions.cc
+++ b/runtime/vm/deopt_instructions.cc
@@ -692,7 +692,6 @@
       return;
     }
 
-#if !defined(TARGET_ARCH_DBC)
     // We don't always have the Code object for the frame's corresponding
     // unoptimized code as it may have been collected. Use a stub as the pc
     // marker until we can recreate that Code object during deferred
@@ -700,7 +699,6 @@
     // a pc marker.
     *reinterpret_cast<RawObject**>(dest_addr) =
         StubCode::FrameAwaitingMaterialization_entry()->code();
-#endif
     deopt_context->DeferPcMarkerMaterialization(object_table_index_, dest_addr);
   }
 
diff --git a/runtime/vm/dev_fs.cc b/runtime/vm/dev_fs.cc
deleted file mode 100644
index afbb23b..0000000
--- a/runtime/vm/dev_fs.cc
+++ /dev/null
@@ -1,375 +0,0 @@
-// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-#include <map>
-#include <string>
-#include <vector>
-
-#include "vm/dev_fs.h"
-
-#include "vm/hash_table.h"
-#include "vm/json_stream.h"
-#include "vm/lockers.h"
-#include "vm/object.h"
-#include "vm/unicode.h"
-
-namespace dart {
-
-#ifndef PRODUCT
-
-static const uint8_t decode_table[256] = {
-  64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
-  64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
-  64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 62, 64, 64, 64, 63,
-  52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 64, 64, 64, 64, 64, 64,
-  64,  0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14,
-  15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 64, 64, 64, 64, 64,
-  64, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
-  41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 64, 64, 64, 64, 64,
-  64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
-  64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
-  64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
-  64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
-  64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
-  64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
-  64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
-  64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64
-};
-
-class Base64 {
- public:
-  static void decode(const char* base64,
-                     std::vector<uint8_t>* output) {
-    ASSERT(output != NULL);
-    ASSERT(base64 != NULL);
-    const intptr_t base64_len = strlen(base64);
-    int b[4];
-    for (intptr_t i = 0; i < base64_len; i += 4) {
-      b[0] = decode_table[static_cast<uint8_t>(base64[i])];
-      b[1] = decode_table[static_cast<uint8_t>(base64[i + 1])];
-      b[2] = decode_table[static_cast<uint8_t>(base64[i + 2])];
-      b[3] = decode_table[static_cast<uint8_t>(base64[i + 3])];
-      output->push_back((b[0] << 2) | (b[1] >> 4));
-      if (b[2] < 64) {
-        output->push_back((b[1] << 4) | (b[2] >> 2));
-        if (b[3] < 64)  {
-          output->push_back((b[2] << 6) | b[3]);
-        }
-      }
-    }
-  }
-};
-
-
-class FileSystem {
- public:
-  explicit FileSystem(const std::string& name)
-    : name_(name) {
-  }
-
-  ~FileSystem() {
-  }
-
-  bool ReadFile(const std::string& path,
-                std::vector<uint8_t>** file_contents) {
-    *file_contents = NULL;
-    std::map<std::string, std::vector<uint8_t>*>::iterator iter;
-    iter = files_.find(path);
-    if (iter == files_.end()) {
-      return false;
-    }
-    *file_contents = iter->second;
-    return true;
-  }
-
-  void DeleteFile(const std::string& path) {
-    std::map<std::string, std::vector<uint8_t>*>::iterator iter;
-    iter = files_.find(path);
-    if (iter == files_.end()) {
-      return;
-    }
-    std::vector<uint8_t>* contents = iter->second;
-    files_.erase(iter);
-    delete contents;
-  }
-
-  void WriteFile(const std::string& path,
-                 const char* file_contents) {
-    DeleteFile(path);
-    std::vector<uint8_t>* data = new std::vector<uint8_t>();
-    Base64::decode(file_contents, data);
-    files_[path] = data;
-  }
-
-  void ListFiles(JSONStream* js) {
-    JSONObject jsobj(js);
-    jsobj.AddProperty("type", "FSFilesList");
-    JSONArray jsarr(&jsobj, "files");
-    std::map<std::string, std::vector<uint8_t>*>::iterator iter;
-    for (iter = files_.begin(); iter != files_.end(); iter++) {
-      JSONObject file_info(&jsarr);
-      file_info.AddProperty("name", iter->first.c_str());
-      file_info.AddProperty64("size",
-                              static_cast<int64_t>(iter->second->size()));
-    }
-  }
-
- private:
-  std::string name_;
-
-  std::map<std::string, std::vector<uint8_t>*> files_;
-};
-
-// Some static state is held outside of the DevFS class so that we don't
-// have to include stl headers in our vm/ headers.
-static std::map<std::string, FileSystem*>* file_systems_;
-
-Mutex* DevFS::mutex_ = NULL;
-
-
-void DevFS::Init() {
-  if (mutex_ != NULL) {
-    // Already initialized.
-    ASSERT(file_systems_ != NULL);
-    return;
-  }
-  mutex_ = new Mutex();
-  file_systems_ = new std::map<std::string, FileSystem*>();
-  ASSERT(mutex_ != NULL);
-  ASSERT(file_systems_ != NULL);
-}
-
-
-void DevFS::Cleanup() {
-  delete mutex_;
-  mutex_ = NULL;
-  std::map<std::string, FileSystem*>::iterator iter;
-  for (iter = file_systems_->begin(); iter != file_systems_->end(); iter++) {
-    FileSystem* fs = iter->second;
-    delete fs;
-  }
-  delete file_systems_;
-  file_systems_ = NULL;
-}
-
-
-void DevFS::ListFileSystems(JSONStream* js) {
-  SafepointMutexLocker ml(mutex_);
-  JSONObject jsobj(js);
-  jsobj.AddProperty("type", "FSList");
-  JSONArray jsarr(&jsobj, "fsNames");
-
-  std::map<std::string, FileSystem*>::iterator iter;
-  for (iter = file_systems_->begin(); iter != file_systems_->end(); iter++) {
-    const std::string& key = iter->first;
-    jsarr.AddValue(key.c_str());
-  }
-}
-
-
-FileSystem* DevFS::LookupFileSystem(const char* fs_name) {
-  std::string key = std::string(fs_name);
-  std::map<std::string, FileSystem*>::iterator iter;
-  iter = file_systems_->find(key);
-  if (iter != file_systems_->end()) {
-    return iter->second;
-  }
-  return NULL;
-}
-
-
-FileSystem* DevFS::LookupFileSystem(const String& fs_name) {
-  return LookupFileSystem(fs_name.ToCString());
-}
-
-
-void DevFS::CreateFileSystem(JSONStream* js, const String& fs_name) {
-  SafepointMutexLocker ml(mutex_);
-  // TODO(turnidge): Ensure that fs_name is a legal URI host value, i.e. ascii.
-  if (LookupFileSystem(fs_name) != NULL) {
-    js->PrintError(kFileSystemAlreadyExists,
-                   "%s: file system '%s' already exists",
-                   js->method(), fs_name.ToCString());
-    return;
-  }
-
-  std::string key = std::string(fs_name.ToCString());
-  FileSystem* file_system = new FileSystem(key);
-  (*file_systems_)[key] = file_system;
-
-  JSONObject jsobj(js);
-  jsobj.AddProperty("type", "Success");
-}
-
-
-void DevFS::DeleteFileSystem(JSONStream* js, const String& fs_name) {
-  SafepointMutexLocker ml(mutex_);
-  FileSystem* file_system = LookupFileSystem(fs_name);
-  if (file_system == NULL) {
-    js->PrintError(kFileSystemDoesNotExist,
-                   "%s: file system '%s' does not exist",
-                   js->method(), fs_name.ToCString());
-    return;
-  }
-  std::string key = std::string(fs_name.ToCString());
-  file_systems_->erase(key);
-  delete file_system;
-  JSONObject jsobj(js);
-  jsobj.AddProperty("type", "Success");
-}
-
-
-void DevFS::ListFiles(JSONStream* js, const String& fs_name) {
-  SafepointMutexLocker ml(mutex_);
-  FileSystem* file_system = LookupFileSystem(fs_name);
-  if (file_system == NULL) {
-    js->PrintError(kFileSystemDoesNotExist,
-                   "%s: file system '%s' does not exist",
-                   js->method(), fs_name.ToCString());
-    return;
-  }
-
-  file_system->ListFiles(js);
-}
-
-
-static void PrintWriteFilesError(JSONStream* js,
-                                 intptr_t i) {
-  js->PrintError(kInvalidParams,
-                 "%s: files array invalid at index '%" Pd "'",
-                 js->method(), i);
-}
-
-
-void DevFS::WriteFiles(JSONStream* js,
-                       const String& fs_name,
-                       const Array& files) {
-  SafepointMutexLocker ml(mutex_);
-  FileSystem* file_system = LookupFileSystem(fs_name);
-  if (file_system == NULL) {
-    js->PrintError(kFileSystemDoesNotExist,
-                   "%s: file system '%s' does not exist",
-                   js->method(), fs_name.ToCString());
-    return;
-  }
-
-  Object& test = Object::Handle();
-  GrowableObjectArray& file_info = GrowableObjectArray::Handle();
-  String& path = String::Handle();
-  String& file_contents = String::Handle();
-
-  // First, validate the array of files is properly formed.
-  for (intptr_t i = 0; i < files.Length(); i++) {
-    test = files.At(i);
-    if (!test.IsGrowableObjectArray()) {
-      PrintWriteFilesError(js, i);
-      return;
-    }
-    file_info ^= test.raw();
-    if (file_info.Length() != 2) {
-      PrintWriteFilesError(js, i);
-      return;
-    }
-    test = file_info.At(0);
-    if (!test.IsString()) {
-      PrintWriteFilesError(js, i);
-      return;
-    }
-    std::string key = std::string(String::Cast(test).ToCString());
-    if ((key.size() == 0) || (key[0] != '/')) {
-      js->PrintError(kInvalidParams,
-                     "%s: file system path '%s' must begin with a /",
-                     js->method(), String::Cast(test).ToCString());
-      return;
-    }
-    test = file_info.At(1);
-    if (!test.IsString()) {
-      PrintWriteFilesError(js, i);
-      return;
-    }
-  }
-
-  // Now atomically update the file system.
-  for (intptr_t i = 0; i < files.Length(); i++) {
-    file_info = GrowableObjectArray::RawCast(files.At(i));
-    path = String::RawCast(file_info.At(0));
-    file_contents = String::RawCast(file_info.At(1));
-    file_system->WriteFile(path.ToCString(),
-                           file_contents.ToCString());
-  }
-
-  JSONObject jsobj(js);
-  jsobj.AddProperty("type", "Success");
-}
-
-
-void DevFS::WriteFile(JSONStream* js,
-                      const String& fs_name,
-                      const String& path,
-                      const String& file_contents) {
-  SafepointMutexLocker ml(mutex_);
-  FileSystem* file_system = LookupFileSystem(fs_name);
-  if (file_system == NULL) {
-    js->PrintError(kFileSystemDoesNotExist,
-                   "%s: file system '%s' does not exist",
-                   js->method(), fs_name.ToCString());
-    return;
-  }
-
-  std::string key = std::string(path.ToCString());
-  if ((key.size() == 0) || (key[0] != '/')) {
-    js->PrintError(kInvalidParams,
-                   "%s: file system path '%s' must begin with a /",
-                   js->method(), path.ToCString());
-    return;
-  }
-
-  file_system->WriteFile(path.ToCString(),
-                         file_contents.ToCString());
-
-  JSONObject jsobj(js);
-  jsobj.AddProperty("type", "Success");
-}
-
-
-void DevFS::ReadFile(JSONStream* js,
-                     const String& fs_name,
-                     const String& path) {
-  SafepointMutexLocker ml(mutex_);
-  FileSystem* file_system = LookupFileSystem(fs_name);
-  if (file_system == NULL) {
-    js->PrintError(kFileSystemDoesNotExist,
-                   "%s: file system '%s' does not exist",
-                   js->method(), fs_name.ToCString());
-    return;
-  }
-
-  std::string key = std::string(path.ToCString());
-  if ((key.size() == 0) || (key[0] != '/')) {
-    js->PrintError(kInvalidParams,
-                   "%s: file system path '%s' must begin with a /",
-                   js->method(), path.ToCString());
-    return;
-  }
-  std::vector<uint8_t>* file_contents;
-
-  bool success = file_system->ReadFile(key, &file_contents);
-
-  if (!success) {
-    js->PrintError(kFileDoesNotExist,
-                   "%s: file 'dart-devfs://%s/%s' does not exist",
-                   js->method(), fs_name.ToCString(), path.ToCString());
-    return;
-  }
-
-  JSONObject jsobj(js);
-  jsobj.AddProperty("type", "FSFile");
-  jsobj.AddPropertyBase64("fileContents",
-                          &((*file_contents)[0]),
-                          file_contents->size());
-}
-
-#endif  // !PRODUCT
-
-}  // namespace dart
diff --git a/runtime/vm/dev_fs.h b/runtime/vm/dev_fs.h
deleted file mode 100644
index 75032bd..0000000
--- a/runtime/vm/dev_fs.h
+++ /dev/null
@@ -1,55 +0,0 @@
-// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-#ifndef VM_DEV_FS_H_
-#define VM_DEV_FS_H_
-
-#include "vm/globals.h"
-
-#include "vm/dart_api_impl.h"
-
-namespace dart {
-
-class Array;
-class FileSystem;
-class JSONStream;
-class Mutex;
-class ObjectPointerVisitor;
-class RawArray;
-class RawObject;
-class String;
-
-
-// Manages dart-devfs:// file systems. These file systems are "virtual"
-// and accessed via the service protocol.
-class DevFS {
- public:
-  static void Init();
-  static void Cleanup();
-
-  static void ListFileSystems(JSONStream* js);
-  static void CreateFileSystem(JSONStream* js, const String& fs_name);
-  static void DeleteFileSystem(JSONStream* js, const String& fs_name);
-  static void ListFiles(JSONStream* js,
-                        const String& fs_name);
-  static void WriteFiles(JSONStream* js,
-                         const String& fs_name,
-                         const Array& files);
-  static void WriteFile(JSONStream* js,
-                        const String& fs_name,
-                        const String& path,
-                        const String& file_contents);
-  static void ReadFile(JSONStream* js,
-                       const String& fs_name,
-                       const String& path);
-
- private:
-  static Mutex* mutex_;
-  static FileSystem* LookupFileSystem(const String& fs_name);
-  static FileSystem* LookupFileSystem(const char* fs_name);
-};
-
-}  // namespace dart
-
-#endif  // VM_DEV_FS_H_
diff --git a/runtime/vm/disassembler.cc b/runtime/vm/disassembler.cc
index 9f3d4a3..eef90c3 100644
--- a/runtime/vm/disassembler.cc
+++ b/runtime/vm/disassembler.cc
@@ -184,12 +184,11 @@
 }
 
 
-void Disassembler::DisassembleCode(const Function& function, bool optimized) {
-  const char* function_fullname = function.ToFullyQualifiedCString();
+void Disassembler::DisassembleCodeHelper(
+    const char* function_fullname, const Code& code, bool optimized) {
   THR_Print("Code for %sfunction '%s' {\n",
             optimized ? "optimized " : "",
             function_fullname);
-  const Code& code = Code::Handle(function.CurrentCode());
   code.Disassemble();
   THR_Print("}\n");
 
@@ -328,6 +327,22 @@
   }
 }
 
+
+void Disassembler::DisassembleCode(const Function& function, bool optimized) {
+  const char* function_fullname = function.ToFullyQualifiedCString();
+  const Code& code = Code::Handle(function.CurrentCode());
+  DisassembleCodeHelper(function_fullname, code, optimized);
+}
+
+
+void Disassembler::DisassembleCodeUnoptimized(
+    const Function& function, bool optimized) {
+  const char* function_fullname = function.ToFullyQualifiedCString();
+  const Code& code = Code::Handle(function.unoptimized_code());
+  DisassembleCodeHelper(function_fullname, code, optimized);
+}
+
+
 #endif  // !PRODUCT
 
 }  // namespace dart
diff --git a/runtime/vm/disassembler.h b/runtime/vm/disassembler.h
index c7a1eec..fc3649a 100644
--- a/runtime/vm/disassembler.h
+++ b/runtime/vm/disassembler.h
@@ -137,8 +137,13 @@
   static bool CanFindOldObject(uword addr);
 
   static void DisassembleCode(const Function& function, bool optimized);
+  static void DisassembleCodeUnoptimized(
+      const Function& function, bool optimized);
 
  private:
+  static void DisassembleCodeHelper(
+      const char* function_fullname, const Code& code, bool optimized);
+
   static const int kHexadecimalBufferSize = 32;
   static const int kUserReadableBufferSize = 256;
 };
diff --git a/runtime/vm/flag_list.h b/runtime/vm/flag_list.h
index a6e91c5..c22e43c 100644
--- a/runtime/vm/flag_list.h
+++ b/runtime/vm/flag_list.h
@@ -132,12 +132,6 @@
   "Precompilation compiler mode")                                              \
 C(precompiled_runtime, true, false, bool, false,                               \
   "Precompiled runtime mode")                                                  \
-R(pretenure_all, false, bool, false,                                           \
-  "Global pretenuring (for testing).")                                         \
-P(pretenure_interval, int, 10,                                                 \
-  "Back off pretenuring after this many cycles.")                              \
-P(pretenure_threshold, int, 98,                                                \
-  "Trigger pretenuring when this many percent are promoted.")                  \
 R(print_ssa_liveness, false, bool, false,                                      \
   "Print liveness for ssa variables.")                                         \
 R(print_ssa_liveranges, false, bool, false,                                    \
diff --git a/runtime/vm/flow_graph_builder.cc b/runtime/vm/flow_graph_builder.cc
index d1e0b85..aeea819 100644
--- a/runtime/vm/flow_graph_builder.cc
+++ b/runtime/vm/flow_graph_builder.cc
@@ -3660,7 +3660,7 @@
                                      kEmitStoreBarrier,
                                      token_pos);
   // Maybe initializing unboxed store.
-  store->set_is_potential_unboxed_initialization(true);
+  store->set_is_initialization(node->is_initializer());
   ReturnDefinition(store);
 }
 
diff --git a/runtime/vm/flow_graph_compiler.h b/runtime/vm/flow_graph_compiler.h
index e728582..0a30dc9 100644
--- a/runtime/vm/flow_graph_compiler.h
+++ b/runtime/vm/flow_graph_compiler.h
@@ -532,6 +532,9 @@
   void EmitDeopt(intptr_t deopt_id,
                  ICData::DeoptReasonId reason,
                  uint32_t flags = 0);
+
+  // If the cid does not fit in 16 bits, then this will cause a bailout.
+  uint16_t ToEmbeddableCid(intptr_t cid, Instruction* instruction);
 #endif  // defined(TARGET_ARCH_DBC)
 
   void AddDeoptIndexAtCall(intptr_t deopt_id, TokenPosition token_pos);
diff --git a/runtime/vm/flow_graph_compiler_arm.cc b/runtime/vm/flow_graph_compiler_arm.cc
index 269e668..4162943 100644
--- a/runtime/vm/flow_graph_compiler_arm.cc
+++ b/runtime/vm/flow_graph_compiler_arm.cc
@@ -20,7 +20,6 @@
 #include "vm/stack_frame.h"
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
@@ -1252,8 +1251,7 @@
 #endif  // DEBUG
   __ LoadFieldFromOffset(kWord, R1, R0, Array::element_offset(edge_id));
   __ add(R1, R1, Operand(Smi::RawValue(1)));
-  __ StoreIntoObjectNoBarrierOffset(
-      R0, Array::element_offset(edge_id), R1, Assembler::kOnlySmi);
+  __ StoreIntoObjectNoBarrierOffset(R0, Array::element_offset(edge_id), R1);
 #if defined(DEBUG)
   assembler_->set_use_far_branches(old_use_far_branches);
 #endif  // DEBUG
diff --git a/runtime/vm/flow_graph_compiler_dbc.cc b/runtime/vm/flow_graph_compiler_dbc.cc
index 81c639e8..8fb0b4c 100644
--- a/runtime/vm/flow_graph_compiler_dbc.cc
+++ b/runtime/vm/flow_graph_compiler_dbc.cc
@@ -20,7 +20,6 @@
 #include "vm/stack_frame.h"
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
@@ -398,6 +397,16 @@
 }
 
 
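+// Class ids embedded in DBC instructions must fit in 16 bits; a wider cid
+// bails out of optimized compilation via Instruction::Unsupported().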
+uint16_t FlowGraphCompiler::ToEmbeddableCid(intptr_t cid,
+                                            Instruction* instruction) {
+  if (!Utils::IsUint(16, cid)) {
+    instruction->Unsupported(this);
+    UNREACHABLE();
+  }
+  return static_cast<uint16_t>(cid);
+}
+
+
 #undef __
 #define __ compiler_->assembler()->
 
@@ -417,6 +426,7 @@
     __ LoadConstant(destination.reg(), source.constant());
   } else {
     compiler_->Bailout("Unsupported move");
+    UNREACHABLE();
   }
 
   move->Eliminate();
diff --git a/runtime/vm/flow_graph_compiler_ia32.cc b/runtime/vm/flow_graph_compiler_ia32.cc
index 64d92a9..4a036e2 100644
--- a/runtime/vm/flow_graph_compiler_ia32.cc
+++ b/runtime/vm/flow_graph_compiler_ia32.cc
@@ -22,7 +22,6 @@
 #include "vm/stack_frame.h"
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
diff --git a/runtime/vm/flow_graph_compiler_x64.cc b/runtime/vm/flow_graph_compiler_x64.cc
index b76da0b..5cc5c93 100644
--- a/runtime/vm/flow_graph_compiler_x64.cc
+++ b/runtime/vm/flow_graph_compiler_x64.cc
@@ -19,7 +19,6 @@
 #include "vm/stack_frame.h"
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
diff --git a/runtime/vm/freelist.cc b/runtime/vm/freelist.cc
index b90f93f..ceae578 100644
--- a/runtime/vm/freelist.cc
+++ b/runtime/vm/freelist.cc
@@ -25,8 +25,7 @@
   uword tags = 0;
   tags = RawObject::SizeTag::update(size, tags);
   tags = RawObject::ClassIdTag::update(kFreeListElement, tags);
-  // All words in a freelist element header must look like smis; see
-  // TryAllocateSmiInitializedLocked.
+  // All words in a freelist element header should look like Smis.
   ASSERT(!reinterpret_cast<RawObject*>(tags)->IsHeapObject());
 
   result->tags_ = tags;
diff --git a/runtime/vm/freelist.h b/runtime/vm/freelist.h
index 2c47512..975e50c 100644
--- a/runtime/vm/freelist.h
+++ b/runtime/vm/freelist.h
@@ -18,8 +18,7 @@
 // pointer to chain elements of the list together. For objects larger than the
 // object size encodable in tags field, the size of the element is embedded in
 // the element at the address following the next_ field. All words written by
-// the freelist are guaranteed to look like smis, as required by
-// TryAllocateSmiInitializedLocked.
+// the freelist are guaranteed to look like Smis.
 // A FreeListElement never has its header mark bit set.
 class FreeListElement {
  public:
diff --git a/runtime/vm/heap.cc b/runtime/vm/heap.cc
index 46fd761..21a4b46 100644
--- a/runtime/vm/heap.cc
+++ b/runtime/vm/heap.cc
@@ -39,8 +39,7 @@
       finalization_tasks_(0),
       read_only_(false),
       gc_new_space_in_progress_(false),
-      gc_old_space_in_progress_(false),
-      pretenure_policy_(0) {
+      gc_old_space_in_progress_(false) {
   for (int sel = 0;
        sel < kNumWeakSelectors;
        sel++) {
@@ -148,14 +147,6 @@
 }
 
 
-uword Heap::AllocatePretenured(intptr_t size) {
-  ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0);
-  uword addr = old_space_.TryAllocateDataBump(size, PageSpace::kControlGrowth);
-  if (addr != 0) return addr;
-  return AllocateOld(size, HeapPage::kData);
-}
-
-
 void Heap::AllocateExternal(intptr_t size, Space space) {
   ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0);
   if (space == kNew) {
@@ -381,7 +372,6 @@
     UpdateClassHeapStatsBeforeGC(kNew);
     new_space_.Scavenge(invoke_api_callbacks);
     isolate()->class_table()->UpdatePromoted();
-    UpdatePretenurePolicy();
     RecordAfterGC(kNew);
     PrintStats();
     NOT_IN_PRODUCT(PrintStatsToTimeline(&tds));
@@ -464,41 +454,6 @@
 #endif
 
 
-bool Heap::ShouldPretenure(intptr_t class_id) const {
-  if (class_id == kOneByteStringCid) {
-    return pretenure_policy_ > 0;
-  } else {
-    return false;
-  }
-}
-
-
-void Heap::UpdatePretenurePolicy() {
-  if (FLAG_disable_alloc_stubs_after_gc) {
-    ClassTable* table = isolate_->class_table();
-    Zone* zone = Thread::Current()->zone();
-    for (intptr_t cid = 1; cid < table->NumCids(); ++cid) {
-      if (((cid >= kNumPredefinedCids) || (cid == kArrayCid)) &&
-          table->IsValidIndex(cid) &&
-          table->HasValidClassAt(cid)) {
-        const Class& cls = Class::Handle(zone, table->At(cid));
-        cls.DisableAllocationStub();
-      }
-    }
-  }
-  ClassHeapStats* stats =
-      isolate_->class_table()->StatsWithUpdatedSize(kOneByteStringCid);
-  int allocated = stats->pre_gc.new_count;
-  int promo_percent = (allocated == 0) ? 0 :
-      (100 * stats->promoted_count) / allocated;
-  if (promo_percent >= FLAG_pretenure_threshold) {
-    pretenure_policy_ += FLAG_pretenure_interval;
-  } else {
-    pretenure_policy_ = Utils::Maximum(0, pretenure_policy_ - 1);
-  }
-}
-
-
 void Heap::UpdateGlobalMaxUsed() {
   ASSERT(isolate_ != NULL);
   // We are accessing the used in words count for both new and old space
@@ -531,16 +486,11 @@
 }
 
 
-Heap::Space Heap::SpaceForAllocation(intptr_t cid) {
-  return FLAG_pretenure_all ? kPretenured : kNew;
-}
-
-
 intptr_t Heap::TopOffset(Heap::Space space) {
   if (space == kNew) {
     return OFFSET_OF(Heap, new_space_) + Scavenger::top_offset();
   } else {
-    ASSERT(space == kPretenured);
+    ASSERT(space == kOld);
     return OFFSET_OF(Heap, old_space_) + PageSpace::top_offset();
   }
 }
@@ -550,7 +500,7 @@
   if (space == kNew) {
     return OFFSET_OF(Heap, new_space_) + Scavenger::end_offset();
   } else {
-    ASSERT(space == kPretenured);
+    ASSERT(space == kOld);
     return OFFSET_OF(Heap, old_space_) + PageSpace::end_offset();
   }
 }
@@ -734,6 +684,7 @@
 }
 
 
+#ifndef PRODUCT
 void Heap::PrintToJSONObject(Space space, JSONObject* object) const {
   if (space == kNew) {
     new_space_.PrintToJSONObject(object);
@@ -741,6 +692,7 @@
     old_space_.PrintToJSONObject(object);
   }
 }
+#endif  // !PRODUCT
 
 
 void Heap::RecordBeforeGC(Space space, GCReason reason) {
diff --git a/runtime/vm/heap.h b/runtime/vm/heap.h
index d13150e..d90adc4 100644
--- a/runtime/vm/heap.h
+++ b/runtime/vm/heap.h
@@ -31,8 +31,6 @@
     kNew,
     kOld,
     kCode,
-    // TODO(koda): Harmonize all old-space allocation and get rid of this.
-    kPretenured,
   };
 
   enum WeakSelector {
@@ -81,8 +79,6 @@
         return AllocateOld(size, HeapPage::kData);
       case kCode:
         return AllocateOld(size, HeapPage::kExecutable);
-      case kPretenured:
-        return AllocatePretenured(size);
       default:
         UNREACHABLE();
     }
@@ -148,7 +144,6 @@
   // Accessors for inlined allocation in generated code.
   static intptr_t TopOffset(Space space);
   static intptr_t EndOffset(Space space);
-  static Space SpaceForAllocation(intptr_t class_id);
 
   // Initialize the heap and register it with the isolate.
   static void Init(Isolate* isolate,
@@ -244,12 +239,14 @@
     return size <= kNewAllocatableSize;
   }
 
+#ifndef PRODUCT
   void PrintToJSONObject(Space space, JSONObject* object) const;
 
   // The heap map contains the sizes and class ids for the objects in each page.
   void PrintHeapMapToJSONStream(Isolate* isolate, JSONStream* stream) {
-    return old_space_.PrintHeapMapToJSONStream(isolate, stream);
+    old_space_.PrintHeapMapToJSONStream(isolate, stream);
   }
+#endif  // !PRODUCT
 
   Isolate* isolate() const { return isolate_; }
 
@@ -260,8 +257,6 @@
   intptr_t finalization_tasks() const { return finalization_tasks_; }
   void set_finalization_tasks(intptr_t count) { finalization_tasks_ = count; }
 
-  bool ShouldPretenure(intptr_t class_id) const;
-
   void SetupExternalPage(void* pointer, uword size, bool is_executable) {
     old_space_.SetupExternalPage(pointer, size, is_executable);
   }
@@ -306,7 +301,6 @@
 
   uword AllocateNew(intptr_t size);
   uword AllocateOld(intptr_t size, HeapPage::PageType type);
-  uword AllocatePretenured(intptr_t size);
 
   // Visit all pointers. Caller must ensure concurrent sweeper is not running,
   // and the visitor must not allocate.
@@ -331,7 +325,6 @@
   void RecordAfterGC(Space space);
   void PrintStats();
   void UpdateClassHeapStatsBeforeGC(Heap::Space space);
-  void UpdatePretenurePolicy();
   void PrintStatsToTimeline(TimelineEventScope* event);
 
   // Updates gc in progress flags.
@@ -371,8 +364,6 @@
   bool gc_new_space_in_progress_;
   bool gc_old_space_in_progress_;
 
-  int pretenure_policy_;
-
   friend class Become;  // VisitObjectPointers
   friend class ServiceEvent;
   friend class PageSpace;  // VerifyGC
diff --git a/runtime/vm/il_printer.cc b/runtime/vm/il_printer.cc
index 204725c..5104545 100644
--- a/runtime/vm/il_printer.cc
+++ b/runtime/vm/il_printer.cc
@@ -1275,8 +1275,15 @@
   return Thread::Current()->zone()->MakeCopyOfString(buffer);
 }
 
+
 #else  // PRODUCT
 
+
+const char* Instruction::ToCString() const {
+  return DebugName();
+}
+
+
 void FlowGraphPrinter::PrintOneInstruction(Instruction* instr,
                                            bool print_locations) {
   UNREACHABLE();
diff --git a/runtime/vm/intermediate_language.cc b/runtime/vm/intermediate_language.cc
index cc5e51f..689d535 100644
--- a/runtime/vm/intermediate_language.cc
+++ b/runtime/vm/intermediate_language.cc
@@ -126,6 +126,12 @@
 }
 
 
+void Instruction::Unsupported(FlowGraphCompiler* compiler) {
+  compiler->Bailout(ToCString());
+  UNREACHABLE();
+}
+
+
 bool Value::Equals(Value* other) const {
   return definition() == other->definition();
 }
@@ -3329,9 +3335,10 @@
 
   __ PushConstant(function());
   __ StaticCall(ArgumentCount(), argdesc_kidx);
-  compiler->AddCurrentDescriptor(RawPcDescriptors::kUnoptStaticCall,
-                                 deopt_id(),
-                                 token_pos());
+  RawPcDescriptors::Kind kind = (compiler->is_optimizing())
+                              ? RawPcDescriptors::kOther
+                              : RawPcDescriptors::kUnoptStaticCall;
+  compiler->AddCurrentDescriptor(kind, deopt_id(), token_pos());
 
   compiler->RecordAfterCall(this);
 
diff --git a/runtime/vm/intermediate_language.h b/runtime/vm/intermediate_language.h
index 2705634..7307344 100644
--- a/runtime/vm/intermediate_language.h
+++ b/runtime/vm/intermediate_language.h
@@ -891,6 +891,8 @@
 
   void ClearEnv() { env_ = NULL; }
 
+  void Unsupported(FlowGraphCompiler* compiler);
+
  protected:
   // GetDeoptId and/or CopyDeoptIdFrom.
   friend class CallSiteInliner;
@@ -3581,8 +3583,7 @@
         offset_in_bytes_(field.Offset()),
         emit_store_barrier_(emit_store_barrier),
         token_pos_(token_pos),
-        is_potential_unboxed_initialization_(false),
-        is_object_reference_initialization_(false) {
+        is_initialization_(false) {
     SetInputAt(kInstancePos, instance);
     SetInputAt(kValuePos, value);
     CheckField(field);
@@ -3597,20 +3598,14 @@
         offset_in_bytes_(offset_in_bytes),
         emit_store_barrier_(emit_store_barrier),
         token_pos_(token_pos),
-        is_potential_unboxed_initialization_(false),
-        is_object_reference_initialization_(false) {
+        is_initialization_(false) {
     SetInputAt(kInstancePos, instance);
     SetInputAt(kValuePos, value);
   }
 
   DECLARE_INSTRUCTION(StoreInstanceField)
 
-  void set_is_potential_unboxed_initialization(bool value) {
-    is_potential_unboxed_initialization_ = value;
-  }
-  void set_is_object_reference_initialization(bool value) {
-    is_object_reference_initialization_ = value;
-  }
+  void set_is_initialization(bool value) { is_initialization_ = value; }
 
   enum {
     kInstancePos = 0,
@@ -3619,12 +3614,8 @@
 
   Value* instance() const { return inputs_[kInstancePos]; }
   Value* value() const { return inputs_[kValuePos]; }
-  bool is_potential_unboxed_initialization() const {
-    return is_potential_unboxed_initialization_;
-  }
-  bool is_object_reference_initialization() const {
-    return is_object_reference_initialization_;
-  }
+  bool is_initialization() const { return is_initialization_; }
+
   virtual TokenPosition token_pos() const { return token_pos_; }
 
   const Field& field() const { return field_; }
@@ -3669,11 +3660,8 @@
   intptr_t offset_in_bytes_;
   const StoreBarrierType emit_store_barrier_;
   const TokenPosition token_pos_;
-  // This may be the first store to an unboxed field.
-  bool is_potential_unboxed_initialization_;
-  // True if this store initializes an object reference field of an object that
-  // was allocated uninitialized; see AllocateUninitializedContext.
-  bool is_object_reference_initialization_;
+  // Marks initializing stores, e.g. in a constructor.
+  bool is_initialization_;
 
   DISALLOW_COPY_AND_ASSIGN(StoreInstanceFieldInstr);
 };
diff --git a/runtime/vm/intermediate_language_arm.cc b/runtime/vm/intermediate_language_arm.cc
index a0fa8cc..1750418 100644
--- a/runtime/vm/intermediate_language_arm.cc
+++ b/runtime/vm/intermediate_language_arm.cc
@@ -2011,7 +2011,7 @@
           ((IsPotentialUnboxedStore()) ? 3 : 0);
   LocationSummary* summary = new(zone) LocationSummary(
       zone, kNumInputs, kNumTemps,
-          ((IsUnboxedStore() && opt && is_potential_unboxed_initialization_) ||
+          ((IsUnboxedStore() && opt && is_initialization()) ||
            IsPotentialUnboxedStore())
           ? LocationSummary::kCallOnSlowPath
           : LocationSummary::kNoCall);
@@ -2072,7 +2072,7 @@
     const Register temp2 = locs()->temp(1).reg();
     const intptr_t cid = field().UnboxedFieldCid();
 
-    if (is_potential_unboxed_initialization_) {
+    if (is_initialization()) {
       const Class* cls = NULL;
       switch (cid) {
         case kDoubleCid:
@@ -2214,21 +2214,14 @@
                              CanValueBeSmi());
   } else {
     if (locs()->in(1).IsConstant()) {
-      __ StoreIntoObjectNoBarrierOffset(
-          instance_reg,
-          offset_in_bytes_,
-          locs()->in(1).constant(),
-          is_object_reference_initialization_ ?
-              Assembler::kEmptyOrSmiOrNull :
-              Assembler::kHeapObjectOrSmi);
+      __ StoreIntoObjectNoBarrierOffset(instance_reg,
+                                        offset_in_bytes_,
+                                        locs()->in(1).constant());
     } else {
       const Register value_reg = locs()->in(1).reg();
       __ StoreIntoObjectNoBarrierOffset(instance_reg,
                                         offset_in_bytes_,
-                                        value_reg,
-                                        is_object_reference_initialization_ ?
-                                            Assembler::kEmptyOrSmiOrNull :
-                                            Assembler::kHeapObjectOrSmi);
+                                        value_reg);
     }
   }
   __ Bind(&skip_store);
@@ -2345,12 +2338,12 @@
   // R3: new object end address.
 
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, Array::type_arguments_offset()),
                               kElemTypeReg);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, Array::length_offset()),
                               kLengthReg);
 
diff --git a/runtime/vm/intermediate_language_arm64.cc b/runtime/vm/intermediate_language_arm64.cc
index a2fa2cb..642bc9e 100644
--- a/runtime/vm/intermediate_language_arm64.cc
+++ b/runtime/vm/intermediate_language_arm64.cc
@@ -1761,7 +1761,7 @@
           ((IsPotentialUnboxedStore()) ? 2 : 0);
   LocationSummary* summary = new(zone) LocationSummary(
       zone, kNumInputs, kNumTemps,
-          ((IsUnboxedStore() && opt && is_potential_unboxed_initialization_) ||
+          ((IsUnboxedStore() && opt && is_initialization()) ||
            IsPotentialUnboxedStore())
           ? LocationSummary::kCallOnSlowPath
           : LocationSummary::kNoCall);
@@ -1798,7 +1798,7 @@
     const Register temp2 = locs()->temp(1).reg();
     const intptr_t cid = field().UnboxedFieldCid();
 
-    if (is_potential_unboxed_initialization_) {
+    if (is_initialization()) {
       const Class* cls = NULL;
       switch (cid) {
         case kDoubleCid:
diff --git a/runtime/vm/intermediate_language_dbc.cc b/runtime/vm/intermediate_language_dbc.cc
index c1719d4..8c954b6 100644
--- a/runtime/vm/intermediate_language_dbc.cc
+++ b/runtime/vm/intermediate_language_dbc.cc
@@ -102,8 +102,6 @@
   M(UnboxInteger32)                                                            \
   M(CheckedSmiOp)                                                              \
   M(CheckArrayBound)                                                           \
-  M(CheckClass)                                                                \
-  M(TestSmi)                                                                   \
   M(RelationalOp)                                                              \
   M(EqualityCompare)                                                           \
   M(LoadIndexed)
@@ -159,7 +157,7 @@
   Condition Name##Instr::EmitComparisonCode(FlowGraphCompiler*,                \
                                             BranchLabels) {                    \
     UNIMPLEMENTED();                                                           \
-    return EQ;                                                                 \
+    return NEXT_IS_TRUE;                                                       \
   }
 
 #define DEFINE_UNIMPLEMENTED(Name)                                             \
@@ -171,7 +169,6 @@
 #undef DEFINE_UNIMPLEMENTED
 
 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestCids)
-DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestSmi)
 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(RelationalOp)
 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(EqualityCompare)
 
@@ -231,11 +228,8 @@
 
 
 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-#if defined(PRODUCT)
-  compiler->Bailout("PolymorphicInstanceCallInstr::EmitNativeCode");
-#else  // defined(PRODUCT)
-  compiler->Bailout(ToCString());
-#endif  // defined(PRODUCT)
+  Unsupported(compiler);
+  UNREACHABLE();
 }
 
 
@@ -383,14 +377,14 @@
 static void EmitBranchOnCondition(FlowGraphCompiler* compiler,
                                   Condition true_condition,
                                   BranchLabels labels) {
-  if (labels.fall_through == labels.false_label) {
-    // If the next block is the false successor, fall through to it.
+  if (true_condition == NEXT_IS_TRUE) {
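+    // NEXT_IS_TRUE: the jump to the true label below is only executed when
+    // the comparison holds; otherwise control reaches the jump to the false
+    // label (unless the false successor is the fall-through block).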
     __ Jump(labels.true_label);
+    if (labels.fall_through != labels.false_label) {
+      __ Jump(labels.false_label);
+    }
   } else {
-    // If the next block is not the false successor, branch to it.
+    ASSERT(true_condition == NEXT_IS_FALSE);
     __ Jump(labels.false_label);
-
-    // Fall through or jump to the true successor.
     if (labels.fall_through != labels.true_label) {
       __ Jump(labels.true_label);
     }
@@ -403,34 +397,33 @@
   ASSERT((kind() == Token::kNE_STRICT) ||
          (kind() == Token::kEQ_STRICT));
 
+  Token::Kind comparison;
+  Condition condition;
+  if (labels.fall_through == labels.false_label) {
+    condition = NEXT_IS_TRUE;
+    comparison = kind();
+  } else {
+    // Flip the comparison to save a jump.
+    condition = NEXT_IS_FALSE;
+    comparison = (kind() == Token::kEQ_STRICT) ? Token::kNE_STRICT
+                                               : Token::kEQ_STRICT;
+  }
+
   if (!compiler->is_optimizing()) {
     const Bytecode::Opcode eq_op = needs_number_check() ?
         Bytecode::kIfEqStrictNumTOS : Bytecode::kIfEqStrictTOS;
     const Bytecode::Opcode ne_op = needs_number_check() ?
         Bytecode::kIfNeStrictNumTOS : Bytecode::kIfNeStrictTOS;
-
-    if (kind() == Token::kEQ_STRICT) {
-      __ Emit((labels.fall_through == labels.false_label) ? eq_op : ne_op);
-    } else {
-      __ Emit((labels.fall_through == labels.false_label) ? ne_op : eq_op);
-    }
+    __ Emit(comparison == Token::kEQ_STRICT ? eq_op : ne_op);
   } else {
     const Bytecode::Opcode eq_op = needs_number_check() ?
         Bytecode::kIfEqStrictNum : Bytecode::kIfEqStrict;
     const Bytecode::Opcode ne_op = needs_number_check() ?
         Bytecode::kIfNeStrictNum : Bytecode::kIfNeStrict;
-
-    if (kind() == Token::kEQ_STRICT) {
-      __ Emit(Bytecode::Encode(
-          (labels.fall_through == labels.false_label) ? eq_op : ne_op,
-          locs()->in(0).reg(),
-          locs()->in(1).reg()));
-    } else {
-      __ Emit(Bytecode::Encode(
-          (labels.fall_through == labels.false_label) ? ne_op : eq_op,
-          locs()->in(0).reg(),
-          locs()->in(1).reg()));
-    }
+    __ Emit(Bytecode::Encode(
+        (comparison == Token::kEQ_STRICT) ? eq_op : ne_op,
+        locs()->in(0).reg(),
+        locs()->in(1).reg()));
   }
 
   if (needs_number_check() && token_pos().IsReal()) {
@@ -439,7 +432,8 @@
                                    Thread::kNoDeoptId,
                                    token_pos());
   }
-  return EQ;
+
+  return condition;
 }
 
 
@@ -504,6 +498,13 @@
 
 
 EMIT_NATIVE_CODE(Goto, 0) {
+  if (!compiler->is_optimizing()) {
+    // Add a deoptimization descriptor for deoptimizing instructions that
+    // may be inserted before this instruction.
+    compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt,
+                                   GetDeoptId(),
+                                   TokenPosition::kNoSource);
+  }
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
   }
@@ -515,6 +516,34 @@
 }
 
 
+Condition TestSmiInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
+                                           BranchLabels labels) {
+  ASSERT((kind() == Token::kEQ) ||
+         (kind() == Token::kNE));
+  Register left = locs()->in(0).reg();
+  Register right = locs()->in(1).reg();
+  __ TestSmi(left, right);
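+  // TestSmi skips the next instruction when FP[rA] & FP[rD] != 0, so for
+  // Token::kEQ (mask bits clear) the next instruction is the true successor.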
+  return (kind() == Token::kEQ) ? NEXT_IS_TRUE : NEXT_IS_FALSE;
+}
+
+
+void TestSmiInstr::EmitBranchCode(FlowGraphCompiler* compiler,
+                                  BranchInstr* branch) {
+  BranchLabels labels = compiler->CreateBranchLabels(branch);
+  Condition true_condition = EmitComparisonCode(compiler, labels);
+  EmitBranchOnCondition(compiler, true_condition, labels);
+}
+
+
+EMIT_NATIVE_CODE(TestSmi,
+                 2,
+                 Location::RequiresRegister(),
+                 LocationSummary::kNoCall) {
+  // Never emitted outside of the BranchInstr.
+  UNREACHABLE();
+}
+
+
 EMIT_NATIVE_CODE(CreateArray,
                  2, Location::RequiresRegister(),
                  LocationSummary::kCall) {
@@ -533,13 +562,9 @@
 EMIT_NATIVE_CODE(StoreIndexed, 3) {
   if (compiler->is_optimizing()) {
     if (class_id() != kArrayCid) {
-#if defined(PRODUCT)
-      compiler->Bailout("StoreIndexed");
-#else  // defined(PRODUCT)
-      compiler->Bailout(ToCString());
-#endif  // defined(PRODUCT)
+      Unsupported(compiler);
+      UNREACHABLE();
     }
-
     __ StoreIndexed(locs()->in(kArrayPos).reg(),
                     locs()->in(kIndexPos).reg(),
                     locs()->in(kValuePos).reg());
@@ -983,12 +1008,55 @@
 
 
 EMIT_NATIVE_CODE(CheckClassId, 1) {
-  intptr_t cid = __ AddConstant(Smi::Handle(Smi::New(cid_)));
-  __ CheckClassId(locs()->in(0).reg(), cid);
+  __ CheckClassId(locs()->in(0).reg(),
+                  compiler->ToEmbeddableCid(cid_, this));
   compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckClass);
 }
 
 
+EMIT_NATIVE_CODE(CheckClass, 1) {
+  const Register value = locs()->in(0).reg();
+  if (IsNullCheck()) {
+    ASSERT(DeoptIfNull() || DeoptIfNotNull());
+    if (DeoptIfNull()) {
+      __ IfEqNull(value);
+    } else {
+      __ IfNeNull(value);
+    }
+  } else {
+    ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) ||
+           (unary_checks().NumberOfChecks() > 1));
+    const intptr_t may_be_smi =
+        (unary_checks().GetReceiverClassIdAt(0) == kSmiCid) ? 1 : 0;
+    if (IsDenseSwitch()) {
+      ASSERT(cids_[0] < cids_[cids_.length() - 1]);
+      const intptr_t low_cid = cids_[0];
+      const intptr_t cid_mask = ComputeCidMask();
+      __ CheckDenseSwitch(value, may_be_smi);
+      __ Nop(compiler->ToEmbeddableCid(low_cid, this));
+      __ Nop(__ AddConstant(Smi::Handle(Smi::New(cid_mask))));
+    } else {
+      GrowableArray<CidTarget> sorted_ic_data;
+      FlowGraphCompiler::SortICDataByCount(unary_checks(),
+                                           &sorted_ic_data,
+                                           /* drop_smi = */ true);
+      const intptr_t sorted_length = sorted_ic_data.length();
+      if (!Utils::IsUint(8, sorted_length)) {
+        Unsupported(compiler);
+        UNREACHABLE();
+      }
+      __ CheckCids(value, may_be_smi, sorted_length);
+      for (intptr_t i = 0; i < sorted_length; i++) {
+        __ Nop(compiler->ToEmbeddableCid(sorted_ic_data[i].cid, this));
+      }
+    }
+  }
+  compiler->EmitDeopt(deopt_id(),
+                      ICData::kDeoptCheckClass,
+                      licm_hoisted_ ? ICData::kHoisted : 0);
+}
+
+
 EMIT_NATIVE_CODE(BinarySmiOp, 2, Location::RequiresRegister()) {
   const Register left = locs()->in(0).reg();
   const Register right = locs()->in(1).reg();
@@ -1042,7 +1110,7 @@
   if (can_deopt) {
     compiler->EmitDeopt(deopt_id(), ICData::kDeoptBinarySmiOp);
   } else if (needs_nop) {
-    __ Nop();
+    __ Nop(0);
   }
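
The DBC changes above hoist the fall-through flip out of the individual
comparison emitters: EmitComparisonCode now picks the opcode from the token
kind alone and reports which way "true" branches, and the shared branch
emitter deals with label layout (returning condition instead of a hard-coded
EQ closes the same loop). A minimal sketch of that division of labor, with
invented names standing in for the real DBC backend:

  // Toy model, not the real API: the comparison emitter no longer needs to
  // know which label falls through.
  #include <cstdio>

  enum Condition { NEXT_IS_TRUE, NEXT_IS_FALSE };
  struct BranchLabels { int true_label, false_label, fall_through; };

  Condition EmitComparisonCode(bool is_eq_strict) {
    // Emit kIfEqStrict / kIfNeStrict here; the choice depends only on the
    // token kind, never on label layout.
    return is_eq_strict ? NEXT_IS_TRUE : NEXT_IS_FALSE;
  }

  // One shared routine flips the jump when the true side falls through, so
  // every comparison instruction reuses the same logic.
  void EmitBranchOnCondition(Condition c, const BranchLabels& labels) {
    if (labels.fall_through == labels.false_label) {
      std::printf("branch-on-%s to L%d\n",
                  c == NEXT_IS_TRUE ? "true" : "false", labels.true_label);
    } else {
      std::printf("branch-on-%s to L%d\n",
                  c == NEXT_IS_TRUE ? "false" : "true", labels.false_label);
    }
  }

  int main() {
    BranchLabels labels{1, 2, 2};  // the false label falls through
    EmitBranchOnCondition(EmitComparisonCode(true), labels);
    return 0;
  }
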
 }
 
diff --git a/runtime/vm/intermediate_language_ia32.cc b/runtime/vm/intermediate_language_ia32.cc
index aa3c355..34555dc 100644
--- a/runtime/vm/intermediate_language_ia32.cc
+++ b/runtime/vm/intermediate_language_ia32.cc
@@ -1725,7 +1725,7 @@
           ((IsPotentialUnboxedStore()) ? 3 : 0);
   LocationSummary* summary = new(zone) LocationSummary(
       zone, kNumInputs, kNumTemps,
-          ((IsUnboxedStore() && opt && is_potential_unboxed_initialization_) ||
+          ((IsUnboxedStore() && opt && is_initialization()) ||
            IsPotentialUnboxedStore())
           ? LocationSummary::kCallOnSlowPath
           : LocationSummary::kNoCall);
@@ -1787,7 +1787,7 @@
     Register temp2 = locs()->temp(1).reg();
     const intptr_t cid = field().UnboxedFieldCid();
 
-    if (is_potential_unboxed_initialization_) {
+    if (is_initialization()) {
       const Class* cls = NULL;
       switch (cid) {
         case kDoubleCid:
@@ -1935,18 +1935,12 @@
       __ StoreIntoObjectNoBarrier(
           instance_reg,
           FieldAddress(instance_reg, offset_in_bytes_),
-          locs()->in(1).constant(),
-          is_object_reference_initialization_ ?
-              Assembler::kEmptyOrSmiOrNull :
-              Assembler::kHeapObjectOrSmi);
+          locs()->in(1).constant());
     } else {
       Register value_reg = locs()->in(1).reg();
       __ StoreIntoObjectNoBarrier(instance_reg,
                                   FieldAddress(instance_reg, offset_in_bytes_),
-                                  value_reg,
-                                  is_object_reference_initialization_ ?
-                                      Assembler::kEmptyOrSmiOrNull :
-                                      Assembler::kHeapObjectOrSmi);
+                                  value_reg);
     }
   }
   __ Bind(&skip_store);
@@ -2066,12 +2060,12 @@
                       EDI);  // temp
 
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(EAX,
+  __ StoreIntoObjectNoBarrier(EAX,
                               FieldAddress(EAX, Array::type_arguments_offset()),
                               kElemTypeReg);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(EAX,
+  __ StoreIntoObjectNoBarrier(EAX,
                               FieldAddress(EAX, Array::length_offset()),
                               kLengthReg);
 
@@ -2089,13 +2083,13 @@
       intptr_t current_offset = 0;
       __ movl(EBX, raw_null);
       while (current_offset < array_size) {
-        __ InitializeFieldNoBarrier(EAX, Address(EDI, current_offset), EBX);
+        __ StoreIntoObjectNoBarrier(EAX, Address(EDI, current_offset), EBX);
         current_offset += kWordSize;
       }
     } else {
       Label init_loop;
       __ Bind(&init_loop);
-      __ InitializeFieldNoBarrier(EAX, Address(EDI, 0), Object::null_object());
+      __ StoreIntoObjectNoBarrier(EAX, Address(EDI, 0), Object::null_object());
       __ addl(EDI, Immediate(kWordSize));
       __ cmpl(EDI, EBX);
       __ j(BELOW, &init_loop, Assembler::kNearJump);
diff --git a/runtime/vm/intermediate_language_mips.cc b/runtime/vm/intermediate_language_mips.cc
index 6b9100b..342e267 100644
--- a/runtime/vm/intermediate_language_mips.cc
+++ b/runtime/vm/intermediate_language_mips.cc
@@ -1922,7 +1922,7 @@
           ((IsPotentialUnboxedStore()) ? 3 : 0);
   LocationSummary* summary = new(zone) LocationSummary(
       zone, kNumInputs, kNumTemps,
-          ((IsUnboxedStore() && opt && is_potential_unboxed_initialization_) ||
+          ((IsUnboxedStore() && opt && is_initialization()) ||
            IsPotentialUnboxedStore())
           ? LocationSummary::kCallOnSlowPath
           : LocationSummary::kNoCall);
@@ -1978,7 +1978,7 @@
     Register temp2 = locs()->temp(1).reg();
     const intptr_t cid = field().UnboxedFieldCid();
 
-    if (is_potential_unboxed_initialization_) {
+    if (is_initialization()) {
       const Class* cls = NULL;
       switch (cid) {
         case kDoubleCid:
diff --git a/runtime/vm/intermediate_language_x64.cc b/runtime/vm/intermediate_language_x64.cc
index 2b71a16..da4b4c2 100644
--- a/runtime/vm/intermediate_language_x64.cc
+++ b/runtime/vm/intermediate_language_x64.cc
@@ -1753,7 +1753,7 @@
           ((IsPotentialUnboxedStore()) ? 3 : 0);
   LocationSummary* summary = new(zone) LocationSummary(
       zone, kNumInputs, kNumTemps,
-          ((IsUnboxedStore() && opt && is_potential_unboxed_initialization_) ||
+          ((IsUnboxedStore() && opt && is_initialization()) ||
            IsPotentialUnboxedStore())
           ? LocationSummary::kCallOnSlowPath
           : LocationSummary::kNoCall);
@@ -1813,7 +1813,7 @@
     Register temp2 = locs()->temp(1).reg();
     const intptr_t cid = field().UnboxedFieldCid();
 
-    if (is_potential_unboxed_initialization_) {
+    if (is_initialization()) {
       const Class* cls = NULL;
       switch (cid) {
         case kDoubleCid:
@@ -1958,18 +1958,12 @@
     if (locs()->in(1).IsConstant()) {
       __ StoreIntoObjectNoBarrier(instance_reg,
                                   FieldAddress(instance_reg, offset_in_bytes_),
-                                  locs()->in(1).constant(),
-                                  is_object_reference_initialization_ ?
-                                      Assembler::kEmptyOrSmiOrNull :
-                                      Assembler::kHeapObjectOrSmi);
+                                  locs()->in(1).constant());
     } else {
       Register value_reg = locs()->in(1).reg();
       __ StoreIntoObjectNoBarrier(instance_reg,
           FieldAddress(instance_reg, offset_in_bytes_),
-          value_reg,
-          is_object_reference_initialization_ ?
-              Assembler::kEmptyOrSmiOrNull :
-              Assembler::kHeapObjectOrSmi);
+          value_reg);
     }
   }
   __ Bind(&skip_store);
@@ -2086,12 +2080,12 @@
 
   // RAX: new object start as a tagged pointer.
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(RAX,
+  __ StoreIntoObjectNoBarrier(RAX,
                               FieldAddress(RAX, Array::type_arguments_offset()),
                               kElemTypeReg);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(RAX,
+  __ StoreIntoObjectNoBarrier(RAX,
                               FieldAddress(RAX, Array::length_offset()),
                               kLengthReg);
 
@@ -2107,13 +2101,13 @@
     if (array_size < (kInlineArraySize * kWordSize)) {
       intptr_t current_offset = 0;
       while (current_offset < array_size) {
-        __ InitializeFieldNoBarrier(RAX, Address(RDI, current_offset), R12);
+        __ StoreIntoObjectNoBarrier(RAX, Address(RDI, current_offset), R12);
         current_offset += kWordSize;
       }
     } else {
       Label init_loop;
       __ Bind(&init_loop);
-      __ InitializeFieldNoBarrier(RAX, Address(RDI, 0), R12);
+      __ StoreIntoObjectNoBarrier(RAX, Address(RDI, 0), R12);
       __ addq(RDI, Immediate(kWordSize));
       __ cmpq(RDI, RCX);
       __ j(BELOW, &init_loop, Assembler::kNearJump);
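
The renames from InitializeFieldNoBarrier to StoreIntoObjectNoBarrier here
(and in the intrinsifier files below) go hand in hand with the removal of
vm/verified_memory.h later in this diff: the initializing-store variant only
existed to pass an expected-old-value hint to the write-barrier verification
pass, so once that pass is gone the two helpers collapse into one. A rough
sketch of the shape, with paraphrased signatures rather than the real
assembler API:

  // Hypothetical simplification, not the VM's actual code. Before the
  // change, callers passed what the slot was expected to hold so a debug
  // pass could verify it; after, only the store remains.
  struct Slot { void* value; };

  // No generational write barrier: legal only when the caller guarantees
  // the target object is freshly allocated (still in new space) or the
  // stored value is a Smi/immediate.
  void StoreIntoObjectNoBarrier(Slot* slot, void* value) {
    slot->value = value;
  }

  int main() {
    Slot length_field{nullptr};
    int len = 3;
    StoreIntoObjectNoBarrier(&length_field, &len);
    return 0;
  }
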
diff --git a/runtime/vm/intrinsifier_arm.cc b/runtime/vm/intrinsifier_arm.cc
index e17abcb..2daafed 100644
--- a/runtime/vm/intrinsifier_arm.cc
+++ b/runtime/vm/intrinsifier_arm.cc
@@ -103,7 +103,7 @@
   // Store backing array object in growable array object.
   __ ldr(R1, Address(SP, kArrayOffset));  // Data argument.
   // R0 is new, no barrier needed.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       R0,
       FieldAddress(R0, GrowableObjectArray::data_offset()),
       R1);
@@ -111,14 +111,14 @@
   // R0: new growable array object start as a tagged pointer.
   // Store the type argument field in the growable array object.
   __ ldr(R1, Address(SP, kTypeArgumentsOffset));  // Type argument.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       R0,
       FieldAddress(R0, GrowableObjectArray::type_arguments_offset()),
       R1);
 
   // Set the length field in the growable array object to 0.
   __ LoadImmediate(R1, 0);
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       R0,
       FieldAddress(R0, GrowableObjectArray::length_offset()),
       R1);
@@ -166,7 +166,7 @@
 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_shift)           \
   Label fall_through;                                                          \
   const intptr_t kArrayLengthStackOffset = 0 * kWordSize;                      \
-  __ MaybeTraceAllocation(cid, R2, &fall_through);                             \
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, R2, &fall_through));             \
   __ ldr(R2, Address(SP, kArrayLengthStackOffset));  /* Array length. */       \
   /* Check that length is a positive Smi. */                                   \
   /* R2: requested array length argument. */                                   \
@@ -183,7 +183,7 @@
   const intptr_t fixed_size = sizeof(Raw##type_name) + kObjectAlignment - 1;   \
   __ AddImmediate(R2, fixed_size);                                             \
   __ bic(R2, R2, Operand(kObjectAlignment - 1));                               \
-  Heap::Space space = Heap::SpaceForAllocation(cid);                           \
+  Heap::Space space = Heap::kNew;                                              \
   __ ldr(R3, Address(THR, Thread::heap_offset()));                             \
   __ ldr(R0, Address(R3, Heap::TopOffset(space)));                             \
                                                                                \
@@ -202,7 +202,7 @@
                                                                                \
   /* Successfully allocated the object(s), now update top to point to */       \
   /* next object start and initialize the object. */                           \
-  __ LoadAllocationStatsAddress(R4, cid);                                      \
+  NOT_IN_PRODUCT(__ LoadAllocationStatsAddress(R4, cid));                      \
   __ str(R1, Address(R3, Heap::TopOffset(space)));                             \
   __ AddImmediate(R0, kHeapObjectTag);                                         \
   /* Initialize the tags. */                                                   \
@@ -227,7 +227,7 @@
   /* R2: allocation size. */                                                   \
   /* R4: allocation stats address. */                                          \
   __ ldr(R3, Address(SP, kArrayLengthStackOffset));  /* Array length. */       \
-  __ InitializeFieldNoBarrier(R0,                                              \
+  __ StoreIntoObjectNoBarrier(R0,                                              \
                               FieldAddress(R0, type_name::length_offset()),    \
                               R3);                                             \
   /* Initialize all array elements to 0. */                                    \
@@ -249,7 +249,7 @@
   __ b(&init_loop, CC);                                                        \
   __ str(R8, Address(R3, -2 * kWordSize), HI);                                 \
                                                                                \
-  __ IncrementAllocationStatsWithSize(R4, R2, space);                          \
+  NOT_IN_PRODUCT(__ IncrementAllocationStatsWithSize(R4, R2, space));          \
   __ Ret();                                                                    \
   __ Bind(&fall_through);                                                      \
 
@@ -778,6 +778,11 @@
 }
 
 
+void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
+  Integer_bitAndFromInteger(assembler);
+}
+
+
 void Intrinsifier::Bigint_lsh(Assembler* assembler) {
   // static void _lsh(Uint32List x_digits, int x_used, int n,
   //                  Uint32List r_digits)
@@ -1824,7 +1829,7 @@
                                      Label* failure) {
   const Register length_reg = R2;
   Label fail;
-  __ MaybeTraceAllocation(kOneByteStringCid, R0, failure);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kOneByteStringCid, R0, failure));
   __ mov(R8, Operand(length_reg));  // Save the length register.
   // TODO(koda): Protect against negative length and overflow here.
   __ SmiUntag(length_reg);
@@ -1833,7 +1838,7 @@
   __ bic(length_reg, length_reg, Operand(kObjectAlignment - 1));
 
   const intptr_t cid = kOneByteStringCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ ldr(R3, Address(THR, Thread::heap_offset()));
   __ ldr(R0, Address(R3, Heap::TopOffset(space)));
 
@@ -1852,7 +1857,7 @@
 
   // Successfully allocated the object(s), now update top to point to
   // next object start and initialize the object.
-  __ LoadAllocationStatsAddress(R4, cid);
+  NOT_IN_PRODUCT(__ LoadAllocationStatsAddress(R4, cid));
   __ str(R1, Address(R3, Heap::TopOffset(space)));
   __ AddImmediate(R0, kHeapObjectTag);
 
@@ -1876,16 +1881,16 @@
   }
 
   // Set the length field using the saved length (R8).
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, String::length_offset()),
                               R8);
   // Clear hash.
   __ LoadImmediate(TMP, 0);
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, String::hash_offset()),
                               TMP);
 
-  __ IncrementAllocationStatsWithSize(R4, R2, space);
+  NOT_IN_PRODUCT(__ IncrementAllocationStatsWithSize(R4, R2, space));
   __ b(ok);
 
   __ Bind(&fail);
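
The NOT_IN_PRODUCT wrappers sprinkled through the intrinsifiers compile the
allocation-tracing and allocation-stats instructions out of product builds
entirely, shrinking and speeding the intrinsic fast paths. The macro is
roughly this (paraphrased from the VM's globals header):

  #if defined(PRODUCT)
  #define NOT_IN_PRODUCT(code)        // the argument vanishes in product mode
  #else
  #define NOT_IN_PRODUCT(code) code   // otherwise it expands as written
  #endif

Because the argument disappears entirely in product mode, anything wrapped in
it (MaybeTraceAllocation, LoadAllocationStatsAddress, the stats updates) must
be pure instrumentation that the surrounding code never depends on. The
companion change from Heap::SpaceForAllocation(cid) to a plain Heap::kNew
likewise pins intrinsified allocations to new space instead of consulting a
per-cid policy.
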
diff --git a/runtime/vm/intrinsifier_arm64.cc b/runtime/vm/intrinsifier_arm64.cc
index fbaa1677..f9cade2 100644
--- a/runtime/vm/intrinsifier_arm64.cc
+++ b/runtime/vm/intrinsifier_arm64.cc
@@ -183,7 +183,7 @@
 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_shift)           \
   Label fall_through;                                                          \
   const intptr_t kArrayLengthStackOffset = 0 * kWordSize;                      \
-  __ MaybeTraceAllocation(cid, R2, &fall_through);                             \
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, R2, &fall_through));             \
   __ ldr(R2, Address(SP, kArrayLengthStackOffset));  /* Array length. */       \
   /* Check that length is a positive Smi. */                                   \
   /* R2: requested array length argument. */                                   \
@@ -200,7 +200,7 @@
   const intptr_t fixed_size = sizeof(Raw##type_name) + kObjectAlignment - 1;   \
   __ AddImmediate(R2, R2, fixed_size);                                         \
   __ andi(R2, R2, Immediate(~(kObjectAlignment - 1)));                         \
-  Heap::Space space = Heap::SpaceForAllocation(cid);                           \
+  Heap::Space space = Heap::kNew;                                              \
   __ ldr(R3, Address(THR, Thread::heap_offset()));                             \
   __ ldr(R0, Address(R3, Heap::TopOffset(space)));                             \
                                                                                \
@@ -221,7 +221,7 @@
   /* next object start and initialize the object. */                           \
   __ str(R1, Address(R3, Heap::TopOffset(space)));                             \
   __ AddImmediate(R0, R0, kHeapObjectTag);                                     \
-  __ UpdateAllocationStatsWithSize(cid, R2, space);                            \
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R2, space));            \
   /* Initialize the tags. */                                                   \
   /* R0: new object start as a tagged pointer. */                              \
   /* R1: new object end address. */                                            \
@@ -682,6 +682,11 @@
 }
 
 
+void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
+  Integer_bitAndFromInteger(assembler);
+}
+
+
 void Intrinsifier::Bigint_lsh(Assembler* assembler) {
   // static void _lsh(Uint32List x_digits, int x_used, int n,
   //                  Uint32List r_digits)
@@ -1907,7 +1912,7 @@
                                      Label* failure) {
   const Register length_reg = R2;
   Label fail;
-  __ MaybeTraceAllocation(kOneByteStringCid, R0, failure);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kOneByteStringCid, R0, failure));
   __ mov(R6, length_reg);  // Save the length register.
   // TODO(koda): Protect against negative length and overflow here.
   __ SmiUntag(length_reg);
@@ -1916,7 +1921,7 @@
   __ andi(length_reg, length_reg, Immediate(~(kObjectAlignment - 1)));
 
   const intptr_t cid = kOneByteStringCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ ldr(R3, Address(THR, Thread::heap_offset()));
   __ ldr(R0, Address(R3, Heap::TopOffset(space)));
 
@@ -1937,7 +1942,7 @@
   // next object start and initialize the object.
   __ str(R1, Address(R3, Heap::TopOffset(space)));
   __ AddImmediate(R0, R0, kHeapObjectTag);
-  __ UpdateAllocationStatsWithSize(cid, R2, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R2, space));
 
   // Initialize the tags.
   // R0: new object start as a tagged pointer.
diff --git a/runtime/vm/intrinsifier_ia32.cc b/runtime/vm/intrinsifier_ia32.cc
index 130b5d6..b9ea3a1 100644
--- a/runtime/vm/intrinsifier_ia32.cc
+++ b/runtime/vm/intrinsifier_ia32.cc
@@ -136,17 +136,12 @@
   // Try allocating in new space.
   const Class& cls = Class::Handle(
       Isolate::Current()->object_store()->growable_object_array_class());
-#if defined(DEBUG)
-  static const bool kJumpLength = Assembler::kFarJump;
-#else
-  static const bool kJumpLength = Assembler::kNearJump;
-#endif  // DEBUG
-  __ TryAllocate(cls, &fall_through, kJumpLength, EAX, EBX);
+  __ TryAllocate(cls, &fall_through, Assembler::kNearJump, EAX, EBX);
 
   // Store backing array object in growable array object.
   __ movl(EBX, Address(ESP, kArrayOffset));  // data argument.
   // EAX is new, no barrier needed.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       EAX,
       FieldAddress(EAX, GrowableObjectArray::data_offset()),
       EBX);
@@ -154,7 +149,7 @@
   // EAX: new growable array object start as a tagged pointer.
   // Store the type argument field in the growable array object.
   __ movl(EBX, Address(ESP, kTypeArgumentsOffset));  // type argument.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       EAX,
       FieldAddress(EAX, GrowableObjectArray::type_arguments_offset()),
       EBX);
@@ -200,7 +195,7 @@
 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_factor)          \
   Label fall_through;                                                          \
   const intptr_t kArrayLengthStackOffset = 1 * kWordSize;                      \
-  __ MaybeTraceAllocation(cid, EDI, &fall_through, false);                     \
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, EDI, &fall_through, false));     \
   __ movl(EDI, Address(ESP, kArrayLengthStackOffset));  /* Array length. */    \
   /* Check that length is a positive Smi. */                                   \
   /* EDI: requested array length argument. */                                  \
@@ -223,7 +218,7 @@
   const intptr_t fixed_size = sizeof(Raw##type_name) + kObjectAlignment - 1;   \
   __ leal(EDI, Address(EDI, scale_factor, fixed_size));                        \
   __ andl(EDI, Immediate(-kObjectAlignment));                                  \
-  Heap::Space space = Heap::SpaceForAllocation(cid);                           \
+  Heap::Space space = Heap::kNew;                                              \
   __ movl(ECX, Address(THR, Thread::heap_offset()));                           \
   __ movl(EAX, Address(ECX, Heap::TopOffset(space)));                          \
   __ movl(EBX, EAX);                                                           \
@@ -244,7 +239,7 @@
   /* next object start and initialize the object. */                           \
   __ movl(Address(ECX, Heap::TopOffset(space)), EBX);                          \
   __ addl(EAX, Immediate(kHeapObjectTag));                                     \
-  __ UpdateAllocationStatsWithSize(cid, EDI, ECX, space);                      \
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, EDI, ECX, space));      \
                                                                                \
   /* Initialize the tags. */                                                   \
   /* EAX: new object start as a tagged pointer. */                             \
@@ -269,7 +264,7 @@
   /* EAX: new object start as a tagged pointer. */                             \
   /* EBX: new object end address. */                                           \
   __ movl(EDI, Address(ESP, kArrayLengthStackOffset));  /* Array length. */    \
-  __ InitializeFieldNoBarrier(EAX,                                             \
+  __ StoreIntoObjectNoBarrier(EAX,                                             \
                               FieldAddress(EAX, type_name::length_offset()),   \
                               EDI);                                            \
   /* Initialize all array elements to 0. */                                    \
@@ -822,6 +817,11 @@
 }
 
 
+void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
+  Integer_bitAndFromInteger(assembler);
+}
+
+
 void Intrinsifier::Bigint_lsh(Assembler* assembler) {
   // static void _lsh(Uint32List x_digits, int x_used, int n,
   //                  Uint32List r_digits)
@@ -1859,7 +1859,8 @@
                                      Label* ok,
                                      Label* failure,
                                      Register length_reg) {
-  __ MaybeTraceAllocation(kOneByteStringCid, EAX, failure, false);
+  NOT_IN_PRODUCT(
+    __ MaybeTraceAllocation(kOneByteStringCid, EAX, failure, false));
   if (length_reg != EDI) {
     __ movl(EDI, length_reg);
   }
@@ -1871,7 +1872,7 @@
   __ andl(EDI, Immediate(-kObjectAlignment));
 
   const intptr_t cid = kOneByteStringCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ movl(ECX, Address(THR, Thread::heap_offset()));
   __ movl(EAX, Address(ECX, Heap::TopOffset(space)));
   __ movl(EBX, EAX);
@@ -1893,7 +1894,7 @@
   __ movl(Address(ECX, Heap::TopOffset(space)), EBX);
   __ addl(EAX, Immediate(kHeapObjectTag));
 
-  __ UpdateAllocationStatsWithSize(cid, EDI, ECX, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, EDI, ECX, space));
 
   // Initialize the tags.
   // EAX: new object start as a tagged pointer.
@@ -1917,7 +1918,7 @@
 
   // Set the length field.
   __ popl(EDI);
-  __ InitializeFieldNoBarrier(EAX,
+  __ StoreIntoObjectNoBarrier(EAX,
                               FieldAddress(EAX, String::length_offset()),
                               EDI);
   // Clear hash.
diff --git a/runtime/vm/intrinsifier_mips.cc b/runtime/vm/intrinsifier_mips.cc
index d8da470..748f06d 100644
--- a/runtime/vm/intrinsifier_mips.cc
+++ b/runtime/vm/intrinsifier_mips.cc
@@ -161,7 +161,7 @@
 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_shift)           \
   Label fall_through;                                                          \
   const intptr_t kArrayLengthStackOffset = 0 * kWordSize;                      \
-  __ MaybeTraceAllocation(cid, T2, &fall_through);                             \
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, T2, &fall_through));             \
   __ lw(T2, Address(SP, kArrayLengthStackOffset));  /* Array length. */        \
   /* Check that length is a positive Smi. */                                   \
   /* T2: requested array length argument. */                                   \
@@ -177,7 +177,7 @@
   __ AddImmediate(T2, fixed_size);                                             \
   __ LoadImmediate(TMP, -kObjectAlignment);                                    \
   __ and_(T2, T2, TMP);                                                        \
-  Heap::Space space = Heap::SpaceForAllocation(cid);                           \
+  Heap::Space space = Heap::kNew;                                              \
   __ lw(T3, Address(THR, Thread::heap_offset()));                              \
   __ lw(V0, Address(T3, Heap::TopOffset(space)));                              \
                                                                                \
@@ -198,7 +198,7 @@
   /* next object start and initialize the object. */                           \
   __ sw(T1, Address(T3, Heap::TopOffset(space)));                              \
   __ AddImmediate(V0, kHeapObjectTag);                                         \
-  __ UpdateAllocationStatsWithSize(cid, T2, T4, space);                        \
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, T2, T4, space));        \
   /* Initialize the tags. */                                                   \
   /* V0: new object start as a tagged pointer. */                              \
   /* T1: new object end address. */                                            \
@@ -776,6 +776,11 @@
 }
 
 
+void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
+  Integer_bitAndFromInteger(assembler);
+}
+
+
 void Intrinsifier::Bigint_lsh(Assembler* assembler) {
   // static void _lsh(Uint32List x_digits, int x_used, int n,
   //                  Uint32List r_digits)
@@ -1941,7 +1946,7 @@
                                      Label* ok,
                                      Label* failure) {
   const Register length_reg = T2;
-  __ MaybeTraceAllocation(kOneByteStringCid, V0, failure);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kOneByteStringCid, V0, failure));
   __ mov(T6, length_reg);  // Save the length register.
   // TODO(koda): Protect against negative length and overflow here.
   __ SmiUntag(length_reg);
@@ -1951,7 +1956,7 @@
   __ and_(length_reg, length_reg, TMP);
 
   const intptr_t cid = kOneByteStringCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ lw(T3, Address(THR, Thread::heap_offset()));
   __ lw(V0, Address(T3, Heap::TopOffset(space)));
 
@@ -1972,7 +1977,7 @@
   __ sw(T1, Address(T3, Heap::TopOffset(space)));
   __ AddImmediate(V0, kHeapObjectTag);
 
-  __ UpdateAllocationStatsWithSize(cid, T2, T3, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, T2, T3, space));
 
   // Initialize the tags.
   // V0: new object start as a tagged pointer.
diff --git a/runtime/vm/intrinsifier_x64.cc b/runtime/vm/intrinsifier_x64.cc
index 5ac5d39..cdb11df 100644
--- a/runtime/vm/intrinsifier_x64.cc
+++ b/runtime/vm/intrinsifier_x64.cc
@@ -100,7 +100,7 @@
   // Store backing array object in growable array object.
   __ movq(RCX, Address(RSP, kArrayOffset));  // data argument.
   // RAX is new, no barrier needed.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       RAX,
       FieldAddress(RAX, GrowableObjectArray::data_offset()),
       RCX);
@@ -108,7 +108,7 @@
   // RAX: new growable array object start as a tagged pointer.
   // Store the type argument field in the growable array object.
   __ movq(RCX, Address(RSP, kTypeArgumentsOffset));  // type argument.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       RAX,
       FieldAddress(RAX, GrowableObjectArray::type_arguments_offset()),
       RCX);
@@ -153,7 +153,7 @@
 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_factor)          \
   Label fall_through;                                                          \
   const intptr_t kArrayLengthStackOffset = 1 * kWordSize;                      \
-  __ MaybeTraceAllocation(cid, &fall_through, false);                          \
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, &fall_through, false));          \
   __ movq(RDI, Address(RSP, kArrayLengthStackOffset));  /* Array length. */    \
   /* Check that length is a positive Smi. */                                   \
   /* RDI: requested array length argument. */                                  \
@@ -176,7 +176,7 @@
   const intptr_t fixed_size = sizeof(Raw##type_name) + kObjectAlignment - 1;   \
   __ leaq(RDI, Address(RDI, scale_factor, fixed_size));                        \
   __ andq(RDI, Immediate(-kObjectAlignment));                                  \
-  Heap::Space space = Heap::SpaceForAllocation(cid);                           \
+  Heap::Space space = Heap::kNew;                                              \
   __ movq(R13, Address(THR, Thread::heap_offset()));                           \
   __ movq(RAX, Address(R13, Heap::TopOffset(space)));                          \
   __ movq(RCX, RAX);                                                           \
@@ -197,7 +197,7 @@
   /* next object start and initialize the object. */                           \
   __ movq(Address(R13, Heap::TopOffset(space)), RCX);                          \
   __ addq(RAX, Immediate(kHeapObjectTag));                                     \
-  __ UpdateAllocationStatsWithSize(cid, RDI, space);                           \
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, RDI, space));           \
   /* Initialize the tags. */                                                   \
   /* RAX: new object start as a tagged pointer. */                             \
   /* RCX: new object end address. */                                           \
@@ -222,7 +222,7 @@
   /* RAX: new object start as a tagged pointer. */                             \
   /* RCX: new object end address. */                                           \
   __ movq(RDI, Address(RSP, kArrayLengthStackOffset));  /* Array length. */    \
-  __ InitializeFieldNoBarrier(RAX,                                             \
+  __ StoreIntoObjectNoBarrier(RAX,                                             \
                               FieldAddress(RAX, type_name::length_offset()),   \
                               RDI);                                            \
   /* Initialize all array elements to 0. */                                    \
@@ -741,6 +741,11 @@
 }
 
 
+void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
+  Integer_bitAndFromInteger(assembler);
+}
+
+
 void Intrinsifier::Bigint_lsh(Assembler* assembler) {
   // static void _lsh(Uint32List x_digits, int x_used, int n,
   //                  Uint32List r_digits)
@@ -1826,7 +1831,7 @@
                                      Label* ok,
                                      Label* failure,
                                      Register length_reg) {
-  __ MaybeTraceAllocation(kOneByteStringCid, failure, false);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kOneByteStringCid, failure, false));
   if (length_reg != RDI) {
     __ movq(RDI, length_reg);
   }
@@ -1838,7 +1843,7 @@
   __ andq(RDI, Immediate(-kObjectAlignment));
 
   const intptr_t cid = kOneByteStringCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ movq(R13, Address(THR, Thread::heap_offset()));
   __ movq(RAX, Address(R13, Heap::TopOffset(space)));
 
@@ -1859,7 +1864,7 @@
   // next object start and initialize the object.
   __ movq(Address(R13, Heap::TopOffset(space)), RCX);
   __ addq(RAX, Immediate(kHeapObjectTag));
-  __ UpdateAllocationStatsWithSize(cid, RDI, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, RDI, space));
 
   // Initialize the tags.
   // RAX: new object start as a tagged pointer.
@@ -1882,7 +1887,7 @@
 
   // Set the length field.
   __ popq(RDI);
-  __ InitializeFieldNoBarrier(RAX,
+  __ StoreIntoObjectNoBarrier(RAX,
                               FieldAddress(RAX, String::length_offset()),
                               RDI);
   // Clear hash.
diff --git a/runtime/vm/isolate.cc b/runtime/vm/isolate.cc
index bd26039..eee6d04 100644
--- a/runtime/vm/isolate.cc
+++ b/runtime/vm/isolate.cc
@@ -55,6 +55,7 @@
 DECLARE_FLAG(bool, trace_service);
 DECLARE_FLAG(bool, trace_reload);
 DECLARE_FLAG(bool, warn_on_pause_with_no_debugger);
+DECLARE_FLAG(bool, check_reloaded);
 
 NOT_IN_PRODUCT(
 static void CheckedModeHandler(bool value) {
@@ -797,6 +798,7 @@
       symbols_mutex_(new Mutex()),
       type_canonicalization_mutex_(new Mutex()),
       constant_canonicalization_mutex_(new Mutex()),
+      megamorphic_lookup_mutex_(new Mutex()),
       message_handler_(NULL),
       spawn_state_(NULL),
       is_runnable_(false),
@@ -863,6 +865,8 @@
   type_canonicalization_mutex_ = NULL;
   delete constant_canonicalization_mutex_;
   constant_canonicalization_mutex_ = NULL;
+  delete megamorphic_lookup_mutex_;
+  megamorphic_lookup_mutex_ = NULL;
   delete message_handler_;
   message_handler_ = NULL;  // Fail fast if we send messages to a dead isolate.
   ASSERT(deopt_context_ == NULL);  // No deopt in progress when isolate deleted.
@@ -1693,6 +1697,15 @@
     }
   }
 
+  if (FLAG_check_reloaded &&
+      (this != Dart::vm_isolate()) &&
+      !ServiceIsolate::IsServiceIsolateDescendant(this)) {
+    if (!HasAttemptedReload()) {
+      FATAL("Isolate did not reload before exiting and "
+            "--check-reloaded is enabled.\n");
+    }
+  }
+
   // Then, proceed with low-level teardown.
   LowLevelShutdown();
 
@@ -1834,6 +1847,7 @@
 }
 
 
+#ifndef PRODUCT
 static const char* ExceptionPauseInfoToServiceEnum(Dart_ExceptionPauseInfo pi) {
   switch (pi) {
     case kPauseOnAllExceptions:
@@ -1970,6 +1984,7 @@
     }
   }
 }
+#endif
 
 
 void Isolate::set_tag_table(const GrowableObjectArray& value) {
diff --git a/runtime/vm/isolate.h b/runtime/vm/isolate.h
index 701579c..7f1df6d 100644
--- a/runtime/vm/isolate.h
+++ b/runtime/vm/isolate.h
@@ -282,6 +282,9 @@
   Mutex* constant_canonicalization_mutex() const {
     return constant_canonicalization_mutex_;
   }
+  Mutex* megamorphic_lookup_mutex() const {
+    return megamorphic_lookup_mutex_;
+  }
 
   Debugger* debugger() const {
     if (!FLAG_support_debugger) {
@@ -455,7 +458,9 @@
     return defer_finalization_count_ == 0;
   }
 
+#ifndef PRODUCT
   void PrintJSON(JSONStream* stream, bool ref = true);
+#endif
 
   // Mutator thread is used to aggregate compiler stats.
   CompilerStats* aggregate_compiler_stats() {
@@ -717,6 +722,7 @@
   Mutex* symbols_mutex_;  // Protects concurrent access to the symbol table.
   Mutex* type_canonicalization_mutex_;  // Protects type canonicalization.
   Mutex* constant_canonicalization_mutex_;  // Protects const canonicalization.
+  Mutex* megamorphic_lookup_mutex_;  // Protects megamorphic table lookup.
   MessageHandler* message_handler_;
   IsolateSpawnState* spawn_state_;
   bool is_runnable_;
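
The --check-reloaded flag declared in isolate.cc above is defined in
isolate_reload.cc below, following the VM's usual split: DEFINE_FLAG in the
file that owns the flag, DECLARE_FLAG in each consumer, and a FLAG_-prefixed
global linking the two. A minimal sketch of the pattern (the real macros in
runtime/vm/flags.h also register the flag with the command-line parser):

  #include <cstdio>

  // Simplified stand-ins for the VM's flag macros.
  #define DEFINE_FLAG(type, name, default_value, comment) \
    type FLAG_##name = default_value;
  #define DECLARE_FLAG(type, name) extern type FLAG_##name;

  DEFINE_FLAG(bool, check_reloaded, false,
              "Assert that an isolate has reloaded at least once.");

  int main() {
    if (FLAG_check_reloaded) {
      std::printf("would FATAL if the isolate never reloaded\n");
    }
    return 0;
  }
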
diff --git a/runtime/vm/isolate_reload.cc b/runtime/vm/isolate_reload.cc
index 1232ef6..15aaf29 100644
--- a/runtime/vm/isolate_reload.cc
+++ b/runtime/vm/isolate_reload.cc
@@ -27,7 +27,10 @@
 DEFINE_FLAG(bool, identity_reload, false, "Enable checks for identity reload.");
 DEFINE_FLAG(int, reload_every, 0, "Reload every N stack overflow checks.");
 DEFINE_FLAG(bool, reload_every_optimized, true, "Only from optimized code.");
-
+DEFINE_FLAG(bool, reload_every_back_off, false,
+            "Double the --reload-every value after each reload.");
+DEFINE_FLAG(bool, check_reloaded, false,
+            "Assert that an isolate has reloaded at least once.");
 #ifndef PRODUCT
 
 #define I (isolate())
@@ -215,7 +218,7 @@
   if (FLAG_trace_reload) {
     THR_Print("ISO-RELOAD: Error: %s\n", error.ToErrorCString());
   }
-  ServiceEvent service_event(Isolate::Current(), ServiceEvent::kIsolateReload);
+  ServiceEvent service_event(I, ServiceEvent::kIsolateReload);
   service_event.set_reload_error(&error);
   Service::HandleEvent(&service_event);
 }
@@ -227,12 +230,13 @@
 
 
 void IsolateReloadContext::ReportSuccess() {
-  ServiceEvent service_event(Isolate::Current(), ServiceEvent::kIsolateReload);
+  ServiceEvent service_event(I, ServiceEvent::kIsolateReload);
   Service::HandleEvent(&service_event);
 }
 
 
 void IsolateReloadContext::StartReload() {
+  TIMELINE_SCOPE(Reload);
   Thread* thread = Thread::Current();
 
   // Grab root library before calling CheckpointBeforeReload.
@@ -277,7 +281,7 @@
 void IsolateReloadContext::RegisterClass(const Class& new_cls) {
   const Class& old_cls = Class::Handle(OldClassOrNull(new_cls));
   if (old_cls.IsNull()) {
-    Isolate::Current()->class_table()->Register(new_cls);
+    I->class_table()->Register(new_cls);
 
     if (FLAG_identity_reload) {
       TIR_Print("Could not find replacement class for %s\n",
@@ -346,6 +350,7 @@
 
 
 void IsolateReloadContext::DeoptimizeDependentCode() {
+  TIMELINE_SCOPE(DeoptimizeDependentCode);
   ClassTable* class_table = I->class_table();
 
   const intptr_t bottom = Dart::vm_isolate()->class_table()->NumCids();
@@ -595,7 +600,7 @@
     }
 
     // Reset the registered libraries to the filtered array.
-    Library::RegisterLibraries(Thread::Current(), saved_libs);
+    Library::RegisterLibraries(thread, saved_libs);
   }
 
   Library& saved_root_lib = Library::Handle(Z, saved_root_library());
@@ -618,39 +623,36 @@
 
 #ifdef DEBUG
 void IsolateReloadContext::VerifyMaps() {
+  TIMELINE_SCOPE(VerifyMaps);
   Class& cls = Class::Handle();
   Class& new_cls = Class::Handle();
   Class& cls2 = Class::Handle();
-  Class& new_cls2 = Class::Handle();
 
   // Verify that two old classes aren't both mapped to the same new
-  // class.  This could happen is the IsSameClass function is broken.
+  // class. This could happen if the IsSameClass function is broken.
   UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
+  UnorderedHashMap<ClassMapTraits> reverse_class_map(
+      HashTables::New<UnorderedHashMap<ClassMapTraits> >(
+          class_map.NumOccupied()));
   {
     UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
     while (it.MoveNext()) {
       const intptr_t entry = it.Current();
       new_cls = Class::RawCast(class_map.GetKey(entry));
       cls = Class::RawCast(class_map.GetPayload(entry, 0));
-      if (new_cls.raw() != cls.raw()) {
-        UnorderedHashMap<ClassMapTraits>::Iterator it2(&class_map);
-        while (it2.MoveNext()) {
-          new_cls2 = Class::RawCast(class_map.GetKey(entry));
-          if (new_cls.raw() == new_cls2.raw()) {
-            cls2 = Class::RawCast(class_map.GetPayload(entry, 0));
-            if (cls.raw() != cls2.raw()) {
-              OS::PrintErr(
-                  "Classes '%s' and '%s' are distinct classes but both map to "
-                  "class '%s'\n",
-                  cls.ToCString(), cls2.ToCString(), new_cls.ToCString());
-              UNREACHABLE();
-            }
-          }
-        }
+      cls2 ^= reverse_class_map.GetOrNull(new_cls);
+      if (!cls2.IsNull()) {
+        OS::PrintErr("Classes '%s' and '%s' are distinct classes but both map "
+                     "to class '%s'\n",
+                     cls.ToCString(), cls2.ToCString(), new_cls.ToCString());
+        UNREACHABLE();
       }
+      bool update = reverse_class_map.UpdateOrInsert(cls, new_cls);
+      ASSERT(!update);
     }
   }
   class_map.Release();
+  reverse_class_map.Release();
 }
 #endif
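
The VerifyMaps rewrite above replaces a quadratic nested scan with a single
pass that builds a reverse map and flags a collision on insertion; the
removed inner loop also appears to have read the outer iterator's entry
rather than its own, so it could only ever compare an entry against itself.
The same one-pass idea in portable C++, assuming nothing about the VM's hash
tables (here the map runs old class name to new class name):

  #include <cassert>
  #include <string>
  #include <unordered_map>

  // True if two distinct old classes map to the same new class.
  bool HasConflict(
      const std::unordered_map<std::string, std::string>& class_map) {
    std::unordered_map<std::string, std::string> reverse;  // new -> old
    for (const auto& entry : class_map) {
      auto inserted = reverse.emplace(entry.second, entry.first);
      if (!inserted.second && inserted.first->second != entry.first) {
        return true;  // a second old class claims the same new class
      }
    }
    return false;
  }

  int main() {
    assert(!HasConflict({{"A", "A2"}, {"B", "B2"}}));
    assert(HasConflict({{"A", "C2"}, {"B", "C2"}}));
    return 0;
  }
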
 
diff --git a/runtime/vm/jit_optimizer.cc b/runtime/vm/jit_optimizer.cc
index 7ff3b0c..e40cf12 100644
--- a/runtime/vm/jit_optimizer.cc
+++ b/runtime/vm/jit_optimizer.cc
@@ -1823,6 +1823,18 @@
     return TryInlineFloat64x2Method(call, recognized_kind);
   }
 
+  if (recognized_kind == MethodRecognizer::kSmi_bitAndFromSmi) {
+    AddReceiverCheck(call);
+    BinarySmiOpInstr* op =
+        new(Z) BinarySmiOpInstr(
+            Token::kBIT_AND,
+            new(Z) Value(call->ArgumentAt(0)),
+            new(Z) Value(call->ArgumentAt(1)),
+            call->deopt_id());
+    ReplaceCall(call, op);
+    return true;
+  }
+
   return false;
 }
 
@@ -2775,7 +2787,6 @@
 void JitOptimizer::VisitStoreInstanceField(
     StoreInstanceFieldInstr* instr) {
   if (instr->IsUnboxedStore()) {
-    ASSERT(instr->is_potential_unboxed_initialization_);
     // Determine if this field should be unboxed based on the usage of getter
     // and setter functions: The heuristic requires that the setter has a
     // usage count of at least 1/kGetterSetterRatio of the getter usage count.
@@ -2844,7 +2855,7 @@
                                      instr->token_pos());
   // Storing into uninitialized memory; remember to prevent dead store
   // elimination and ensure proper GC barrier.
-  store->set_is_object_reference_initialization(true);
+  store->set_is_initialization(true);
   flow_graph_->InsertAfter(replacement, store, NULL, FlowGraph::kEffect);
   Definition* cursor = store;
   for (intptr_t i = 0; i < instr->num_context_variables(); ++i) {
@@ -2856,7 +2867,7 @@
                                        instr->token_pos());
     // Storing into uninitialized memory; remember to prevent dead store
     // elimination and ensure proper GC barrier.
-    store->set_is_object_reference_initialization(true);
+    store->set_is_initialization(true);
     flow_graph_->InsertAfter(cursor, store, NULL, FlowGraph::kEffect);
     cursor = store;
   }
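
The jit_optimizer.cc hunk above teaches the optimizer to treat a call it
recognizes as _Smi._bitAndFromSmi as a plain Smi bit-and: it adds a receiver
class check, then swaps the dynamic call for a BinarySmiOpInstr that carries
the call's deopt id so a failed assumption can still bail out to unoptimized
code. A toy model of that call-replacement shape, with invented node types:

  #include <cstdio>
  #include <memory>

  struct Instr {
    virtual ~Instr() = default;
    virtual const char* Name() const = 0;
  };

  struct InstanceCall : Instr {
    int deopt_id = 7;
    const char* Name() const override { return "InstanceCall(&)"; }
  };

  struct BinarySmiOp : Instr {
    int deopt_id;  // preserved from the call it replaces
    explicit BinarySmiOp(int id) : deopt_id(id) {}
    const char* Name() const override { return "BinarySmiOp(kBIT_AND)"; }
  };

  // Strength-reduce the recognized call, keeping its deopt id for bailouts.
  std::unique_ptr<Instr> ReplaceRecognizedCall(const InstanceCall& call) {
    return std::make_unique<BinarySmiOp>(call.deopt_id);
  }

  int main() {
    InstanceCall call;
    std::unique_ptr<Instr> op = ReplaceRecognizedCall(call);
    std::printf("%s -> %s\n", call.Name(), op->Name());
    return 0;
  }
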
diff --git a/runtime/vm/megamorphic_cache_table.cc b/runtime/vm/megamorphic_cache_table.cc
index 4705b21..e3d34dd 100644
--- a/runtime/vm/megamorphic_cache_table.cc
+++ b/runtime/vm/megamorphic_cache_table.cc
@@ -16,7 +16,7 @@
                                                    const String& name,
                                                    const Array& descriptor) {
   // Multiple compilation threads could access this lookup.
-  SafepointMutexLocker ml(isolate->mutex());
+  SafepointMutexLocker ml(isolate->megamorphic_lookup_mutex());
   ASSERT(name.IsSymbol());
   // TODO(rmacnak): ASSERT(descriptor.IsCanonical());
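
Giving megamorphic lookups a dedicated megamorphic_lookup_mutex, rather than
reusing the isolate's general-purpose mutex as the removed line did, keeps
concurrent compilation threads from contending with everything else that
lock protects. The pattern itself is an ordinary lookup-or-insert under a
lock; a portable sketch:

  #include <cstdio>
  #include <map>
  #include <mutex>
  #include <string>

  std::mutex megamorphic_lookup_mutex;           // guards only this table
  std::map<std::string, int> megamorphic_table;  // selector -> cache index

  int LookupOrInsert(const std::string& name) {
    std::lock_guard<std::mutex> ml(megamorphic_lookup_mutex);
    auto it = megamorphic_table.find(name);
    if (it != megamorphic_table.end()) return it->second;
    const int index = static_cast<int>(megamorphic_table.size());
    megamorphic_table[name] = index;
    return index;
  }

  int main() {
    std::printf("%d\n", LookupOrInsert("toString"));
    return 0;
  }
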
 
diff --git a/runtime/vm/method_recognizer.h b/runtime/vm/method_recognizer.h
index 33b7f42..083b477 100644
--- a/runtime/vm/method_recognizer.h
+++ b/runtime/vm/method_recognizer.h
@@ -157,6 +157,7 @@
 #define CORE_LIB_INTRINSIC_LIST(V)                                             \
   V(_Smi, ~, Smi_bitNegate, Smi, 0x63bfee11)                                   \
   V(_Smi, get:bitLength, Smi_bitLength, Smi, 0x25b2e24c)                       \
+  V(_Smi, _bitAndFromSmi, Smi_bitAndFromSmi, Smi, 0x0df806ed)                  \
   V(_Bigint, _lsh, Bigint_lsh, Dynamic, 0x5cd95513)                            \
   V(_Bigint, _rsh, Bigint_rsh, Dynamic, 0x2d68d0e1)                            \
   V(_Bigint, _absAdd, Bigint_absAdd, Dynamic, 0x492f4865)                      \
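
The new method_recognizer.h entry follows the list's V-macro (X-macro)
convention: one line per recognized method carrying receiver class, selector,
enum name, result cid, and a source fingerprint checked against the Dart
source so stale intrinsics are caught. The TypedData::ToCString change later
in this diff stamps out switch cases from CLASS_LIST_TYPED_DATA the same way.
A self-contained illustration of the pattern, with an invented list:

  #include <cstdio>

  // One line per entry; V expands differently at each use site.
  #define RECOGNIZED_LIST(V)                                                 \
    V(Smi_bitNegate)                                                         \
    V(Smi_bitAndFromSmi)

  enum RecognizedKind {
  #define DEFINE_ENUM(name) k##name,
    RECOGNIZED_LIST(DEFINE_ENUM)
  #undef DEFINE_ENUM
    kNumRecognized
  };

  const char* RecognizedName(RecognizedKind kind) {
    switch (kind) {
  #define DEFINE_CASE(name) case k##name: return #name;
      RECOGNIZED_LIST(DEFINE_CASE)
  #undef DEFINE_CASE
      default: return "unknown";
    }
  }

  int main() {
    std::printf("%s\n", RecognizedName(kSmi_bitAndFromSmi));
    return 0;
  }
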
diff --git a/runtime/vm/native_symbol_fuchsia.cc b/runtime/vm/native_symbol_fuchsia.cc
new file mode 100644
index 0000000..cb0f02a
--- /dev/null
+++ b/runtime/vm/native_symbol_fuchsia.cc
@@ -0,0 +1,36 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/native_symbol.h"
+
+#include "platform/assert.h"
+
+namespace dart {
+
+void NativeSymbolResolver::InitOnce() {
+  UNIMPLEMENTED();
+}
+
+
+void NativeSymbolResolver::ShutdownOnce() {
+  UNIMPLEMENTED();
+}
+
+
+char* NativeSymbolResolver::LookupSymbolName(uintptr_t pc, uintptr_t* start) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+void NativeSymbolResolver::FreeSymbolName(char* name) {
+  UNIMPLEMENTED();
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 0214cfb..9b2e319 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -45,7 +45,6 @@
 #include "vm/timer.h"
 #include "vm/type_table.h"
 #include "vm/unicode.h"
-#include "vm/verified_memory.h"
 #include "vm/weak_code.h"
 
 namespace dart {
@@ -1028,9 +1027,9 @@
 }
 
 
-void Object::InitVmIsolateSnapshotObjectTable(intptr_t len) {
+void Object::set_vm_isolate_snapshot_object_table(const Array& table) {
   ASSERT(Isolate::Current() == Dart::vm_isolate());
-  *vm_isolate_snapshot_object_table_ = Array::New(len, Heap::kOld);
+  *vm_isolate_snapshot_object_table_ = table.raw();
 }
 
 
@@ -1065,7 +1064,7 @@
 
       intptr_t leftover_len = (leftover_size - TypedData::InstanceSize(0));
       ASSERT(TypedData::InstanceSize(leftover_len) == leftover_size);
-      raw->InitializeSmi(&(raw->ptr()->length_), Smi::New(leftover_len));
+      raw->StoreSmi(&(raw->ptr()->length_), Smi::New(leftover_len));
     } else {
       // Update the leftover space as a basic object.
       ASSERT(leftover_size == Object::InstanceSize());
@@ -1691,8 +1690,8 @@
 #define REGISTER_TYPED_DATA_VIEW_CLASS(clazz)                                  \
   cls = Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid);
   CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS);
-  cls = Class::NewTypedDataViewClass(kByteDataViewCid);
 #undef REGISTER_TYPED_DATA_VIEW_CLASS
+  cls = Class::NewTypedDataViewClass(kByteDataViewCid);
 #define REGISTER_EXT_TYPED_DATA_CLASS(clazz)                                   \
   cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid);
   CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS);
@@ -1799,7 +1798,6 @@
   tags = RawObject::VMHeapObjectTag::update(is_vm_object, tags);
   reinterpret_cast<RawObject*>(address)->tags_ = tags;
   ASSERT(is_vm_object == RawObject::IsVMHeapObject(tags));
-  VerifiedMemory::Accept(address, size);
 }
 
 
@@ -1918,7 +1916,6 @@
   memmove(reinterpret_cast<uint8_t*>(clone_addr + kHeaderSizeInBytes),
           reinterpret_cast<uint8_t*>(orig_addr + kHeaderSizeInBytes),
           size - kHeaderSizeInBytes);
-  VerifiedMemory::Accept(clone_addr, size);
   // Add clone to store buffer, if needed.
   if (!raw_clone->IsOldObject()) {
     // No need to remember an object in new space.
@@ -4448,6 +4445,7 @@
   while (it.MoveNext()) {
     constant ^= set.GetKey(it.Current());
     ASSERT(!constant.IsNull());
+    ASSERT(constant.IsCanonical());
     InsertCanonicalConstant(zone, constant);
   }
   set.Release();
@@ -14037,7 +14035,6 @@
   MemoryRegion region(reinterpret_cast<void*>(instrs.EntryPoint()),
                       instrs.size());
   assembler->FinalizeInstructions(region);
-  VerifiedMemory::Accept(region.start(), region.size());
   CPU::FlushICache(instrs.EntryPoint(), instrs.size());
 
   code.set_compile_timestamp(OS::GetCurrentMonotonicMicros());
@@ -21340,8 +21337,6 @@
                          space));
     NoSafepointScope no_safepoint;
     raw->StoreSmi(&(raw->ptr()->length_), Smi::New(len));
-    VerifiedMemory::Accept(reinterpret_cast<uword>(raw->ptr()),
-                           Array::InstanceSize(len));
     return raw;
   }
 }
@@ -22018,6 +22013,12 @@
 
 
 const char* TypedData::ToCString() const {
+  switch (GetClassId()) {
+#define CASE_TYPED_DATA_CLASS(clazz)                                           \
+  case kTypedData##clazz##Cid: return #clazz;
+  CLASS_LIST_TYPED_DATA(CASE_TYPED_DATA_CLASS);
+#undef CASE_TYPED_DATA_CLASS
+  }
   return "TypedData";
 }
 
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index cd014a3..143cf9e 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -25,7 +25,6 @@
 #include "vm/tags.h"
 #include "vm/thread.h"
 #include "vm/token_position.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
@@ -504,7 +503,7 @@
     return *void_type_;
   }
 
-  static void InitVmIsolateSnapshotObjectTable(intptr_t len);
+  static void set_vm_isolate_snapshot_object_table(const Array& table);
 
   static RawClass* class_class() { return class_class_; }
   static RawClass* dynamic_class() { return dynamic_class_; }
@@ -655,7 +654,6 @@
     ASSERT(Contains(reinterpret_cast<uword>(to)));
     if (raw()->IsNewObject()) {
       memmove(const_cast<RawObject**>(to), from, count * kWordSize);
-      VerifiedMemory::Accept(reinterpret_cast<uword>(to), count * kWordSize);
     } else {
       for (intptr_t i = 0; i < count; ++i) {
         StorePointer(&to[i], from[i]);
@@ -835,6 +833,7 @@
   friend void RawObject::Validate(Isolate* isolate) const;
   friend class Closure;
   friend class SnapshotReader;
+  friend class InstanceDeserializationCluster;
   friend class OneByteString;
   friend class TwoByteString;
   friend class ExternalOneByteString;
@@ -2217,6 +2216,8 @@
   FINAL_HEAP_OBJECT_IMPLEMENTATION(ICData, Object);
   friend class Class;
   friend class SnapshotWriter;
+  friend class Serializer;
+  friend class Deserializer;
 };
 
 
@@ -3335,6 +3336,7 @@
   friend class Class;
   friend class HeapProfiler;
   friend class RawField;
+  friend class FieldSerializationCluster;
 };
 
 
@@ -3859,6 +3861,7 @@
   friend class DictionaryIterator;
   friend class Namespace;
   friend class Object;
+  friend class LibraryDeserializationCluster;
 };
 
 
@@ -4860,6 +4863,8 @@
   FINAL_HEAP_OBJECT_IMPLEMENTATION(Code, Object);
   friend class Class;
   friend class SnapshotWriter;
+  friend class FunctionSerializationCluster;
+  friend class CodeSerializationCluster;
   friend class CodePatcher;  // for set_instructions
   friend class Precompiler;  // for set_instructions
   // So that the RawFunction pointer visitor can determine whether code the
@@ -5413,6 +5418,9 @@
   friend class SnapshotWriter;
   friend class StubCode;
   friend class TypedDataView;
+  friend class InstanceSerializationCluster;
+  friend class InstanceDeserializationCluster;
+  friend class ClassDeserializationCluster;  // vtable
 };
 
 
@@ -6786,6 +6794,7 @@
   // So that SkippedCodeFunctions can print a debug string from a NoHandleScope.
   friend class SkippedCodeFunctions;
   friend class RawOneByteString;
+  friend class RODataSerializationCluster;  // SetHash
 };
 
 
@@ -7429,7 +7438,7 @@
     ASSERT(index < Length());
 
     // TODO(iposva): Add storing NoSafepointScope.
-    DataStorePointer(ObjectAddr(index), value.raw());
+    data()->StorePointer(ObjectAddr(index), value.raw());
   }
 
   void Add(const Object& value, Heap::Space space = Heap::kNew) const;
@@ -7499,9 +7508,6 @@
     ASSERT((index >= 0) && (index < Length()));
     return &(DataArray()->data()[index]);
   }
-  void DataStorePointer(RawObject** addr, RawObject* value) const {
-    data()->StorePointer(addr, value);
-  }
 
   static const int kDefaultInitialCapacity = 4;
 
@@ -8132,6 +8138,7 @@
   static RawLinkedHashMap* NewUninitialized(Heap::Space space = Heap::kNew);
 
   friend class Class;
+  friend class LinkedHashMapDeserializationCluster;
 };
 
 
diff --git a/runtime/vm/object_store.h b/runtime/vm/object_store.h
index a58bc6c..6ab93de 100644
--- a/runtime/vm/object_store.h
+++ b/runtime/vm/object_store.h
@@ -612,9 +612,8 @@
     return NULL;
   }
 
-  friend class FullSnapshotWriter;
-  friend class SnapshotReader;
-  friend class VmIsolateSnapshotReader;
+  friend class Serializer;
+  friend class Deserializer;
 
   DISALLOW_COPY_AND_ASSIGN(ObjectStore);
 };
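
The two new Fuchsia files in this diff (native_symbol_fuchsia.cc above and
os_fuchsia.cc below) follow the VM's usual bring-up recipe for a new OS port:
land a file guarded by the target define in which nearly every entry point is
UNIMPLEMENTED(), then fill the functions in as the port matures; here only
the time, stderr-printing, init, and abort paths are real. The skeleton is
just:

  // Sketch of the port-stub shape; the VM's UNIMPLEMENTED() macro aborts
  // with location info, modeled here by a plain macro.
  #include <cstdio>
  #include <cstdlib>

  #define UNIMPLEMENTED()                                              \
    do {                                                               \
      std::fprintf(stderr, "unimplemented: %s\n", __func__);           \
      std::abort();                                                    \
    } while (0)

  int NumberOfAvailableProcessors() {
    UNIMPLEMENTED();
    return 0;  // unreachable, but keeps the signature honest
  }

  int main() {
    return 0;  // calling NumberOfAvailableProcessors() would abort
  }
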
diff --git a/runtime/vm/os_fuchsia.cc b/runtime/vm/os_fuchsia.cc
new file mode 100644
index 0000000..10faf1c
--- /dev/null
+++ b/runtime/vm/os_fuchsia.cc
@@ -0,0 +1,223 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/os.h"
+
+#include <magenta/syscalls.h>
+#include <magenta/types.h>
+
+#include "platform/assert.h"
+
+namespace dart {
+
+const char* OS::Name() {
+  return "fuchsia";
+}
+
+
+intptr_t OS::ProcessId() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+const char* OS::GetTimeZoneName(int64_t seconds_since_epoch) {
+  UNIMPLEMENTED();
+  return "";
+}
+
+
+int OS::GetTimeZoneOffsetInSeconds(int64_t seconds_since_epoch) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int OS::GetLocalTimeZoneAdjustmentInSeconds() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int64_t OS::GetCurrentTimeMillis() {
+  return GetCurrentTimeMicros() / 1000;
+}
+
+
+int64_t OS::GetCurrentTimeMicros() {
+  return _magenta_current_time() / 1000;
+}
+
+
+int64_t OS::GetCurrentMonotonicTicks() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int64_t OS::GetCurrentMonotonicFrequency() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int64_t OS::GetCurrentMonotonicMicros() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int64_t OS::GetCurrentThreadCPUMicros() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+void* OS::AlignedAllocate(intptr_t size, intptr_t alignment) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+void OS::AlignedFree(void* ptr) {
+  UNIMPLEMENTED();
+}
+
+
+// TODO(5411554): May need to hoist this architecture-dependent code
+// into an architecture-specific file, e.g., os_ia32_linux.cc.
+intptr_t OS::ActivationFrameAlignment() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+intptr_t OS::PreferredCodeAlignment() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+bool OS::AllowStackFrameIteratorFromAnotherThread() {
+  UNIMPLEMENTED();
+  return false;
+}
+
+
+int OS::NumberOfAvailableProcessors() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+void OS::Sleep(int64_t millis) {
+  UNIMPLEMENTED();
+}
+
+
+void OS::SleepMicros(int64_t micros) {
+  UNIMPLEMENTED();
+}
+
+
+void OS::DebugBreak() {
+  UNIMPLEMENTED();
+}
+
+
+char* OS::StrNDup(const char* s, intptr_t n) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+intptr_t OS::StrNLen(const char* s, intptr_t n) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+void OS::Print(const char* format, ...) {
+  UNIMPLEMENTED();
+}
+
+
+void OS::VFPrint(FILE* stream, const char* format, va_list args) {
+  vfprintf(stream, format, args);
+  fflush(stream);
+}
+
+
+int OS::SNPrint(char* str, size_t size, const char* format, ...) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int OS::VSNPrint(char* str, size_t size, const char* format, va_list args) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+char* OS::SCreate(Zone* zone, const char* format, ...) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+char* OS::VSCreate(Zone* zone, const char* format, va_list args) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+bool OS::StringToInt64(const char* str, int64_t* value) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+
+void OS::RegisterCodeObservers() {
+  UNIMPLEMENTED();
+}
+
+
+void OS::PrintErr(const char* format, ...) {
+  va_list args;
+  va_start(args, format);
+  VFPrint(stderr, format, args);
+  va_end(args);
+}
+
+
+void OS::InitOnce() {
+  // TODO(5411554): For now we check that InitOnce is called only once.
+  // Once there is a more formal mechanism to call InitOnce, we can move
+  // this check there.
+  static bool init_once_called = false;
+  ASSERT(init_once_called == false);
+  init_once_called = true;
+}
+
+
+void OS::Shutdown() {
+}
+
+
+void OS::Abort() {
+  abort();
+}
+
+
+void OS::Exit(int code) {
+  UNIMPLEMENTED();
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/os_thread.cc b/runtime/vm/os_thread.cc
index 75a7a31..3700c9f 100644
--- a/runtime/vm/os_thread.cc
+++ b/runtime/vm/os_thread.cc
@@ -26,7 +26,9 @@
 #if defined(DEBUG)
     join_id_(kInvalidThreadJoinId),
 #endif
+#ifndef PRODUCT
     trace_id_(OSThread::GetCurrentThreadTraceId()),
+#endif
     name_(NULL),
     timeline_block_lock_(new Mutex()),
     timeline_block_(NULL),
diff --git a/runtime/vm/os_thread.h b/runtime/vm/os_thread.h
index 415cff89..e950832 100644
--- a/runtime/vm/os_thread.h
+++ b/runtime/vm/os_thread.h
@@ -12,6 +12,8 @@
 // Declare the OS-specific types ahead of defining the generic classes.
 #if defined(TARGET_OS_ANDROID)
 #include "vm/os_thread_android.h"
+#elif defined(TARGET_OS_FUCHSIA)
+#include "vm/os_thread_fuchsia.h"
 #elif defined(TARGET_OS_LINUX)
 #include "vm/os_thread_linux.h"
 #elif defined(TARGET_OS_MACOS)
@@ -61,10 +63,12 @@
     return id_;
   }
 
+#ifndef PRODUCT
   ThreadId trace_id() const {
     ASSERT(trace_id_ != OSThread::kInvalidThreadId);
     return trace_id_;
   }
+#endif
 
   const char* name() const {
     return name_;
@@ -203,7 +207,9 @@
   }
 
   static void Cleanup();
+#ifndef PRODUCT
   static ThreadId GetCurrentThreadTraceId();
+#endif  // PRODUCT
   static OSThread* GetOSThreadFromThread(Thread* thread);
   static void AddThreadToListLocked(OSThread* thread);
   static void RemoveThreadFromList(OSThread* thread);
@@ -217,7 +223,9 @@
   // only called once per OSThread.
   ThreadJoinId join_id_;
 #endif
+#ifndef PRODUCT
   const ThreadId trace_id_;  // Used to interface with tracing tools.
+#endif
   char* name_;  // A name for this thread.
 
   Mutex* timeline_block_lock_;
diff --git a/runtime/vm/os_thread_android.cc b/runtime/vm/os_thread_android.cc
index 5a54c59..0ea90e6 100644
--- a/runtime/vm/os_thread_android.cc
+++ b/runtime/vm/os_thread_android.cc
@@ -188,9 +188,11 @@
 }
 
 
+#ifndef PRODUCT
 ThreadId OSThread::GetCurrentThreadTraceId() {
   return GetCurrentThreadId();
 }
+#endif  // PRODUCT
 
 
 ThreadJoinId OSThread::GetCurrentThreadJoinId(OSThread* thread) {
diff --git a/runtime/vm/os_thread_fuchsia.cc b/runtime/vm/os_thread_fuchsia.cc
new file mode 100644
index 0000000..c3e8d25
--- /dev/null
+++ b/runtime/vm/os_thread_fuchsia.cc
@@ -0,0 +1,436 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "platform/globals.h"  // NOLINT
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/os_thread.h"
+#include "vm/os_thread_fuchsia.h"
+
+#include <errno.h>  // NOLINT
+#include <magenta/syscalls.h>
+#include <magenta/types.h>
+
+#include "platform/assert.h"
+
+namespace dart {
+
+#define VALIDATE_PTHREAD_RESULT(result) \
+  if (result != 0) { \
+    FATAL1("pthread error: %d", result); \
+  }
+
+
+#if defined(DEBUG)
+#define ASSERT_PTHREAD_SUCCESS(result) VALIDATE_PTHREAD_RESULT(result)
+#else
+// NOTE: In release builds ASSERT expands to nothing, so this is a no-op.
+#define ASSERT_PTHREAD_SUCCESS(result) ASSERT(result == 0)
+#endif
+
+
+#if defined(DEBUG)
+#define RETURN_ON_PTHREAD_FAILURE(result) \
+  if (result != 0) { \
+    fprintf(stderr, "%s:%d: pthread error: %d\n", \
+            __FILE__, __LINE__, result); \
+    return result; \
+  }
+#else
+#define RETURN_ON_PTHREAD_FAILURE(result) \
+  if (result != 0) return result;
+#endif
+
+
+// Computes the absolute wake-up time |micros| microseconds from now, in the
+// struct timespec form expected by pthread_cond_timedwait.
+static void ComputeTimeSpecMicros(struct timespec* ts, int64_t micros) {
+  // _magenta_current_time() returns the current time in nanoseconds.
+  mx_time_t now = _magenta_current_time();
+  mx_time_t target = now + (micros * kNanosecondsPerMicrosecond);
+  int64_t secs = target / kNanosecondsPerSecond;
+  int64_t nanos = target - (secs * kNanosecondsPerSecond);
+
+  // |ts| is passed in uninitialized, so assign rather than accumulate;
+  // nanos is already in [0, kNanosecondsPerSecond) by construction.
+  ts->tv_sec = secs;
+  ts->tv_nsec = nanos;
+}
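+// For example (illustrative values): micros == 1500 when now is
+// 2000000000 ns gives target == 2001500000 ns, i.e. secs == 2 and
+// nanos == 1500000.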
+
+
+class ThreadStartData {
+ public:
+  ThreadStartData(const char* name,
+                  OSThread::ThreadStartFunction function,
+                  uword parameter)
+      : name_(name), function_(function), parameter_(parameter) {}
+
+  const char* name() const { return name_; }
+  OSThread::ThreadStartFunction function() const { return function_; }
+  uword parameter() const { return parameter_; }
+
+ private:
+  const char* name_;
+  OSThread::ThreadStartFunction function_;
+  uword parameter_;
+
+  DISALLOW_COPY_AND_ASSIGN(ThreadStartData);
+};
+
+
+// Dispatch to the thread start function provided by the caller. This
+// trampoline is used to ensure that the thread is properly destroyed if the
+// thread just exits.
+static void* ThreadStart(void* data_ptr) {
+  ThreadStartData* data = reinterpret_cast<ThreadStartData*>(data_ptr);
+
+  const char* name = data->name();
+  OSThread::ThreadStartFunction function = data->function();
+  uword parameter = data->parameter();
+  delete data;
+
+  // Create a new OSThread object and set it as TLS for the new thread.
+  OSThread* thread = OSThread::CreateOSThread();
+  if (thread != NULL) {
+    OSThread::SetCurrent(thread);
+    thread->set_name(name);
+    // Call the supplied thread start function, handing it its parameter.
+    function(parameter);
+  }
+
+  return NULL;
+}
+
+
+int OSThread::Start(const char* name,
+                    ThreadStartFunction function,
+                    uword parameter) {
+  pthread_attr_t attr;
+  int result = pthread_attr_init(&attr);
+  RETURN_ON_PTHREAD_FAILURE(result);
+
+  result = pthread_attr_setstacksize(&attr, OSThread::GetMaxStackSize());
+  RETURN_ON_PTHREAD_FAILURE(result);
+
+  ThreadStartData* data = new ThreadStartData(name, function, parameter);
+
+  pthread_t tid;
+  result = pthread_create(&tid, &attr, ThreadStart, data);
+  RETURN_ON_PTHREAD_FAILURE(result);
+
+  result = pthread_attr_destroy(&attr);
+  RETURN_ON_PTHREAD_FAILURE(result);
+
+  return 0;
+}
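+
+
+// A minimal usage sketch (entry function and thread name hypothetical):
+//
+//   static void MyEntry(uword param) { /* runs on the new thread */ }
+//   ...
+//   int result = OSThread::Start("worker", &MyEntry, 0);
+//   if (result != 0) {
+//     // result holds the pthread error code.
+//   }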
+
+
+const ThreadId OSThread::kInvalidThreadId = static_cast<ThreadId>(0);
+const ThreadJoinId OSThread::kInvalidThreadJoinId =
+    static_cast<ThreadJoinId>(0);
+
+
+ThreadLocalKey OSThread::CreateThreadLocal(ThreadDestructor destructor) {
+  pthread_key_t key = kUnsetThreadLocalKey;
+  int result = pthread_key_create(&key, destructor);
+  VALIDATE_PTHREAD_RESULT(result);
+  ASSERT(key != kUnsetThreadLocalKey);
+  return key;
+}
+
+
+void OSThread::DeleteThreadLocal(ThreadLocalKey key) {
+  ASSERT(key != kUnsetThreadLocalKey);
+  int result = pthread_key_delete(key);
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+
+void OSThread::SetThreadLocal(ThreadLocalKey key, uword value) {
+  ASSERT(key != kUnsetThreadLocalKey);
+  int result = pthread_setspecific(key, reinterpret_cast<void*>(value));
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+
+intptr_t OSThread::GetMaxStackSize() {
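+  // 128 * kWordSize KB: 512 KB on 32-bit targets, 1 MB on 64-bit targets.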
+  const int kStackSize = (128 * kWordSize * KB);
+  return kStackSize;
+}
+
+
+ThreadId OSThread::GetCurrentThreadId() {
+  return pthread_self();
+}
+
+
+#ifndef PRODUCT
+ThreadId OSThread::GetCurrentThreadTraceId() {
+  UNIMPLEMENTED();
+  return 0;
+}
+#endif  // PRODUCT
+
+
+ThreadJoinId OSThread::GetCurrentThreadJoinId(OSThread* thread) {
+  ASSERT(thread != NULL);
+  // Make sure we're filling in the join id for the current thread.
+  ASSERT(thread->id() == GetCurrentThreadId());
+  // Make sure the join_id_ hasn't been set yet.
+  DEBUG_ASSERT(thread->join_id_ == kInvalidThreadJoinId);
+  pthread_t id = pthread_self();
+#if defined(DEBUG)
+  thread->join_id_ = id;
+#endif
+  return id;
+}
+
+
+void OSThread::Join(ThreadJoinId id) {
+  int result = pthread_join(id, NULL);
+  ASSERT(result == 0);
+}
+
+
+intptr_t OSThread::ThreadIdToIntPtr(ThreadId id) {
+  ASSERT(sizeof(id) == sizeof(intptr_t));
+  return static_cast<intptr_t>(id);
+}
+
+
+ThreadId OSThread::ThreadIdFromIntPtr(intptr_t id) {
+  return static_cast<ThreadId>(id);
+}
+
+
+bool OSThread::Compare(ThreadId a, ThreadId b) {
+  return pthread_equal(a, b) != 0;
+}
+
+
+Mutex::Mutex() {
+  pthread_mutexattr_t attr;
+  int result = pthread_mutexattr_init(&attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  result = pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_ERRORCHECK);
+  VALIDATE_PTHREAD_RESULT(result);
+#endif  // defined(DEBUG)
+
+  result = pthread_mutex_init(data_.mutex(), &attr);
+  // Verify that creating a pthread_mutex succeeded.
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_mutexattr_destroy(&attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  owner_ = OSThread::kInvalidThreadId;
+#endif  // defined(DEBUG)
+}
+
+
+Mutex::~Mutex() {
+  int result = pthread_mutex_destroy(data_.mutex());
+  // Verify that the pthread_mutex was destroyed.
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(owner_ == OSThread::kInvalidThreadId);
+#endif  // defined(DEBUG)
+}
+
+
+void Mutex::Lock() {
+  int result = pthread_mutex_lock(data_.mutex());
+  // Specifically check for deadlock to help debugging.
+  ASSERT(result != EDEADLK);
+  ASSERT_PTHREAD_SUCCESS(result);  // Verify no other errors.
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  owner_ = OSThread::GetCurrentThreadId();
+#endif  // defined(DEBUG)
+}
+
+
+bool Mutex::TryLock() {
+  int result = pthread_mutex_trylock(data_.mutex());
+  // Return false if the lock is busy and locking failed.
+  if (result == EBUSY) {
+    return false;
+  }
+  ASSERT_PTHREAD_SUCCESS(result);  // Verify no other errors.
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  owner_ = OSThread::GetCurrentThreadId();
+#endif  // defined(DEBUG)
+  return true;
+}
+
+
+void Mutex::Unlock() {
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(IsOwnedByCurrentThread());
+  owner_ = OSThread::kInvalidThreadId;
+#endif  // defined(DEBUG)
+  int result = pthread_mutex_unlock(data_.mutex());
+  // Specifically check for wrong thread unlocking to aid debugging.
+  ASSERT(result != EPERM);
+  ASSERT_PTHREAD_SUCCESS(result);  // Verify no other errors.
+}
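+
+
+// TryLock never blocks; a hypothetical caller:
+//
+//   if (mutex.TryLock()) {
+//     // ... critical section ...
+//     mutex.Unlock();
+//   }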
+
+
+Monitor::Monitor() {
+  pthread_mutexattr_t mutex_attr;
+  int result = pthread_mutexattr_init(&mutex_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  result = pthread_mutexattr_settype(&mutex_attr, PTHREAD_MUTEX_ERRORCHECK);
+  VALIDATE_PTHREAD_RESULT(result);
+#endif  // defined(DEBUG)
+
+  result = pthread_mutex_init(data_.mutex(), &mutex_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_mutexattr_destroy(&mutex_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+  pthread_condattr_t cond_attr;
+  result = pthread_condattr_init(&cond_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_condattr_setclock(&cond_attr, CLOCK_MONOTONIC);
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_cond_init(data_.cond(), &cond_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_condattr_destroy(&cond_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  owner_ = OSThread::kInvalidThreadId;
+#endif  // defined(DEBUG)
+}
+
+
+Monitor::~Monitor() {
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(owner_ == OSThread::kInvalidThreadId);
+#endif  // defined(DEBUG)
+
+  int result = pthread_mutex_destroy(data_.mutex());
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_cond_destroy(data_.cond());
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+
+bool Monitor::TryEnter() {
+  int result = pthread_mutex_trylock(data_.mutex());
+  // Return false if the lock is busy and locking failed.
+  if (result == EBUSY) {
+    return false;
+  }
+  ASSERT_PTHREAD_SUCCESS(result);  // Verify no other errors.
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(owner_ == OSThread::kInvalidThreadId);
+  owner_ = OSThread::GetCurrentThreadId();
+#endif  // defined(DEBUG)
+  return true;
+}
+
+
+void Monitor::Enter() {
+  int result = pthread_mutex_lock(data_.mutex());
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(owner_ == OSThread::kInvalidThreadId);
+  owner_ = OSThread::GetCurrentThreadId();
+#endif  // defined(DEBUG)
+}
+
+
+void Monitor::Exit() {
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(IsOwnedByCurrentThread());
+  owner_ = OSThread::kInvalidThreadId;
+#endif  // defined(DEBUG)
+
+  int result = pthread_mutex_unlock(data_.mutex());
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+
+Monitor::WaitResult Monitor::Wait(int64_t millis) {
+  Monitor::WaitResult retval = WaitMicros(millis * kMicrosecondsPerMillisecond);
+  return retval;
+}
+
+
+Monitor::WaitResult Monitor::WaitMicros(int64_t micros) {
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(IsOwnedByCurrentThread());
+  ThreadId saved_owner = owner_;
+  owner_ = OSThread::kInvalidThreadId;
+#endif  // defined(DEBUG)
+
+  Monitor::WaitResult retval = kNotified;
+  if (micros == kNoTimeout) {
+    // Wait forever.
+    int result = pthread_cond_wait(data_.cond(), data_.mutex());
+    VALIDATE_PTHREAD_RESULT(result);
+  } else {
+    struct timespec ts;
+    ComputeTimeSpecMicros(&ts, micros);
+    int result = pthread_cond_timedwait(data_.cond(), data_.mutex(), &ts);
+    ASSERT((result == 0) || (result == ETIMEDOUT));
+    if (result == ETIMEDOUT) {
+      retval = kTimedOut;
+    }
+  }
+
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(owner_ == OSThread::kInvalidThreadId);
+  owner_ = OSThread::GetCurrentThreadId();
+  ASSERT(owner_ == saved_owner);
+#endif  // defined(DEBUG)
+  return retval;
+}
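+
+
+// A minimal sketch of a timed wait (caller code hypothetical):
+//
+//   monitor.Enter();
+//   while (!condition) {
+//     if (monitor.Wait(timeout_millis) == Monitor::kTimedOut) break;
+//   }
+//   monitor.Exit();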
+
+
+void Monitor::Notify() {
+  // When running with assertions enabled we track the owner.
+  ASSERT(IsOwnedByCurrentThread());
+  int result = pthread_cond_signal(data_.cond());
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+
+void Monitor::NotifyAll() {
+  // When running with assertions enabled we track the owner.
+  ASSERT(IsOwnedByCurrentThread());
+  int result = pthread_cond_broadcast(data_.cond());
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/os_thread_fuchsia.h b/runtime/vm/os_thread_fuchsia.h
new file mode 100644
index 0000000..b45207b
--- /dev/null
+++ b/runtime/vm/os_thread_fuchsia.h
@@ -0,0 +1,80 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#ifndef VM_OS_THREAD_FUCHSIA_H_
+#define VM_OS_THREAD_FUCHSIA_H_
+
+#if !defined(VM_OS_THREAD_H_)
+#error Do not include os_thread_fuchsia.h directly; use os_thread.h instead.
+#endif
+
+#include <pthread.h>
+
+#include "platform/assert.h"
+#include "platform/globals.h"
+
+namespace dart {
+
+typedef pthread_key_t ThreadLocalKey;
+typedef pthread_t ThreadId;
+typedef pthread_t ThreadJoinId;
+
+
+static const ThreadLocalKey kUnsetThreadLocalKey =
+    static_cast<pthread_key_t>(-1);
+
+
+class ThreadInlineImpl {
+ private:
+  ThreadInlineImpl() {}
+  ~ThreadInlineImpl() {}
+
+  static uword GetThreadLocal(ThreadLocalKey key) {
+    ASSERT(key != kUnsetThreadLocalKey);
+    return reinterpret_cast<uword>(pthread_getspecific(key));
+  }
+
+  friend class OSThread;
+
+  DISALLOW_ALLOCATION();
+  DISALLOW_COPY_AND_ASSIGN(ThreadInlineImpl);
+};
+
+
+class MutexData {
+ private:
+  MutexData() {}
+  ~MutexData() {}
+
+  pthread_mutex_t* mutex() { return &mutex_; }
+
+  pthread_mutex_t mutex_;
+
+  friend class Mutex;
+
+  DISALLOW_ALLOCATION();
+  DISALLOW_COPY_AND_ASSIGN(MutexData);
+};
+
+
+class MonitorData {
+ private:
+  MonitorData() {}
+  ~MonitorData() {}
+
+  pthread_mutex_t* mutex() { return &mutex_; }
+  pthread_cond_t* cond() { return &cond_; }
+
+  pthread_mutex_t mutex_;
+  pthread_cond_t cond_;
+
+  friend class Monitor;
+
+  DISALLOW_ALLOCATION();
+  DISALLOW_COPY_AND_ASSIGN(MonitorData);
+};
+
+}  // namespace dart
+
+#endif  // VM_OS_THREAD_FUCHSIA_H_
diff --git a/runtime/vm/os_thread_linux.cc b/runtime/vm/os_thread_linux.cc
index 9695042..0f8f958 100644
--- a/runtime/vm/os_thread_linux.cc
+++ b/runtime/vm/os_thread_linux.cc
@@ -189,9 +189,11 @@
 }
 
 
+#ifndef PRODUCT
 ThreadId OSThread::GetCurrentThreadTraceId() {
   return syscall(__NR_gettid);
 }
+#endif  // PRODUCT
 
 
 ThreadJoinId OSThread::GetCurrentThreadJoinId(OSThread* thread) {
diff --git a/runtime/vm/os_thread_macos.cc b/runtime/vm/os_thread_macos.cc
index b5fd6f9..53034be 100644
--- a/runtime/vm/os_thread_macos.cc
+++ b/runtime/vm/os_thread_macos.cc
@@ -163,9 +163,11 @@
 }
 
 
+#ifndef PRODUCT
 ThreadId OSThread::GetCurrentThreadTraceId() {
   return ThreadIdFromIntPtr(pthread_mach_thread_np(pthread_self()));
 }
+#endif  // PRODUCT
 
 
 ThreadJoinId OSThread::GetCurrentThreadJoinId(OSThread* thread) {
diff --git a/runtime/vm/os_thread_win.cc b/runtime/vm/os_thread_win.cc
index ed8de4af..6be6804 100644
--- a/runtime/vm/os_thread_win.cc
+++ b/runtime/vm/os_thread_win.cc
@@ -125,9 +125,11 @@
 }
 
 
+#ifndef PRODUCT
 ThreadId OSThread::GetCurrentThreadTraceId() {
   return ::GetCurrentThreadId();
 }
+#endif  // PRODUCT
 
 
 ThreadJoinId OSThread::GetCurrentThreadJoinId(OSThread* thread) {
diff --git a/runtime/vm/pages.cc b/runtime/vm/pages.cc
index e5d1e65..a10d097 100644
--- a/runtime/vm/pages.cc
+++ b/runtime/vm/pages.cc
@@ -12,7 +12,6 @@
 #include "vm/object.h"
 #include "vm/os_thread.h"
 #include "vm/safepoint.h"
-#include "vm/verified_memory.h"
 #include "vm/virtual_memory.h"
 
 namespace dart {
@@ -57,7 +56,7 @@
 
 HeapPage* HeapPage::Allocate(intptr_t size_in_words, PageType type) {
   VirtualMemory* memory =
-      VerifiedMemory::Reserve(size_in_words << kWordSizeLog2);
+      VirtualMemory::Reserve(size_in_words << kWordSizeLog2);
   if (memory == NULL) {
     return NULL;
   }
@@ -672,6 +671,7 @@
 }
 
 
+#ifndef PRODUCT
 void PageSpace::PrintToJSONObject(JSONObject* object) const {
   if (!FLAG_support_service) {
     return;
@@ -755,6 +755,7 @@
     }
   }
 }
+#endif  // PRODUCT
 
 
 bool PageSpace::ShouldCollectCode() {
@@ -1055,23 +1056,6 @@
 }
 
 
-uword PageSpace::TryAllocateSmiInitializedLocked(intptr_t size,
-                                                 GrowthPolicy growth_policy) {
-  uword result = TryAllocateDataBumpLocked(size, growth_policy);
-  if (collections() != 0) {
-    FATAL1("%" Pd " GCs before TryAllocateSmiInitializedLocked", collections());
-  }
-#if defined(DEBUG)
-  RawObject** begin = reinterpret_cast<RawObject**>(result);
-  RawObject** end = reinterpret_cast<RawObject**>(result + size);
-  for (RawObject** current = begin; current < end; ++current) {
-    ASSERT(!(*current)->IsHeapObject());
-  }
-#endif
-  return result;
-}
-
-
 void PageSpace::SetupExternalPage(void* pointer,
                                   uword size,
                                   bool is_executable) {
diff --git a/runtime/vm/pages.h b/runtime/vm/pages.h
index ff1ca15..ae32c87 100644
--- a/runtime/vm/pages.h
+++ b/runtime/vm/pages.h
@@ -314,8 +314,10 @@
     return collections_;
   }
 
+#ifndef PRODUCT
   void PrintToJSONObject(JSONObject* object) const;
   void PrintHeapMapToJSONStream(Isolate* isolate, JSONStream* stream) const;
+#endif  // PRODUCT
 
   void AllocateExternal(intptr_t size);
   void FreeExternal(intptr_t size);
@@ -345,11 +347,6 @@
   uword TryAllocateDataBumpLocked(intptr_t size, GrowthPolicy growth_policy);
   // Prefer small freelist blocks, then chip away at the bump block.
   uword TryAllocatePromoLocked(intptr_t size, GrowthPolicy growth_policy);
-  // Allocates memory where every word is guaranteed to be a Smi. Calling this
-  // method after the first garbage collection is inefficient in release mode
-  // and illegal in debug mode.
-  uword TryAllocateSmiInitializedLocked(intptr_t size,
-                                        GrowthPolicy growth_policy);
 
   // Bump block allocation from generated code.
   uword* TopAddress() { return &bump_top_; }
diff --git a/runtime/vm/parser.cc b/runtime/vm/parser.cc
index 4619602..c902639 100644
--- a/runtime/vm/parser.cc
+++ b/runtime/vm/parser.cc
@@ -1473,7 +1473,8 @@
 
   EnsureExpressionTemp();
   StoreInstanceFieldNode* store_field =
-      new StoreInstanceFieldNode(ident_pos, receiver, field, value);
+      new StoreInstanceFieldNode(ident_pos, receiver, field, value,
+                                 /* is_initializer = */ false);
   current_block_->statements->Add(store_field);
   current_block_->statements->Add(new ReturnNode(ST(ident_pos)));
   return CloseBlock();
@@ -2073,6 +2074,9 @@
                                    bool evaluate_metadata,
                                    ParamList* params) {
   TRACE_PARSER("ParseFormalParameters");
+  // Track whether a parameter has been seen so that a trailing comma can be
+  // distinguished from an empty parameter list, which is not allowed for
+  // optional parameters. The completely empty parameter list is handled
+  // before getting here.
+  bool has_seen_parameter = false;
   do {
     ConsumeToken();
     if (!params->has_optional_positional_parameters &&
@@ -2089,9 +2093,18 @@
       params->has_optional_named_parameters = true;
       return;
     }
+    Token::Kind terminator =
+        params->has_optional_positional_parameters ? Token::kRBRACK :
+        params->has_optional_named_parameters ? Token::kRBRACE :
+        Token::kRPAREN;
+    if (has_seen_parameter && CurrentToken() == terminator) {
+      // Allow a trailing comma.
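+      // e.g. "f(int a, int b,)", "f([int a,])", or "f({int a,})".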
+      break;
+    }
     ParseFormalParameter(allow_explicit_default_values,
                          evaluate_metadata,
                          params);
+    has_seen_parameter = true;
   } while (CurrentToken() == Token::kCOMMA);
 }
 
@@ -2609,7 +2622,8 @@
       initialized_fields, instance, &field, init_expr);
   if (initializer == NULL) {
     initializer =
-        new(Z) StoreInstanceFieldNode(field_pos, instance, field, init_expr);
+        new(Z) StoreInstanceFieldNode(field_pos, instance, field, init_expr,
+                                      /* is_initializer = */ true);
   }
   return initializer;
 }
@@ -2735,7 +2749,8 @@
           new StoreInstanceFieldNode(field.token_pos(),
                                      instance,
                                      field,
-                                     init_expr);
+                                     init_expr,
+                                     /* is_initializer = */ true);
       current_block_->statements->Add(field_init);
     }
   }
@@ -3198,7 +3213,8 @@
                                     value);
         if (initializer == NULL) {
           initializer = new(Z) StoreInstanceFieldNode(
-              param.name_pos, instance, field, value);
+              param.name_pos, instance, field, value,
+              /* is_initializer = */ true);
         }
         current_block_->statements->Add(initializer);
       }
@@ -11226,6 +11242,10 @@
       ASSERT((CurrentToken() == Token::kLPAREN) ||
              (CurrentToken() == Token::kCOMMA));
       ConsumeToken();
+      if (CurrentToken() == Token::kRPAREN) {
+        // Allow a trailing comma.
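+        // e.g. a call written as "f(1, 2,)".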
+        break;
+      }
       if (IsIdentifier() && (LookaheadToken(1) == Token::kCOLON)) {
         named_argument_seen = true;
         // The canonicalization of the arguments descriptor array built in
diff --git a/runtime/vm/raw_object.cc b/runtime/vm/raw_object.cc
index 8f92e9d..73070ac 100644
--- a/runtime/vm/raw_object.cc
+++ b/runtime/vm/raw_object.cc
@@ -15,11 +15,6 @@
 
 namespace dart {
 
-#if defined(DEBUG)
-DEFINE_FLAG(bool, validate_overwrite, true, "Verify overwritten fields.");
-#endif  // DEBUG
-
-
 void RawObject::Validate(Isolate* isolate) const {
   if (Object::void_class_ == reinterpret_cast<RawClass*>(kHeapObjectTag)) {
     // Validation relies on properly initialized class classes. Skip if the
@@ -226,22 +221,6 @@
 }
 
 
-#if defined(DEBUG)
-void RawObject::ValidateOverwrittenPointer(RawObject* raw) {
-  if (FLAG_validate_overwrite) {
-    raw->Validate(Isolate::Current());
-  }
-}
-
-
-void RawObject::ValidateOverwrittenSmi(RawSmi* raw) {
-  if (FLAG_validate_overwrite && raw->IsHeapObject() && raw != Object::null()) {
-    FATAL1("Expected smi/null, found: %" Px "\n", reinterpret_cast<uword>(raw));
-  }
-}
-#endif  // DEBUG
-
-
 intptr_t RawObject::VisitPointers(ObjectPointerVisitor* visitor) {
   intptr_t size = 0;
 
diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h
index 286a5f9..dd8ff93 100644
--- a/runtime/vm/raw_object.h
+++ b/runtime/vm/raw_object.h
@@ -11,7 +11,6 @@
 #include "vm/snapshot.h"
 #include "vm/token.h"
 #include "vm/token_position.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
@@ -250,9 +249,11 @@
     }                                                                          \
     SNAPSHOT_WRITER_SUPPORT()                                                  \
     HEAP_PROFILER_SUPPORT()                                                    \
+    friend class object##SerializationCluster;                                 \
+    friend class object##DeserializationCluster;                               \
 
-// RawObject is the base class of all raw objects, even though it carries the
-// class_ field not all raw objects are allocated in the heap and thus cannot
+// RawObject is the base class of all raw objects; even though it carries the
+// tags_ field, not all raw objects are allocated in the heap, so some cannot
 // be dereferenced (e.g. RawSmi).
 class RawObject {
  public:
@@ -570,10 +571,7 @@
 
   template<typename type>
   void StorePointer(type const* addr, type value) {
-#if defined(DEBUG)
-    ValidateOverwrittenPointer(*addr);
-#endif  // DEBUG
-    VerifiedMemory::Write(const_cast<type*>(addr), value);
+    *const_cast<type*>(addr) = value;
     // Filter stores based on source and target.
     if (!value->IsHeapObject()) return;
     if (value->IsNewObject() && this->IsOldObject() &&
@@ -586,29 +584,14 @@
   // Use for storing into an explicitly Smi-typed field of an object
   // (i.e., both the previous and new value are Smis).
   void StoreSmi(RawSmi* const* addr, RawSmi* value) {
-#if defined(DEBUG)
-    ValidateOverwrittenSmi(*addr);
-#endif  // DEBUG
     // Can't use Contains, as array length is initialized through this method.
     ASSERT(reinterpret_cast<uword>(addr) >= RawObject::ToAddr(this));
-    VerifiedMemory::Write(const_cast<RawSmi**>(addr), value);
+    *const_cast<RawSmi**>(addr) = value;
   }
 
-  void InitializeSmi(RawSmi* const* addr, RawSmi* value) {
-    // Can't use Contains, as array length is initialized through this method.
-    ASSERT(reinterpret_cast<uword>(addr) >= RawObject::ToAddr(this));
-    // This is an initializing store, so any previous content is OK.
-    VerifiedMemory::Accept(reinterpret_cast<uword>(addr), kWordSize);
-    VerifiedMemory::Write(const_cast<RawSmi**>(addr), value);
-  }
-
-#if defined(DEBUG)
-  static void ValidateOverwrittenPointer(RawObject* raw);
-  static void ValidateOverwrittenSmi(RawSmi* raw);
-#endif  // DEBUG
-
   friend class Api;
   friend class ApiMessageReader;  // GetClassId
+  friend class Serializer;  // GetClassId
   friend class Array;
   friend class Bigint;
   friend class ByteBuffer;
@@ -644,6 +627,7 @@
   friend class AssemblyInstructionsWriter;
   friend class BlobInstructionsWriter;
   friend class SnapshotReader;
+  friend class Deserializer;
   friend class SnapshotWriter;
   friend class String;
   friend class Type;  // GetClassId
@@ -730,6 +714,7 @@
   friend class RawInstance;
   friend class RawInstructions;
   friend class SnapshotReader;
+  friend class InstanceSerializationCluster;
 };
 
 
@@ -829,7 +814,9 @@
   // So that the SkippedCodeFunctions::DetachCode can null out the code fields.
   friend class SkippedCodeFunctions;
   friend class Class;
+
   RAW_HEAP_OBJECT_IMPLEMENTATION(Function);
+
   static bool ShouldVisitCode(RawCode* raw_code);
   static bool CheckUsageCounter(RawFunction* raw_fun);
 
@@ -1161,6 +1148,7 @@
   friend class SkippedCodeFunctions;
   friend class StackFrame;
   friend class Profiler;
+  friend class FunctionDeserializationCluster;
 };
 
 
@@ -1850,6 +1838,7 @@
 
   friend class ApiMessageReader;
   friend class SnapshotReader;
+  friend class RODataSerializationCluster;
 };
 
 
@@ -1861,6 +1850,7 @@
   const uint16_t* data() const { OPEN_ARRAY_START(uint16_t, uint16_t); }
 
   friend class SnapshotReader;
+  friend class RODataSerializationCluster;
 };
 
 
@@ -1936,6 +1926,9 @@
     return reinterpret_cast<RawObject**>(&ptr()->data()[length - 1]);
   }
 
+  friend class LinkedHashMapSerializationCluster;
+  friend class LinkedHashMapDeserializationCluster;
+  friend class Deserializer;
   friend class RawCode;
   friend class RawImmutableArray;
   friend class SnapshotReader;
@@ -1988,7 +1981,6 @@
     return reinterpret_cast<RawObject**>(&ptr()->deleted_keys_);
   }
 
-
   friend class SnapshotReader;
 };
 
@@ -1999,6 +1991,7 @@
   ALIGN8 float value_[4];
 
   friend class SnapshotReader;
+
  public:
   float x() const { return value_[0]; }
   float y() const { return value_[1]; }
@@ -2014,6 +2007,7 @@
   ALIGN8 int32_t value_[4];
 
   friend class SnapshotReader;
+
  public:
   int32_t x() const { return value_[0]; }
   int32_t y() const { return value_[1]; }
@@ -2029,6 +2023,7 @@
   ALIGN8 double value_[2];
 
   friend class SnapshotReader;
+
  public:
   double x() const { return value_[0]; }
   double y() const { return value_[1]; }
@@ -2065,6 +2060,8 @@
   friend class SnapshotReader;
   friend class ObjectPool;
   friend class RawObjectPool;
+  friend class ObjectPoolSerializationCluster;
+  friend class ObjectPoolDeserializationCluster;
 };
 
 
diff --git a/runtime/vm/raw_object_snapshot.cc b/runtime/vm/raw_object_snapshot.cc
index b245a94..c2448a3 100644
--- a/runtime/vm/raw_object_snapshot.cc
+++ b/runtime/vm/raw_object_snapshot.cc
@@ -1374,94 +1374,13 @@
 }
 
 
-#if defined(DEBUG)
-static uword Checksum(uword entry, intptr_t size) {
-  uword sum = 0;
-  uword* start = reinterpret_cast<uword*>(entry);
-  uword* end = reinterpret_cast<uword*>(entry + size);
-  for (uword* cursor = start; cursor < end; cursor++) {
-    sum ^= *cursor;
-  }
-  return sum;
-}
-#endif
-
-
 RawCode* Code::ReadFrom(SnapshotReader* reader,
                         intptr_t object_id,
                         intptr_t tags,
                         Snapshot::Kind kind,
                         bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  Code& result = Code::ZoneHandle(reader->zone(), NEW_OBJECT_WITH_LEN(Code, 0));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  result.set_compile_timestamp(0);
-  result.set_state_bits(reader->Read<int32_t>());
-  result.set_lazy_deopt_pc_offset(-1);
-
-  int32_t text_offset = reader->Read<int32_t>();
-  RawInstructions* instr = reinterpret_cast<RawInstructions*>(
-      reader->GetInstructionsAt(text_offset) + kHeapObjectTag);
-  uword entry_point = Instructions::EntryPoint(instr);
-
-#if defined(DEBUG)
-  ASSERT(instr->IsMarked());
-  ASSERT(instr->IsVMHeapObject());
-  uword expected_check = reader->Read<uword>();
-  intptr_t instructions_size = Utils::RoundUp(instr->size_,
-                                              OS::PreferredCodeAlignment());
-  uword actual_check = Checksum(entry_point, instructions_size);
-  ASSERT(expected_check == actual_check);
-#endif
-
-  result.StoreNonPointer(&result.raw_ptr()->entry_point_, entry_point);
-
-  result.StorePointer(&result.raw_ptr()->active_instructions_, instr);
-  result.StorePointer(&result.raw_ptr()->instructions_, instr);
-
-  (*reader->PassiveObjectHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(reinterpret_cast<RawObject*const*>(
-                          &result.raw_ptr()->object_pool_),
-                      reader->PassiveObjectHandle()->raw());
-
-  (*reader->PassiveObjectHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(&result.raw_ptr()->owner_,
-                      reader->PassiveObjectHandle()->raw());
-
-  (*reader->PassiveObjectHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(reinterpret_cast<RawObject*const*>(
-                          &result.raw_ptr()->exception_handlers_),
-                      reader->PassiveObjectHandle()->raw());
-
-  (*reader->PassiveObjectHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(reinterpret_cast<RawObject*const*>(
-                          &result.raw_ptr()->pc_descriptors_),
-                      reader->PassiveObjectHandle()->raw());
-
-  (*reader->PassiveObjectHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(reinterpret_cast<RawObject*const*>(
-                          &result.raw_ptr()->stackmaps_),
-                      reader->PassiveObjectHandle()->raw());
-
-  result.StorePointer(&result.raw_ptr()->deopt_info_array_,
-                      Array::null());
-  result.StorePointer(&result.raw_ptr()->static_calls_target_table_,
-                      Array::null());
-  result.StorePointer(&result.raw_ptr()->var_descriptors_,
-                      LocalVarDescriptors::null());
-  result.StorePointer(&result.raw_ptr()->inlined_metadata_,
-                      Array::null());
-  result.StorePointer(&result.raw_ptr()->code_source_map_,
-                      CodeSourceMap::null());
-  result.StorePointer(&result.raw_ptr()->comments_,
-                      Array::null());
-  result.StorePointer(&result.raw_ptr()->return_address_metadata_,
-                      Object::null());
-
-  return result.raw();
+  UNREACHABLE();
+  return Code::null();
 }
 
 
@@ -1469,55 +1388,7 @@
                       intptr_t object_id,
                       Snapshot::Kind kind,
                       bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  intptr_t pointer_offsets_length =
-      Code::PtrOffBits::decode(ptr()->state_bits_);
-  if (pointer_offsets_length != 0) {
-    FATAL("Cannot serialize code with embedded pointers");
-  }
-  if (kind == Snapshot::kAppNoJIT) {
-    // No disabled code in precompilation.
-    ASSERT(ptr()->instructions_ == ptr()->active_instructions_);
-  } else {
-    ASSERT(kind == Snapshot::kAppWithJIT);
-    // We never include optimized code in JIT precompilation. Deoptimization
-    // requires code patching and we cannot patch code that is shared between
-    // isolates and should not mutate memory allocated by the embedder.
-    bool is_optimized = Code::PtrOffBits::decode(ptr()->state_bits_);
-    if (is_optimized) {
-      FATAL("Cannot include optimized code in a JIT snapshot");
-    }
-  }
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-
-  // Write out the class and tags information.
-  writer->WriteVMIsolateObject(kCodeCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-
-  // Write out all the non object fields.
-  writer->Write<int32_t>(ptr()->state_bits_);
-
-  RawInstructions* instr = ptr()->instructions_;
-  int32_t text_offset = writer->GetInstructionsId(instr, this);
-  writer->Write<int32_t>(text_offset);
-
-#if defined(DEBUG)
-  uword entry = ptr()->entry_point_;
-  intptr_t instructions_size = Utils::RoundUp(instr->size_,
-                                              OS::PreferredCodeAlignment());
-  uword check = Checksum(entry, instructions_size);
-  writer->Write<uword>(check);
-#endif
-
-  writer->WriteObjectImpl(ptr()->object_pool_, kAsReference);
-  writer->WriteObjectImpl(ptr()->owner_, kAsReference);
-  writer->WriteObjectImpl(ptr()->exception_handlers_, kAsReference);
-  writer->WriteObjectImpl(ptr()->pc_descriptors_, kAsReference);
-  writer->WriteObjectImpl(ptr()->stackmaps_, kAsReference);
+  UNREACHABLE();
 }
 
 
@@ -1544,62 +1415,8 @@
                                     intptr_t tags,
                                     Snapshot::Kind kind,
                                     bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  intptr_t len = reader->Read<intptr_t>();
-  ObjectPool* result = NULL;
-  DeserializeState state;
-  if (!as_reference) {
-    result = reinterpret_cast<ObjectPool*>(reader->GetBackRef(object_id));
-    state = kIsDeserialized;
-  } else {
-    state = kIsNotDeserialized;
-  }
-  if (result == NULL) {
-    result = &(ObjectPool::ZoneHandle(
-        reader->zone(), NEW_OBJECT_WITH_LEN(ObjectPool, len)));
-    reader->AddBackRef(object_id, result, state);
-  }
-  if (!as_reference) {
-    // Read all the individual elements for inlined objects.
-    const TypedData& info_array =
-        TypedData::Handle(reader->NewTypedData(kTypedDataInt8ArrayCid, len));
-    result->set_info_array(info_array);
-
-    NoSafepointScope no_safepoint;
-    for (intptr_t i = 0; i < len; i++) {
-      ObjectPool::EntryType entry_type =
-          static_cast<ObjectPool::EntryType>(reader->Read<int8_t>());
-      *reinterpret_cast<int8_t*>(info_array.DataAddr(i)) = entry_type;
-      switch (entry_type) {
-        case ObjectPool::kTaggedObject: {
-          (*reader->PassiveObjectHandle()) =
-              reader->ReadObjectImpl(kAsReference);
-          result->SetObjectAt(i, *(reader->PassiveObjectHandle()));
-          break;
-        }
-        case ObjectPool::kImmediate: {
-          intptr_t raw_value = reader->Read<intptr_t>();
-          result->SetRawValueAt(i, raw_value);
-          break;
-        }
-        case ObjectPool::kNativeEntry: {
-#if !defined(TARGET_ARCH_DBC)
-          // Read nothing. Initialize with the lazy link entry.
-          uword new_entry = NativeEntry::LinkNativeCallEntry();
-          result->SetRawValueAt(i, static_cast<intptr_t>(new_entry));
-#else
-          UNREACHABLE();  // DBC does not support lazy native call linking.
-#endif
-          break;
-        }
-        default:
-          UNREACHABLE();
-      }
-    }
-  }
-  return result->raw();
+  UNREACHABLE();
+  return ObjectPool::null();
 }
 
 
@@ -1607,68 +1424,7 @@
                             intptr_t object_id,
                             Snapshot::Kind kind,
                             bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-  intptr_t tags = writer->GetObjectTags(this);
-  intptr_t length = ptr()->length_;
-
-  if (as_reference) {
-    // Write out the serialization header value for this object.
-    writer->WriteInlinedObjectHeader(kOmittedObjectId);
-
-    // Write out the class information.
-    writer->WriteVMIsolateObject(kObjectPoolCid);
-    writer->WriteTags(tags);
-
-    // Write out the length field.
-    writer->Write<intptr_t>(length);
-  } else {
-    // Write out the serialization header value for this object.
-    writer->WriteInlinedObjectHeader(object_id);
-
-    // Write out the class and tags information.
-    writer->WriteVMIsolateObject(kObjectPoolCid);
-    writer->WriteTags(tags);
-
-    RawTypedData* info_array = ptr()->info_array_->ptr();
-    ASSERT(info_array != TypedData::null());
-
-    writer->Write<intptr_t>(length);
-    for (intptr_t i = 0; i < length; i++) {
-      ObjectPool::EntryType entry_type =
-          static_cast<ObjectPool::EntryType>(info_array->data()[i]);
-      writer->Write<int8_t>(entry_type);
-      Entry& entry = ptr()->data()[i];
-      switch (entry_type) {
-        case ObjectPool::kTaggedObject: {
-#if !defined(TARGET_ARCH_DBC)
-          if (entry.raw_obj_ == StubCode::CallNativeCFunction_entry()->code()) {
-            // Natives can run while precompiling, becoming linked and switching
-            // their stub. Reset to the initial stub used for lazy-linking.
-            writer->WriteObjectImpl(
-                StubCode::CallBootstrapCFunction_entry()->code(), kAsReference);
-            break;
-          }
-#endif
-          writer->WriteObjectImpl(entry.raw_obj_, kAsReference);
-          break;
-        }
-        case ObjectPool::kImmediate: {
-          writer->Write<intptr_t>(entry.raw_value_);
-          break;
-        }
-        case ObjectPool::kNativeEntry: {
-          // Write nothing. Will initialize with the lazy link entry.
-#if defined(TARGET_ARCH_DBC)
-          UNREACHABLE();   // DBC does not support lazy native call linking.
-#endif
-          break;
-        }
-        default:
-          UNREACHABLE();
-      }
-    }
-  }
+  UNREACHABLE();
 }
 
 
@@ -1677,15 +1433,8 @@
                                           intptr_t tags,
                                           Snapshot::Kind kind,
                                           bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  intptr_t offset = reader->Read<int32_t>();
-  PcDescriptors& result = PcDescriptors::ZoneHandle(reader->zone());
-  result ^= reader->GetObjectAt(offset);
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  return result.raw();
+  UNREACHABLE();
+  return PcDescriptors::null();
 }
 
 
@@ -1693,15 +1442,7 @@
                                intptr_t object_id,
                                Snapshot::Kind kind,
                                bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-  writer->WriteIndexedObject(kPcDescriptorsCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-
-  writer->Write<int32_t>(writer->GetObjectId(this));
+  UNREACHABLE();
 }
 
 
@@ -1710,23 +1451,8 @@
                                           intptr_t tags,
                                           Snapshot::Kind kind,
                                           bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  const int32_t length = reader->Read<int32_t>();
-  CodeSourceMap& result =
-      CodeSourceMap::ZoneHandle(reader->zone(),
-                                NEW_OBJECT_WITH_LEN(CodeSourceMap, length));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  if (result.Length() > 0) {
-    NoSafepointScope no_safepoint;
-    intptr_t len = result.Length();
-    uint8_t* data = result.UnsafeMutableNonPointer(result.raw_ptr()->data());
-    reader->ReadBytes(data, len);
-  }
-
-  return result.raw();
+  UNREACHABLE();
+  return CodeSourceMap::null();
 }
 
 
@@ -1734,19 +1460,7 @@
                                intptr_t object_id,
                                Snapshot::Kind kind,
                                bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-  writer->WriteIndexedObject(kCodeSourceMapCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-  writer->Write<int32_t>(ptr()->length_);
-  if (ptr()->length_ > 0) {
-    intptr_t len = ptr()->length_;
-    uint8_t* data = reinterpret_cast<uint8_t*>(ptr()->data());
-    writer->WriteBytes(data, len);
-  }
+  UNREACHABLE();
 }
 
 
@@ -1755,15 +1469,8 @@
                                 intptr_t tags,
                                 Snapshot::Kind kind,
                                 bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  intptr_t offset = reader->Read<int32_t>();
-  Stackmap& result = Stackmap::ZoneHandle(reader->zone());
-  result ^= reader->GetObjectAt(offset);
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  return result.raw();
+  UNREACHABLE();
+  return Stackmap::null();
 }
 
 
@@ -1771,15 +1478,7 @@
                           intptr_t object_id,
                           Snapshot::Kind kind,
                           bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-  writer->WriteIndexedObject(kStackmapCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-
-  writer->Write<int32_t>(writer->GetObjectId(this));
+  UNREACHABLE();
 }
 
 
@@ -1788,32 +1487,8 @@
                                                       intptr_t tags,
                                                       Snapshot::Kind kind,
                                                       bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  const int32_t num_entries = reader->Read<int32_t>();
-
-  LocalVarDescriptors& result =
-      LocalVarDescriptors::ZoneHandle(reader->zone(),
-                                      NEW_OBJECT_WITH_LEN(LocalVarDescriptors,
-                                                          num_entries));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  for (intptr_t i = 0; i < num_entries; i++) {
-    (*reader->StringHandle()) ^= reader->ReadObjectImpl(kAsReference);
-    result.StorePointer(result.raw()->nameAddrAt(i),
-                        reader->StringHandle()->raw());
-  }
-
-  if (num_entries > 0) {
-    NoSafepointScope no_safepoint;
-    intptr_t len = num_entries * sizeof(RawLocalVarDescriptors::VarInfo);
-    uint8_t* data = result.UnsafeMutableNonPointer(
-        reinterpret_cast<const uint8_t*>(result.raw()->data()));
-    reader->ReadBytes(data, len);
-  }
-
-  return result.raw();
+  UNREACHABLE();
+  return LocalVarDescriptors::null();
 }
 
 
@@ -1821,22 +1496,7 @@
                                      intptr_t object_id,
                                      Snapshot::Kind kind,
                                      bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-  writer->WriteIndexedObject(kLocalVarDescriptorsCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-  writer->Write<int32_t>(ptr()->num_entries_);
-  for (intptr_t i = 0; i < ptr()->num_entries_; i++) {
-    writer->WriteObjectImpl(ptr()->names()[i], kAsReference);
-  }
-  if (ptr()->num_entries_ > 0) {
-    intptr_t len = ptr()->num_entries_ * sizeof(VarInfo);
-    uint8_t* data = reinterpret_cast<uint8_t*>(this->data());
-    writer->WriteBytes(data, len);
-  }
+  UNREACHABLE();
 }
 
 
@@ -1845,30 +1505,8 @@
                                                   intptr_t tags,
                                                   Snapshot::Kind kind,
                                                   bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  const int32_t num_entries = reader->Read<int32_t>();
-  ExceptionHandlers& result =
-      ExceptionHandlers::ZoneHandle(reader->zone(),
-                                    NEW_OBJECT_WITH_LEN(ExceptionHandlers,
-                                                        num_entries));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  if (result.num_entries() > 0) {
-    NoSafepointScope no_safepoint;
-    const intptr_t len =
-        result.num_entries() * sizeof(RawExceptionHandlers::HandlerInfo);
-    uint8_t* data = result.UnsafeMutableNonPointer(
-        reinterpret_cast<const uint8_t*>(result.raw_ptr()->data()));
-    reader->ReadBytes(data, len);
-  }
-
-  *(reader->ArrayHandle()) ^= reader->ReadObjectImpl(kAsInlinedObject);
-  result.StorePointer(&result.raw_ptr()->handled_types_data_,
-                      reader->ArrayHandle()->raw());
-
-  return result.raw();
+  UNREACHABLE();
+  return ExceptionHandlers::null();
 }
 
 
@@ -1876,22 +1514,7 @@
                                    intptr_t object_id,
                                    Snapshot::Kind kind,
                                    bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-  writer->WriteIndexedObject(kExceptionHandlersCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-  writer->Write<int32_t>(ptr()->num_entries_);
-
-  if (ptr()->num_entries_ > 0) {
-    intptr_t len = ptr()->num_entries_ * sizeof(HandlerInfo);
-    uint8_t* data = reinterpret_cast<uint8_t*>(ptr()->data());
-    writer->WriteBytes(data, len);
-  }
-
-  writer->WriteObjectImpl(ptr()->handled_types_data_, kAsInlinedObject);
+  UNREACHABLE();
 }
 
 
@@ -2106,22 +1729,8 @@
                                                 intptr_t tags,
                                                 Snapshot::Kind kind,
                                                 bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  MegamorphicCache& result =
-      MegamorphicCache::ZoneHandle(reader->zone(),
-                                   NEW_OBJECT(MegamorphicCache));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  result.set_filled_entry_count(reader->Read<int32_t>());
-
-  // Set all the object fields.
-  READ_OBJECT_FIELDS(result,
-                     result.raw()->from(), result.raw()->to(),
-                     kAsReference);
-
-  return result.raw();
+  UNREACHABLE();
+  return MegamorphicCache::null();
 }
 
 
@@ -2129,22 +1738,7 @@
                                   intptr_t object_id,
                                   Snapshot::Kind kind,
                                   bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-
-  // Write out the class and tags information.
-  writer->WriteVMIsolateObject(kMegamorphicCacheCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-
-  // Write out all the non object fields.
-  writer->Write<int32_t>(ptr()->filled_entry_count_);
-
-  // Write out all the object pointer fields.
-  SnapshotWriterVisitor visitor(writer, kAsReference);
-  visitor.VisitPointers(from(), to());
+  UNREACHABLE();
 }
 
 
@@ -2153,22 +1747,8 @@
                                                 intptr_t tags,
                                                 Snapshot::Kind kind,
                                                 bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  SubtypeTestCache& result =
-      SubtypeTestCache::ZoneHandle(reader->zone(),
-                                   NEW_OBJECT(SubtypeTestCache));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  // Set all the object fields.
-  // TODO(5411462): Need to assert No GC can happen here, even though
-  // allocations may happen.
-  (*reader->ArrayHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(&result.raw_ptr()->cache_,
-                      reader->ArrayHandle()->raw());
-
-  return result.raw();
+  UNREACHABLE();
+  return SubtypeTestCache::null();
 }
 
 
@@ -2176,18 +1756,7 @@
                                   intptr_t object_id,
                                   Snapshot::Kind kind,
                                   bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-
-  // Write out the class and tags information.
-  writer->WriteVMIsolateObject(kSubtypeTestCacheCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-
-  // Write out all the object pointer fields.
-  writer->WriteObjectImpl(ptr()->cache_, kAsReference);
+  UNREACHABLE();
 }
 
 
@@ -2621,14 +2190,6 @@
                                           intptr_t tags,
                                           Snapshot::Kind kind,
                                           bool as_reference) {
-  if (Snapshot::IncludesCode(kind)) {
-    ASSERT(Snapshot::IsFull(kind));
-    intptr_t offset = reader->Read<int32_t>();
-    String& result = String::ZoneHandle(reader->zone());
-    result ^= reader->GetObjectAt(offset);
-    reader->AddBackRef(object_id, &result, kIsDeserialized);
-    return raw(result);
-  }
   // Read the length so that we can determine instance size to allocate.
   ASSERT(reader != NULL);
   intptr_t len = reader->ReadSmiValue();
@@ -2736,22 +2297,6 @@
                                intptr_t object_id,
                                Snapshot::Kind kind,
                                bool as_reference) {
-  if (Snapshot::IncludesCode(kind)) {
-    ASSERT(Snapshot::IncludesCode(kind));
-    ASSERT(Snapshot::IsFull(kind));
-    // Assert that hash is computed.
-    if (ptr()->hash_ == NULL) {
-      ptr()->hash_ = Smi::New(String::Hash(ptr()->data(),
-                                           Smi::Value(ptr()->length_)));
-    }
-    ASSERT(ptr()->hash_ != NULL);
-    // Write out the serialization header value for this object.
-    writer->WriteInlinedObjectHeader(object_id);
-    writer->WriteIndexedObject(kOneByteStringCid);
-    writer->WriteTags(writer->GetObjectTags(this));
-    writer->Write<int32_t>(writer->GetObjectId(this));
-    return;
-  }
   StringWriteTo(writer,
                 object_id,
                 kind,
@@ -3017,8 +2562,7 @@
 
   LinkedHashMap& map = LinkedHashMap::ZoneHandle(
       reader->zone(), LinkedHashMap::null());
-  if ((Snapshot::IsFull(kind) && !Snapshot::IncludesCode(kind)) ||
-      kind == Snapshot::kScript) {
+  if (Snapshot::IsFull(kind) || kind == Snapshot::kScript) {
     // The immutable maps that seed map literals are not yet VM-internal, so
     // we don't reach this.
     UNREACHABLE();
@@ -3076,8 +2620,7 @@
                                intptr_t object_id,
                                Snapshot::Kind kind,
                                bool as_reference) {
-  if ((Snapshot::IsFull(kind) && !Snapshot::IncludesCode(kind)) ||
-      kind == Snapshot::kScript) {
+  if (Snapshot::IsFull(kind) || kind == Snapshot::kScript) {
     // The immutable maps that seed map literals are not yet VM-internal, so
     // we don't reach this.
   }
@@ -3552,18 +3095,14 @@
                                 intptr_t tags,
                                 Snapshot::Kind kind,
                                 bool as_reference) {
-  ASSERT(kind == Snapshot::kMessage || Snapshot::IncludesCode(kind));
+  ASSERT(kind == Snapshot::kMessage);
 
   uint64_t id = reader->Read<uint64_t>();
   uint64_t origin_id = reader->Read<uint64_t>();
 
-  SendPort& result = SendPort::ZoneHandle(reader->zone());
-  if (Snapshot::IncludesCode(kind)) {
-    // TODO(rmacnak): Reset fields in precompiled snapshots and assert
-    // this is unreachable.
-  } else {
-    result = SendPort::New(id, origin_id);
-  }
+  SendPort& result =
+      SendPort::ZoneHandle(reader->zone(),
+                           SendPort::New(id, origin_id));
   reader->AddBackRef(object_id, &result, kIsDeserialized);
   return result.raw();
 }
diff --git a/runtime/vm/redundancy_elimination.cc b/runtime/vm/redundancy_elimination.cc
index 9a8c912..32fe07b 100644
--- a/runtime/vm/redundancy_elimination.cc
+++ b/runtime/vm/redundancy_elimination.cc
@@ -2539,8 +2539,7 @@
       case Instruction::kStoreInstanceField: {
         StoreInstanceFieldInstr* store_instance = instr->AsStoreInstanceField();
         // Can't eliminate stores that initialize fields.
-        return !(store_instance->is_potential_unboxed_initialization() ||
-                 store_instance->is_object_reference_initialization());
+        return !store_instance->is_initialization();
       }
       case Instruction::kStoreIndexed:
       case Instruction::kStoreStaticField:
diff --git a/runtime/vm/scavenger.cc b/runtime/vm/scavenger.cc
index ba36ac8..d1bfd3f 100644
--- a/runtime/vm/scavenger.cc
+++ b/runtime/vm/scavenger.cc
@@ -15,7 +15,6 @@
 #include "vm/store_buffer.h"
 #include "vm/thread_registry.h"
 #include "vm/timeline.h"
-#include "vm/verified_memory.h"
 #include "vm/verifier.h"
 #include "vm/visitor.h"
 #include "vm/weak_table.h"
@@ -161,7 +160,6 @@
       memmove(reinterpret_cast<void*>(new_addr),
               reinterpret_cast<void*>(raw_addr),
               size);
-      VerifiedMemory::Accept(new_addr, size);
       // Remember forwarding address.
       ForwardTo(raw_addr, new_addr);
     }
@@ -170,7 +168,6 @@
     *p = new_obj;
     // Update the store buffer as needed.
     if (visiting_old_object_ != NULL) {
-      VerifiedMemory::Accept(reinterpret_cast<uword>(p), sizeof(*p));
       UpdateStoreBuffer(p, new_obj);
     }
   }
@@ -289,7 +286,7 @@
     return new SemiSpace(NULL);
   } else {
     intptr_t size_in_bytes = size_in_words << kWordSizeLog2;
-    VirtualMemory* reserved = VerifiedMemory::Reserve(size_in_bytes);
+    VirtualMemory* reserved = VirtualMemory::Reserve(size_in_bytes);
     if ((reserved == NULL) || !reserved->Commit(false)) {  // Not executable.
       // TODO(koda): If cache_ is not empty, we could try to delete it.
       delete reserved;
@@ -297,7 +294,6 @@
     }
 #if defined(DEBUG)
     memset(reserved->address(), Heap::kZapByte, size_in_bytes);
-    VerifiedMemory::Accept(reserved->start(), size_in_bytes);
 #endif  // defined(DEBUG)
     return new SemiSpace(reserved);
   }
@@ -309,7 +305,6 @@
   if (reserved_ != NULL) {
     const intptr_t size_in_bytes = size_in_words() << kWordSizeLog2;
     memset(reserved_->address(), Heap::kZapByte, size_in_bytes);
-    VerifiedMemory::Accept(reserved_->start(), size_in_bytes);
   }
 #endif
   SemiSpace* old_cache = NULL;
@@ -429,7 +424,6 @@
     // objects candidates for promotion next time.
     survivor_end_ = end_;
   }
-  VerifiedMemory::Accept(to_->start(), to_->end() - to_->start());
 #if defined(DEBUG)
   // We can only safely verify the store buffers from old space if there is no
   // concurrent old space task. At the same time we prevent new tasks from
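The copy step patched above follows the usual Cheney-style evacuation: bump-allocate in to-space, move the bytes, then record a forwarding address. A minimal standalone sketch, assuming a uword alias for uintptr_t and a hypothetical ForwardTo hook:

  #include <cstdint>
  #include <cstring>

  typedef uintptr_t uword;

  // Evacuate one object of 'size' bytes into to-space and advance 'top'.
  uword Evacuate(uword raw_addr, intptr_t size, uword* top) {
    uword new_addr = *top;
    *top += size;
    memmove(reinterpret_cast<void*>(new_addr),
            reinterpret_cast<void*>(raw_addr),
            size);
    // ForwardTo(raw_addr, new_addr);  // hypothetical hook, see hunk above
    return new_addr;
  }
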
diff --git a/runtime/vm/service.cc b/runtime/vm/service.cc
index 09da5f3..e209d3b 100644
--- a/runtime/vm/service.cc
+++ b/runtime/vm/service.cc
@@ -14,7 +14,6 @@
 #include "vm/dart_api_state.h"
 #include "vm/dart_entry.h"
 #include "vm/debugger.h"
-#include "vm/dev_fs.h"
 #include "vm/isolate.h"
 #include "vm/lockers.h"
 #include "vm/message.h"
@@ -4008,127 +4007,6 @@
 }
 
 
-static const MethodParameter* create_dev_fs_params[] = {
-  NO_ISOLATE_PARAMETER,
-  new DartStringParameter("fsName", true),
-  NULL,
-};
-
-
-static bool CreateDevFS(Thread* thread, JSONStream* js) {
-  const String& fs_name =
-      String::Handle(String::RawCast(js->LookupObjectParam("fsName")));
-  DevFS::CreateFileSystem(js, fs_name);
-  return true;
-}
-
-
-static const MethodParameter* delete_dev_fs_params[] = {
-  NO_ISOLATE_PARAMETER,
-  new DartStringParameter("fsName", true),
-  NULL,
-};
-
-
-static bool DeleteDevFS(Thread* thread, JSONStream* js) {
-  const String& fs_name =
-      String::Handle(String::RawCast(js->LookupObjectParam("fsName")));
-  DevFS::DeleteFileSystem(js, fs_name);
-  return true;
-}
-
-
-static const MethodParameter* list_dev_fs_params[] = {
-  NO_ISOLATE_PARAMETER,
-  NULL,
-};
-
-
-static bool ListDevFS(Thread* thread, JSONStream* js) {
-  DevFS::ListFileSystems(js);
-  return true;
-}
-
-
-static const MethodParameter* write_dev_fs_file_params[] = {
-  NO_ISOLATE_PARAMETER,
-  new DartStringParameter("fsName", true),
-  new DartStringParameter("path", true),
-  new DartStringParameter("fileContents", true),
-  NULL,
-};
-
-
-static bool WriteDevFSFile(Thread* thread, JSONStream* js) {
-  const String& fs_name =
-      String::Handle(String::RawCast(js->LookupObjectParam("fsName")));
-  const String& path =
-      String::Handle(String::RawCast(js->LookupObjectParam("path")));
-  const String& file_contents =
-      String::Handle(String::RawCast(js->LookupObjectParam("fileContents")));
-  DevFS::WriteFile(js, fs_name, path, file_contents);
-  return true;
-}
-
-
-static const MethodParameter* write_dev_fs_files_params[] = {
-  NO_ISOLATE_PARAMETER,
-  new DartStringParameter("fsName", true),
-  new DartListParameter("files", true),
-  NULL,
-};
-
-
-static bool WriteDevFSFiles(Thread* thread, JSONStream* js) {
-  const String& fs_name =
-      String::Handle(String::RawCast(js->LookupObjectParam("fsName")));
-  Array& files = Array::Handle();
-  const Object& files_param = Object::Handle(js->LookupObjectParam("files"));
-  if (files_param.IsArray()) {
-    files ^= files_param.raw();
-  } else {
-    ASSERT(files_param.IsGrowableObjectArray());
-    files ^= GrowableObjectArray::Cast(files_param).data();
-  }
-  ASSERT(!files.IsNull());
-  DevFS::WriteFiles(js, fs_name, files);
-  return true;
-}
-
-
-static const MethodParameter* read_dev_fs_file_params[] = {
-  NO_ISOLATE_PARAMETER,
-  new DartStringParameter("fsName", true),
-  new DartStringParameter("path", true),
-  NULL,
-};
-
-
-static bool ReadDevFSFile(Thread* thread, JSONStream* js) {
-  const String& fs_name =
-      String::Handle(String::RawCast(js->LookupObjectParam("fsName")));
-  const String& path =
-      String::Handle(String::RawCast(js->LookupObjectParam("path")));
-  DevFS::ReadFile(js, fs_name, path);
-  return true;
-}
-
-
-static const MethodParameter* list_dev_fs_files_params[] = {
-  NO_ISOLATE_PARAMETER,
-  new DartStringParameter("fsName", true),
-  NULL,
-};
-
-
-static bool ListDevFSFiles(Thread* thread, JSONStream* js) {
-  const String& fs_name =
-      String::Handle(String::RawCast(js->LookupObjectParam("fsName")));
-  DevFS::ListFiles(js, fs_name);
-  return true;
-}
-
-
 static const ServiceMethodDescriptor service_methods_[] = {
   { "_dumpIdZone", DumpIdZone, NULL },
   { "_echo", Echo,
@@ -4241,20 +4119,6 @@
     set_vm_name_params },
   { "_setVMTimelineFlags", SetVMTimelineFlags,
     set_vm_timeline_flags_params },
-  { "_createDevFS", CreateDevFS,
-    create_dev_fs_params },
-  { "_deleteDevFS", DeleteDevFS,
-    delete_dev_fs_params },
-  { "_listDevFS", ListDevFS,
-    list_dev_fs_params },
-  { "_writeDevFSFile", WriteDevFSFile,
-    write_dev_fs_file_params },
-  { "_writeDevFSFiles", WriteDevFSFiles,
-    write_dev_fs_files_params },
-  { "_readDevFSFile", ReadDevFSFile,
-    read_dev_fs_file_params },
-  { "_listDevFSFiles", ListDevFSFiles,
-    list_dev_fs_files_params },
 };
 
 
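As the table above shows, each service method is registered as a name/handler/parameter-list triple. A hedged sketch of that descriptor shape (field names are guesses; the actual declaration lives elsewhere in service.cc and is not shown in this hunk):

  class Thread;
  class JSONStream;
  class MethodParameter;

  // Illustrative shape only.
  struct ServiceMethodDescriptor {
    const char* name;                               // e.g. "_echo"
    bool (*entry)(Thread* thread, JSONStream* js);  // request handler
    const MethodParameter* const* parameters;       // NULL-terminated list
  };
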
diff --git a/runtime/vm/service_isolate.cc b/runtime/vm/service_isolate.cc
index c73c176..e85a5c0 100644
--- a/runtime/vm/service_isolate.cc
+++ b/runtime/vm/service_isolate.cc
@@ -7,7 +7,6 @@
 #include "vm/compiler.h"
 #include "vm/dart_api_impl.h"
 #include "vm/dart_entry.h"
-#include "vm/dev_fs.h"
 #include "vm/isolate.h"
 #include "vm/lockers.h"
 #include "vm/message.h"
@@ -458,9 +457,6 @@
   // Grab the isolate create callback here to avoid race conditions with tests
   // that change this after Dart_Initialize returns.
   create_callback_ = Isolate::CreateCallback();
-  if (FLAG_support_service) {
-    DevFS::Init();
-  }
   Dart::thread_pool()->Run(new RunServiceTask());
 }
 
@@ -509,9 +505,6 @@
     free(server_address_);
     server_address_ = NULL;
   }
-  if (FLAG_support_service) {
-    DevFS::Cleanup();
-  }
 }
 
 
diff --git a/runtime/vm/signal_handler.h b/runtime/vm/signal_handler.h
index ecdfe02..615e584 100644
--- a/runtime/vm/signal_handler.h
+++ b/runtime/vm/signal_handler.h
@@ -34,6 +34,9 @@
 struct mcontext_t;
 struct sigset_t {
 };
+#elif defined(TARGET_OS_FUCHSIA)
+#include <signal.h>  // NOLINT
+#include <ucontext.h>  // NOLINT
 #endif
 
 
@@ -84,7 +87,8 @@
   // that no actual instructions are skipped and then branch to the actual
   // signal handler.
   //
-  // For the kernel patch that fixes the issue see: http://git.kernel.org/cgit/linux/kernel/git/torvalds/linux.git/commit/?id=6ecf830e5029598732e04067e325d946097519cb
+  // For the kernel patch that fixes the issue see:
+  // http://git.kernel.org/cgit/linux/kernel/git/torvalds/linux.git/commit/?id=6ecf830e5029598732e04067e325d946097519cb
   //
   // Note: this function is marked "naked" because we must guarantee that
   // our NOPs occur before any compiler generated prologue.
diff --git a/runtime/vm/signal_handler_fuchsia.cc b/runtime/vm/signal_handler_fuchsia.cc
new file mode 100644
index 0000000..1b704eb
--- /dev/null
+++ b/runtime/vm/signal_handler_fuchsia.cc
@@ -0,0 +1,55 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/signal_handler.h"
+
+#include "platform/assert.h"
+
+namespace dart {
+
+uintptr_t SignalHandler::GetProgramCounter(const mcontext_t& mcontext) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+uintptr_t SignalHandler::GetFramePointer(const mcontext_t& mcontext) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+uintptr_t SignalHandler::GetCStackPointer(const mcontext_t& mcontext) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+uintptr_t SignalHandler::GetDartStackPointer(const mcontext_t& mcontext) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+uintptr_t SignalHandler::GetLinkRegister(const mcontext_t& mcontext) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+void SignalHandler::InstallImpl(SignalAction action) {
+  UNIMPLEMENTED();
+}
+
+
+void SignalHandler::Remove() {
+  UNIMPLEMENTED();
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/simulator_dbc.cc b/runtime/vm/simulator_dbc.cc
index 5a26b53..b1c648d 100644
--- a/runtime/vm/simulator_dbc.cc
+++ b/runtime/vm/simulator_dbc.cc
@@ -2048,6 +2048,16 @@
   }
 
   {
+    BYTECODE(TestSmi, A_D);
+    intptr_t left = reinterpret_cast<intptr_t>(RAW_CAST(Smi, FP[rA]));
+    intptr_t right = reinterpret_cast<intptr_t>(RAW_CAST(Smi, FP[rD]));
+    if ((left & right) != 0) {
+      pc++;
+    }
+    DISPATCH();
+  }
+
+  {
     BYTECODE(CheckSmi, 0);
     intptr_t obj = reinterpret_cast<intptr_t>(FP[rA]);
     if ((obj & kSmiTagMask) == kSmiTag) {
@@ -2058,11 +2068,59 @@
 
   {
     BYTECODE(CheckClassId, A_D);
-    const RawSmi* actual_cid =
-        SimulatorHelpers::GetClassIdAsSmi(static_cast<RawObject*>(FP[rA]));
-    const RawSmi* desired_cid = RAW_CAST(Smi, LOAD_CONSTANT(rD));
-    if (actual_cid == desired_cid) {
-      pc++;
+    const intptr_t actual_cid = SimulatorHelpers::GetClassId(FP[rA]);
+    const intptr_t desired_cid = rD;
+    pc += (actual_cid == desired_cid) ? 1 : 0;
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(CheckDenseSwitch, A_D);
+    const intptr_t raw_value = reinterpret_cast<intptr_t>(FP[rA]);
+    const bool is_smi = ((raw_value & kSmiTagMask) == kSmiTag);
+    const intptr_t cid_min = Bytecode::DecodeD(*pc);
+    const intptr_t cid_mask =
+        Smi::Value(RAW_CAST(Smi, LOAD_CONSTANT(Bytecode::DecodeD(*(pc + 1)))));
+    if (LIKELY(!is_smi)) {
+      const intptr_t cid_max = Utils::HighestBit(cid_mask) + cid_min;
+      const intptr_t cid = SimulatorHelpers::GetClassId(FP[rA]);
+      // The cid is in-bounds, and the bit is set in the mask.
+      if ((cid >= cid_min) && (cid <= cid_max) &&
+          ((cid_mask & (1 << (cid - cid_min))) != 0)) {
+        pc += 3;
+      } else {
+        pc += 2;
+      }
+    } else {
+      const bool may_be_smi = (rD == 1);
+      pc += (may_be_smi ? 3 : 2);
+    }
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(CheckCids, A_B_C);
+    const intptr_t raw_value = reinterpret_cast<intptr_t>(FP[rA]);
+    const bool is_smi = ((raw_value & kSmiTagMask) == kSmiTag);
+    const bool may_be_smi = (rB == 1);
+    const intptr_t cids_length = rC;
+    if (LIKELY(!is_smi)) {
+      const intptr_t cid = SimulatorHelpers::GetClassId(FP[rA]);
+      for (intptr_t i = 0; i < cids_length; i++) {
+        const intptr_t desired_cid = Bytecode::DecodeD(*(pc + i));
+        if (cid == desired_cid) {
+          pc++;
+          break;
+        }
+        // The cids are sorted.
+        if (cid < desired_cid) {
+          break;
+        }
+      }
+      pc += cids_length;
+    } else {
+      pc += cids_length;
+      pc += (may_be_smi ? 1 : 0);
     }
     DISPATCH();
   }
@@ -2156,6 +2214,22 @@
   }
 
   {
+    BYTECODE(IfEqNull, A);
+    if (FP[rA] != null_value) {
+      pc++;
+    }
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(IfNeNull, A_D);
+    if (FP[rA] == null_value) {
+      pc++;
+    }
+    DISPATCH();
+  }
+
+  {
     BYTECODE(Jump, 0);
     const int32_t target = static_cast<int32_t>(op) >> 8;
     pc += (target - 1);
@@ -2241,7 +2315,7 @@
       INVOKE_RUNTIME(DRT_DeoptimizeMaterialize, native_args);
     }
     const intptr_t materialization_arg_count =
-        Smi::Value(RAW_CAST(Smi, *SP--));
+        Smi::Value(RAW_CAST(Smi, *SP--)) / kWordSize;
     if (is_lazy) {
       // Reload the result. It might have been relocated by GC.
       result = *SP--;
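The CheckDenseSwitch bytecode above compresses a dense range of class ids into one Smi bitmask: a cid matches when it falls in [cid_min, cid_min + HighestBit(mask)] and its bit is set. A minimal sketch of that membership test, assuming the mask fits in a machine word:

  #include <cstdint>

  // Returns true when 'cid' lies in the dense range anchored at 'cid_min'
  // and its corresponding bit is set in 'cid_mask'.
  bool DenseCidMatch(intptr_t cid, intptr_t cid_min, uintptr_t cid_mask) {
    const intptr_t offset = cid - cid_min;
    if (offset < 0 ||
        offset >= static_cast<intptr_t>(sizeof(uintptr_t) * 8)) {
      return false;
    }
    return (cid_mask & (static_cast<uintptr_t>(1) << offset)) != 0;
  }
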
diff --git a/runtime/vm/snapshot.cc b/runtime/vm/snapshot.cc
index e5c5b0b..4195995 100644
--- a/runtime/vm/snapshot.cc
+++ b/runtime/vm/snapshot.cc
@@ -19,7 +19,6 @@
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
 #include "vm/timeline.h"
-#include "vm/verified_memory.h"
 #include "vm/version.h"
 
 // We currently only expect the Dart mutator to read snapshots.
@@ -29,8 +28,6 @@
 
 namespace dart {
 
-static const int kNumVmIsolateSnapshotReferences = 32 * KB;
-static const int kNumInitialReferencesInFullSnapshot = 160 * KB;
 static const int kNumInitialReferences = 64;
 
 
@@ -586,82 +583,6 @@
 };
 
 
-RawApiError* SnapshotReader::ReadFullSnapshot() {
-  ASSERT(Snapshot::IsFull(kind_));
-  Thread* thread = Thread::Current();
-  Isolate* isolate = thread->isolate();
-  ASSERT(isolate != NULL);
-  ObjectStore* object_store = isolate->object_store();
-  ASSERT(object_store != NULL);
-
-  // First read the version string, and check that it matches.
-  RawApiError* error = VerifyVersionAndFeatures();
-  if (error != ApiError::null()) {
-    return error;
-  }
-
-  // The version string matches. Read the rest of the snapshot.
-
-  // TODO(asiva): Add a check here to ensure we have the right heap
-  // size for the full snapshot being read.
-  {
-    NoSafepointScope no_safepoint;
-    HeapLocker hl(thread, old_space());
-
-    // Read in all the objects stored in the object store.
-    intptr_t num_flds =
-        (object_store->to_snapshot(kind_) - object_store->from());
-    for (intptr_t i = 0; i <= num_flds; i++) {
-      *(object_store->from() + i) = ReadObjectImpl(kAsInlinedObject);
-    }
-    for (intptr_t i = 0; i < backward_references_->length(); i++) {
-      if (!(*backward_references_)[i].is_deserialized()) {
-        ReadObjectImpl(kAsInlinedObject);
-        (*backward_references_)[i].set_state(kIsDeserialized);
-      }
-    }
-
-    if (kind_ == Snapshot::kAppNoJIT) {
-      ICData& ic = ICData::Handle(thread->zone());
-      Object& funcOrCode = Object::Handle(thread->zone());
-      Code& code = Code::Handle(thread->zone());
-      Smi& entry_point = Smi::Handle(thread->zone());
-      for (intptr_t i = 0; i < backward_references_->length(); i++) {
-        if ((*backward_references_)[i].reference()->IsICData()) {
-          ic ^= (*backward_references_)[i].reference()->raw();
-          for (intptr_t j = 0; j < ic.NumberOfChecks(); j++) {
-            funcOrCode = ic.GetTargetOrCodeAt(j);
-            if (funcOrCode.IsCode()) {
-              code ^= funcOrCode.raw();
-              entry_point = Smi::FromAlignedAddress(code.EntryPoint());
-              ic.SetEntryPointAt(j, entry_point);
-            }
-          }
-        }
-      }
-    }
-
-    // Validate the class table.
-#if defined(DEBUG)
-    isolate->ValidateClassTable();
-#endif
-
-    // Setup native resolver for bootstrap impl.
-    Bootstrap::SetupNativeResolver();
-  }
-
-  Class& cls = Class::Handle(thread->zone());
-  for (intptr_t i = 0; i < backward_references_->length(); i++) {
-    if ((*backward_references_)[i].reference()->IsClass()) {
-      cls ^= (*backward_references_)[i].reference()->raw();
-      cls.RehashConstants(thread->zone());
-    }
-  }
-
-  return ApiError::null();
-}
-
-
 RawObject* SnapshotReader::ReadScriptSnapshot() {
   ASSERT(kind_ == Snapshot::kScript);
 
@@ -1506,12 +1427,8 @@
   ASSERT_NO_SAFEPOINT_SCOPE();
   ASSERT(Utils::IsAligned(size, kObjectAlignment));
 
-  // Allocate memory where all words look like smis. This is currently
-  // only needed for DEBUG-mode validation in StorePointer/StoreSmi, but will
-  // be essential with the upcoming deletion barrier.
   uword address =
-      old_space()->TryAllocateSmiInitializedLocked(size,
-                                                   PageSpace::kForceGrowth);
+      old_space()->TryAllocateDataBumpLocked(size, PageSpace::kForceGrowth);
   if (address == 0) {
     // Use the preallocated out of memory exception to avoid calling
     // into dart code or allocating any code.
@@ -1521,7 +1438,6 @@
         object_store()->preallocated_unhandled_exception());
     thread()->long_jump_base()->Jump(1, error);
   }
-  VerifiedMemory::Accept(address, size);
 
   RawObject* raw_obj = reinterpret_cast<RawObject*>(address + kHeapObjectTag);
   uword tags = 0;
@@ -1702,108 +1618,6 @@
 }
 
 
-VmIsolateSnapshotReader::VmIsolateSnapshotReader(
-    Snapshot::Kind kind,
-    const uint8_t* buffer,
-    intptr_t size,
-    const uint8_t* instructions_buffer,
-    const uint8_t* data_buffer,
-    Thread* thread)
-      : SnapshotReader(buffer,
-                       size,
-                       instructions_buffer,
-                       data_buffer,
-                       kind,
-                       new ZoneGrowableArray<BackRefNode>(
-                           kNumVmIsolateSnapshotReferences),
-                       thread) {
-  ASSERT(Snapshot::IsFull(kind));
-}
-
-
-VmIsolateSnapshotReader::~VmIsolateSnapshotReader() {
-  intptr_t len = GetBackwardReferenceTable()->length();
-  Object::InitVmIsolateSnapshotObjectTable(len);
-  ZoneGrowableArray<BackRefNode>* backrefs = GetBackwardReferenceTable();
-  for (intptr_t i = 0; i < len; i++) {
-    Object::vm_isolate_snapshot_object_table().SetAt(
-        i, *(backrefs->At(i).reference()));
-  }
-  ResetBackwardReferenceTable();
-  Dart::set_instructions_snapshot_buffer(instructions_buffer_);
-  Dart::set_data_snapshot_buffer(data_buffer_);
-}
-
-
-RawApiError* VmIsolateSnapshotReader::ReadVmIsolateSnapshot() {
-  ASSERT(Snapshot::IsFull(kind()));
-  Thread* thread = Thread::Current();
-  Isolate* isolate = thread->isolate();
-  ASSERT(isolate != NULL);
-  ASSERT(isolate == Dart::vm_isolate());
-  ObjectStore* object_store = isolate->object_store();
-  ASSERT(object_store != NULL);
-
-  // First read the version string, and check that it matches.
-  RawApiError* error = VerifyVersionAndFeatures();
-  if (error != ApiError::null()) {
-    return error;
-  }
-
-  // The version string matches. Read the rest of the snapshot.
-
-  {
-    NoSafepointScope no_safepoint;
-    HeapLocker hl(thread, old_space());
-
-    // Read in the symbol table.
-    object_store->symbol_table_ = reinterpret_cast<RawArray*>(ReadObject());
-
-    Symbols::InitOnceFromSnapshot(isolate);
-
-    // Read in all the script objects and the accompanying token streams
-    // for bootstrap libraries so that they are in the VM isolate's read
-    // only memory.
-    *(ArrayHandle()) ^= ReadObject();
-
-    if (Snapshot::IncludesCode(kind())) {
-      StubCode::ReadFrom(this);
-    }
-
-    // Validate the class table.
-#if defined(DEBUG)
-    isolate->ValidateClassTable();
-#endif
-
-    return ApiError::null();
-  }
-}
-
-
-IsolateSnapshotReader::IsolateSnapshotReader(Snapshot::Kind kind,
-                                             const uint8_t* buffer,
-                                             intptr_t size,
-                                             const uint8_t* instructions_buffer,
-                                             const uint8_t* data_buffer,
-                                             Thread* thread)
-    : SnapshotReader(buffer,
-                     size,
-                     instructions_buffer,
-                     data_buffer,
-                     kind,
-                     new ZoneGrowableArray<BackRefNode>(
-                         kNumInitialReferencesInFullSnapshot),
-                     thread) {
-  isolate()->set_compilation_allowed(kind != Snapshot::kAppNoJIT);
-  ASSERT(Snapshot::IsFull(kind));
-}
-
-
-IsolateSnapshotReader::~IsolateSnapshotReader() {
-  ResetBackwardReferenceTable();
-}
-
-
 ScriptSnapshotReader::ScriptSnapshotReader(const uint8_t* buffer,
                                            intptr_t size,
                                            Thread* thread)
@@ -2039,212 +1853,6 @@
 };
 
 
-FullSnapshotWriter::FullSnapshotWriter(Snapshot::Kind kind,
-                                       uint8_t** vm_isolate_snapshot_buffer,
-                                       uint8_t** isolate_snapshot_buffer,
-                                       ReAlloc alloc,
-                                       InstructionsWriter* instructions_writer)
-    : thread_(Thread::Current()),
-      kind_(kind),
-      vm_isolate_snapshot_buffer_(vm_isolate_snapshot_buffer),
-      isolate_snapshot_buffer_(isolate_snapshot_buffer),
-      alloc_(alloc),
-      vm_isolate_snapshot_size_(0),
-      isolate_snapshot_size_(0),
-      forward_list_(NULL),
-      instructions_writer_(instructions_writer),
-      scripts_(Array::Handle(zone())),
-      saved_symbol_table_(Array::Handle(zone())),
-      new_vm_symbol_table_(Array::Handle(zone())) {
-  ASSERT(isolate_snapshot_buffer_ != NULL);
-  ASSERT(alloc_ != NULL);
-  ASSERT(isolate() != NULL);
-  ASSERT(ClassFinalizer::AllClassesFinalized());
-  ASSERT(isolate() != NULL);
-  ASSERT(heap() != NULL);
-  ObjectStore* object_store = isolate()->object_store();
-  ASSERT(object_store != NULL);
-
-#if defined(DEBUG)
-  // Ensure the class table is valid.
-  isolate()->ValidateClassTable();
-#endif
-  // Can't have any mutation happening while we're serializing.
-  ASSERT(isolate()->background_compiler() == NULL);
-
-  intptr_t first_object_id = -1;
-  if (vm_isolate_snapshot_buffer != NULL) {
-    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
-        Timeline::GetIsolateStream(), "PrepareNewVMIsolate"));
-
-    // Collect all the script objects and their accompanying token stream
-    // objects into an array so that we can write it out as part of the VM
-    // isolate snapshot. We first count the number of script objects, allocate
-    // an array and then fill it up with the script objects.
-    ScriptVisitor scripts_counter(thread());
-    heap()->IterateOldObjects(&scripts_counter);
-    Dart::vm_isolate()->heap()->IterateOldObjects(&scripts_counter);
-    intptr_t count = scripts_counter.count();
-    scripts_ = Array::New(count, Heap::kOld);
-    ScriptVisitor script_visitor(thread(), &scripts_);
-    heap()->IterateOldObjects(&script_visitor);
-    Dart::vm_isolate()->heap()->IterateOldObjects(&script_visitor);
-    ASSERT(script_visitor.count() == count);
-
-    // Tuck away the current symbol table.
-    saved_symbol_table_ = object_store->symbol_table();
-
-    // Create a unified symbol table that will be written as the vm isolate's
-    // symbol table.
-    new_vm_symbol_table_ = Symbols::UnifiedSymbolTable();
-
-    // Create an empty symbol table that will be written as the isolate's symbol
-    // table.
-    Symbols::SetupSymbolTable(isolate());
-
-    first_object_id = kMaxPredefinedObjectIds;
-  } else {
-    intptr_t max_vm_isolate_object_id =
-        Object::vm_isolate_snapshot_object_table().Length();
-    first_object_id = kMaxPredefinedObjectIds + max_vm_isolate_object_id;
-  }
-
-  forward_list_ = new ForwardList(thread(), first_object_id);
-  ASSERT(forward_list_ != NULL);
-}
-
-
-FullSnapshotWriter::~FullSnapshotWriter() {
-  delete forward_list_;
-  // We may run Dart code afterwards, restore the symbol table if needed.
-  if (!saved_symbol_table_.IsNull()) {
-    isolate()->object_store()->set_symbol_table(saved_symbol_table_);
-    saved_symbol_table_ = Array::null();
-  }
-  new_vm_symbol_table_ = Array::null();
-  scripts_ = Array::null();
-}
-
-
-void FullSnapshotWriter::WriteVmIsolateSnapshot() {
-  NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
-      Timeline::GetIsolateStream(), "WriteVmIsolateSnapshot"));
-
-  ASSERT(vm_isolate_snapshot_buffer_ != NULL);
-  SnapshotWriter writer(thread(),
-                        kind_,
-                        vm_isolate_snapshot_buffer_,
-                        alloc_,
-                        kInitialSize,
-                        forward_list_,
-                        instructions_writer_,
-                        true, /* can_send_any_object */
-                        true /* writing_vm_isolate */);
-  // Write full snapshot for the VM isolate.
-  // Setup for long jump in case there is an exception while writing
-  // the snapshot.
-  LongJumpScope jump;
-  if (setjmp(*jump.Set()) == 0) {
-    // Reserve space in the output buffer for a snapshot header.
-    writer.ReserveHeader();
-
-    // Write out the version string.
-    writer.WriteVersionAndFeatures();
-
-    /*
-     * Now Write out the following
-     * - the symbol table
-     * - all the scripts and token streams for these scripts
-     * - the stub code (precompiled snapshots only)
-     **/
-    // Write out the symbol table.
-    writer.WriteObject(new_vm_symbol_table_.raw());
-
-    // Write out all the script objects and the accompanying token streams
-    // for the bootstrap libraries so that they are in the VM isolate
-    // read only memory.
-    writer.WriteObject(scripts_.raw());
-
-    if (Snapshot::IncludesCode(kind_)) {
-      StubCode::WriteTo(&writer);
-    }
-
-    writer.FillHeader(writer.kind());
-
-    vm_isolate_snapshot_size_ = writer.BytesWritten();
-  } else {
-    writer.ThrowException(writer.exception_type(), writer.exception_msg());
-  }
-}
-
-
-void FullSnapshotWriter::WriteIsolateFullSnapshot() {
-  NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
-      Timeline::GetIsolateStream(), "WriteIsolateFullSnapshot"));
-
-  SnapshotWriter writer(thread(),
-                        kind_,
-                        isolate_snapshot_buffer_,
-                        alloc_,
-                        kInitialSize,
-                        forward_list_,
-                        instructions_writer_,
-                        true, /* can_send_any_object */
-                        false /* writing_vm_isolate */);
-  ObjectStore* object_store = isolate()->object_store();
-  ASSERT(object_store != NULL);
-
-  // Write full snapshot for a regular isolate.
-  // Setup for long jump in case there is an exception while writing
-  // the snapshot.
-  LongJumpScope jump;
-  if (setjmp(*jump.Set()) == 0) {
-    // Reserve space in the output buffer for a snapshot header.
-    writer.ReserveHeader();
-
-    // Write out the version string.
-    writer.WriteVersionAndFeatures();
-
-    // Write out the full snapshot.
-
-    // Write out all the objects in the object store of the isolate which
-    // is the root set for all dart allocated objects at this point.
-    SnapshotWriterVisitor visitor(&writer, false);
-    visitor.VisitPointers(object_store->from(),
-                          object_store->to_snapshot(kind_));
-
-    // Write out all forwarded objects.
-    writer.WriteForwardedObjects();
-
-    writer.FillHeader(writer.kind());
-
-    isolate_snapshot_size_ = writer.BytesWritten();
-  } else {
-    writer.ThrowException(writer.exception_type(), writer.exception_msg());
-  }
-}
-
-
-void FullSnapshotWriter::WriteFullSnapshot() {
-  if (vm_isolate_snapshot_buffer() != NULL) {
-    WriteVmIsolateSnapshot();
-  }
-  WriteIsolateFullSnapshot();
-  if (Snapshot::IncludesCode(kind_)) {
-    instructions_writer_->Write();
-
-    OS::Print("VMIsolate(CodeSize): %" Pd "\n", VmIsolateSnapshotSize());
-    OS::Print("Isolate(CodeSize): %" Pd "\n", IsolateSnapshotSize());
-    OS::Print("Instructions(CodeSize): %" Pd "\n",
-              instructions_writer_->binary_size());
-    intptr_t total = VmIsolateSnapshotSize() +
-                     IsolateSnapshotSize() +
-                     instructions_writer_->binary_size();
-    OS::Print("Total(CodeSize): %" Pd "\n", total);
-  }
-}
-
-
 ForwardList::ForwardList(Thread* thread, intptr_t first_object_id)
     : thread_(thread),
       first_object_id_(first_object_id),
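The removed writers above all share one framing pattern: reserve space for the snapshot header, stream the payload, then backpatch the header once the final length is known (ReserveHeader/FillHeader in the deleted code). A standalone sketch of that reserve-then-fill technique, with a made-up int64 length header:

  #include <cstdint>
  #include <cstring>
  #include <vector>

  // Reserve a fixed-size header up front, append the payload, then fill
  // the header last, once the total size is known.
  std::vector<uint8_t> WriteWithHeader(const uint8_t* payload, int64_t n) {
    std::vector<uint8_t> out(sizeof(int64_t), 0);  // reserved header bytes
    out.insert(out.end(), payload, payload + n);   // payload
    const int64_t total = static_cast<int64_t>(out.size());
    std::memcpy(out.data(), &total, sizeof(total));
    return out;
  }
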
diff --git a/runtime/vm/snapshot.h b/runtime/vm/snapshot.h
index 601eba5..1d8f585 100644
--- a/runtime/vm/snapshot.h
+++ b/runtime/vm/snapshot.h
@@ -429,10 +429,7 @@
   // Get an object from the backward references list.
   Object* GetBackRef(intptr_t id);
 
-  // Read a full snap shot.
-  RawApiError* ReadFullSnapshot();
-
-  // Read a script snap shot.
+  // Read a script snapshot.
   RawObject* ReadScriptSnapshot();
 
   // Read version number of snapshot and verify.
@@ -650,38 +647,6 @@
 };
 
 
-class VmIsolateSnapshotReader : public SnapshotReader {
- public:
-  VmIsolateSnapshotReader(Snapshot::Kind kind,
-                          const uint8_t* buffer,
-                          intptr_t size,
-                          const uint8_t* instructions_buffer,
-                          const uint8_t* data_buffer,
-                          Thread* thread);
-  ~VmIsolateSnapshotReader();
-
-  RawApiError* ReadVmIsolateSnapshot();
-
- private:
-  DISALLOW_COPY_AND_ASSIGN(VmIsolateSnapshotReader);
-};
-
-
-class IsolateSnapshotReader : public SnapshotReader {
- public:
-  IsolateSnapshotReader(Snapshot::Kind kind,
-                        const uint8_t* buffer,
-                        intptr_t size,
-                        const uint8_t* instructions_buffer,
-                        const uint8_t* data_buffer,
-                        Thread* thread);
-  ~IsolateSnapshotReader();
-
- private:
-  DISALLOW_COPY_AND_ASSIGN(IsolateSnapshotReader);
-};
-
-
 class ScriptSnapshotReader : public SnapshotReader {
  public:
   ScriptSnapshotReader(const uint8_t* buffer,
@@ -849,7 +814,6 @@
   GrowableArray<Node*> nodes_;
   intptr_t first_unprocessed_object_id_;
 
-  friend class FullSnapshotWriter;
   DISALLOW_COPY_AND_ASSIGN(ForwardList);
 };
 
@@ -1075,7 +1039,6 @@
   bool can_send_any_object_;  // True if any Dart instance can be sent.
   bool writing_vm_isolate_;
 
-  friend class FullSnapshotWriter;
   friend class RawArray;
   friend class RawClass;
   friend class RawClosureData;
@@ -1109,63 +1072,6 @@
 };
 
 
-class FullSnapshotWriter {
- public:
-  static const intptr_t kInitialSize = 64 * KB;
-  FullSnapshotWriter(Snapshot::Kind kind,
-                     uint8_t** vm_isolate_snapshot_buffer,
-                     uint8_t** isolate_snapshot_buffer,
-                     ReAlloc alloc,
-                     InstructionsWriter* instructions_writer);
-  ~FullSnapshotWriter();
-
-  uint8_t** vm_isolate_snapshot_buffer() {
-    return vm_isolate_snapshot_buffer_;
-  }
-
-  uint8_t** isolate_snapshot_buffer() {
-    return isolate_snapshot_buffer_;
-  }
-
-  Thread* thread() const { return thread_; }
-  Zone* zone() const { return thread_->zone(); }
-  Isolate* isolate() const { return thread_->isolate(); }
-  Heap* heap() const { return isolate()->heap(); }
-
-  // Writes a full snapshot of the Isolate.
-  void WriteFullSnapshot();
-
-  intptr_t VmIsolateSnapshotSize() const {
-    return vm_isolate_snapshot_size_;
-  }
-  intptr_t IsolateSnapshotSize() const {
-    return isolate_snapshot_size_;
-  }
-
- private:
-  // Writes a snapshot of the VM Isolate.
-  void WriteVmIsolateSnapshot();
-
-  // Writes a full snapshot of a regular Dart Isolate.
-  void WriteIsolateFullSnapshot();
-
-  Thread* thread_;
-  Snapshot::Kind kind_;
-  uint8_t** vm_isolate_snapshot_buffer_;
-  uint8_t** isolate_snapshot_buffer_;
-  ReAlloc alloc_;
-  intptr_t vm_isolate_snapshot_size_;
-  intptr_t isolate_snapshot_size_;
-  ForwardList* forward_list_;
-  InstructionsWriter* instructions_writer_;
-  Array& scripts_;
-  Array& saved_symbol_table_;
-  Array& new_vm_symbol_table_;
-
-  DISALLOW_COPY_AND_ASSIGN(FullSnapshotWriter);
-};
-
-
 class ScriptSnapshotWriter : public SnapshotWriter {
  public:
   static const intptr_t kInitialSize = 64 * KB;
diff --git a/runtime/vm/snapshot_test.cc b/runtime/vm/snapshot_test.cc
index 493607a..c5862e4 100644
--- a/runtime/vm/snapshot_test.cc
+++ b/runtime/vm/snapshot_test.cc
@@ -7,6 +7,7 @@
 #include "include/dart_tools_api.h"
 #include "platform/assert.h"
 #include "vm/class_finalizer.h"
+#include "vm/clustered_snapshot.h"
 #include "vm/dart_api_impl.h"
 #include "vm/dart_api_message.h"
 #include "vm/dart_api_state.h"
diff --git a/runtime/vm/stub_code.cc b/runtime/vm/stub_code.cc
index 04ca4f4..a6992bf 100644
--- a/runtime/vm/stub_code.cc
+++ b/runtime/vm/stub_code.cc
@@ -14,6 +14,7 @@
 #include "vm/snapshot.h"
 #include "vm/virtual_memory.h"
 #include "vm/visitor.h"
+#include "vm/clustered_snapshot.h"
 
 namespace dart {
 
@@ -59,22 +60,31 @@
 #undef STUB_CODE_GENERATE
 
 
-void StubCode::ReadFrom(SnapshotReader* reader) {
+void StubCode::Push(Serializer* serializer) {
+#define WRITE_STUB(name)                                                       \
+  serializer->Push(StubCode::name##_entry()->code());
+  VM_STUB_CODE_LIST(WRITE_STUB);
+#undef WRITE_STUB
+}
+
+
+void StubCode::WriteRef(Serializer* serializer) {
+#define WRITE_STUB(name)                                                       \
+  serializer->WriteRef(StubCode::name##_entry()->code());
+  VM_STUB_CODE_LIST(WRITE_STUB);
+#undef WRITE_STUB
+}
+
+
+void StubCode::ReadRef(Deserializer* deserializer) {
+  Code& code = Code::Handle();
 #define READ_STUB(name)                                                        \
-  *(reader->CodeHandle()) ^= reader->ReadObject();                             \
-  name##_entry_ = new StubEntry(*(reader->CodeHandle()));
+  code ^= deserializer->ReadRef();                                             \
+  name##_entry_ = new StubEntry(code);
   VM_STUB_CODE_LIST(READ_STUB);
 #undef READ_STUB
 }
 
-void StubCode::WriteTo(SnapshotWriter* writer) {
-  // TODO(rmacnak): Consider writing only the instructions to avoid
-  // vm_isolate_is_symbolic.
-#define WRITE_STUB(name)                                                       \
-  writer->WriteObject(StubCode::name##_entry()->code());
-  VM_STUB_CODE_LIST(WRITE_STUB);
-#undef WRITE_STUB
-}
 
 
 void StubCode::Init(Isolate* isolate) { }
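The new Push/WriteRef/ReadRef trio above re-expands the VM_STUB_CODE_LIST X-macro once per serialization phase, with a different per-name statement each time. A self-contained toy showing just the expansion mechanism, using a made-up two-entry list:

  #include <cstdio>

  // Made-up stand-in for VM_STUB_CODE_LIST; each V(name) becomes one
  // statement when the list is expanded.
  #define DEMO_STUB_LIST(V)                                                  \
    V(Deoptimize)                                                            \
    V(DeoptimizeLazy)

  void NameAllStubs() {
  #define PRINT_STUB(name) std::printf("stub: %s\n", #name);
    DEMO_STUB_LIST(PRINT_STUB)
  #undef PRINT_STUB
  }
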
diff --git a/runtime/vm/stub_code.h b/runtime/vm/stub_code.h
index 8232a4b..cecec5d 100644
--- a/runtime/vm/stub_code.h
+++ b/runtime/vm/stub_code.h
@@ -17,6 +17,8 @@
 class RawCode;
 class SnapshotReader;
 class SnapshotWriter;
+class Serializer;
+class Deserializer;
 
 // List of stubs created in the VM isolate, these stubs are shared by different
 // isolates running in this dart process.
@@ -71,6 +73,7 @@
   V(FixCallersTarget)                                                          \
   V(Deoptimize)                                                                \
   V(DeoptimizeLazy)                                                            \
+  V(FrameAwaitingMaterialization)                                              \
 
 #endif  // !defined(TARGET_ARCH_DBC)
 
@@ -112,8 +115,9 @@
   // only once and the stub code resides in the vm_isolate heap.
   static void InitOnce();
 
-  static void ReadFrom(SnapshotReader* reader);
-  static void WriteTo(SnapshotWriter* writer);
+  static void Push(Serializer* serializer);
+  static void WriteRef(Serializer* serializer);
+  static void ReadRef(Deserializer* deserializer);
 
   // Generate all stubs which are generated on a per isolate basis as they
   // have embedded objects which are isolate specific.
diff --git a/runtime/vm/stub_code_arm.cc b/runtime/vm/stub_code_arm.cc
index 538cd33..9a7429b 100644
--- a/runtime/vm/stub_code_arm.cc
+++ b/runtime/vm/stub_code_arm.cc
@@ -371,7 +371,8 @@
   Label loop;
   __ Bind(&loop);
   __ ldr(IP, Address(R1, kWordSize, Address::PreIndex));
-  __ InitializeFieldNoBarrier(R0, Address(R3, R2, LSL, 1), IP);
+  // Generational barrier is needed, array is not necessarily in new space.
+  __ StoreIntoObject(R0, Address(R3, R2, LSL, 1), IP);
   __ Bind(&enter);
   __ subs(R2, R2, Operand(Smi::RawValue(1)));  // R2 is Smi.
   __ b(&loop, PL);
@@ -625,7 +626,7 @@
   __ b(&slow_case, GT);
 
   const intptr_t cid = kArrayCid;
-  __ MaybeTraceAllocation(cid, R4, &slow_case);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, R4, &slow_case));
 
   const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1;
   __ LoadImmediate(R9, fixed_size);
@@ -634,7 +635,7 @@
   __ bic(R9, R9, Operand(kObjectAlignment - 1));
 
   // R9: Allocation size.
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ LoadIsolate(R8);
   __ ldr(R8, Address(R8, Isolate::heap_offset()));
   // Potential new object start.
@@ -652,7 +653,7 @@
 
   // Successfully allocated the object(s), now update top to point to
   // next object start and initialize the object.
-  __ LoadAllocationStatsAddress(R3, cid);
+  NOT_IN_PRODUCT(__ LoadAllocationStatsAddress(R3, cid));
   __ str(NOTFP, Address(R8, Heap::TopOffset(space)));
   __ add(R0, R0, Operand(kHeapObjectTag));
 
@@ -678,12 +679,12 @@
   // R0: new object start as a tagged pointer.
   // NOTFP: new object end address.
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, Array::type_arguments_offset()),
                               R1);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, Array::length_offset()),
                               R2);
 
@@ -695,7 +696,7 @@
   // data area to be initialized.
   // NOTFP: new object end address.
   // R9: allocation size.
-  __ IncrementAllocationStatsWithSize(R3, R9, space);
+  NOT_IN_PRODUCT(__ IncrementAllocationStatsWithSize(R3, R9, space));
 
   __ LoadObject(R8, Object::null_object());
   __ mov(R9, Operand(R8));
@@ -858,12 +859,12 @@
     ASSERT(kSmiTagShift == 1);
     __ bic(R2, R2, Operand(kObjectAlignment - 1));
 
-    __ MaybeTraceAllocation(kContextCid, R8, &slow_case);
+    NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid, R8, &slow_case));
     // Now allocate the object.
     // R1: number of context variables.
     // R2: object size.
     const intptr_t cid = kContextCid;
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     __ LoadIsolate(R9);
     __ ldr(R9, Address(R9, Isolate::heap_offset()));
     __ ldr(R0, Address(R9, Heap::TopOffset(space)));
@@ -889,7 +890,7 @@
     // R2: object size.
     // R3: next object start.
     // R9: heap.
-    __ LoadAllocationStatsAddress(R4, cid);
+    NOT_IN_PRODUCT(__ LoadAllocationStatsAddress(R4, cid));
     __ str(R3, Address(R9, Heap::TopOffset(space)));
     __ add(R0, R0, Operand(kHeapObjectTag));
 
@@ -926,7 +927,7 @@
     // R3: next object start.
     // R4: allocation stats address.
     __ LoadObject(R8, Object::null_object());
-    __ InitializeFieldNoBarrier(R0, FieldAddress(R0, Context::parent_offset()),
+    __ StoreIntoObjectNoBarrier(R0, FieldAddress(R0, Context::parent_offset()),
                                 R8);
 
     // Initialize the context variables.
@@ -939,7 +940,7 @@
     Label loop;
     __ AddImmediate(NOTFP, R0, Context::variable_offset(0) - kHeapObjectTag);
     __ InitializeFieldsNoBarrier(R0, NOTFP, R3, R8, R9);
-    __ IncrementAllocationStatsWithSize(R4, R2, space);
+    NOT_IN_PRODUCT(__ IncrementAllocationStatsWithSize(R4, R2, space));
 
     // Done allocating and initializing the context.
     // R0: new object.
@@ -1062,7 +1063,7 @@
     Label slow_case;
     // Allocate the object and update top to point to
     // next object start and initialize the allocated object.
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     __ ldr(R9, Address(THR, Thread::heap_offset()));
     __ ldr(R0, Address(R9, Heap::TopOffset(space)));
     __ AddImmediate(R1, R0, instance_size);
@@ -1081,7 +1082,7 @@
 
     // Load the address of the allocation stats table. We split up the load
     // and the increment so that the dependent load is not too nearby.
-    __ LoadAllocationStatsAddress(R9, cls.id());
+    NOT_IN_PRODUCT(__ LoadAllocationStatsAddress(R9, cls.id()));
 
     // R0: new object start.
     // R1: next object start.
@@ -1130,7 +1131,7 @@
       // Set the type arguments in the new object.
       __ ldr(R4, Address(SP, 0));
       FieldAddress type_args(R0, cls.type_arguments_field_offset());
-      __ InitializeFieldNoBarrier(R0, type_args, R4);
+      __ StoreIntoObjectNoBarrier(R0, type_args, R4);
     }
 
     // Done allocating and initializing the instance.
@@ -1138,7 +1139,7 @@
     // R9: allocation stats table.
 
     // Update allocation stats.
-    __ IncrementAllocationStats(R9, cls.id(), space);
+    NOT_IN_PRODUCT(__ IncrementAllocationStats(R9, cls.id(), space));
 
     // R0: new object (tagged).
     __ Ret();
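Throughout these stub changes, allocation tracing and stats updates are wrapped in NOT_IN_PRODUCT so they disappear from product builds. A sketch of how such a macro is typically defined (assuming a PRODUCT define selects the build mode; the VM's real definition lives in its globals header):

  #if defined(PRODUCT)
  #define NOT_IN_PRODUCT(code)       // stripped from product builds
  #else
  #define NOT_IN_PRODUCT(code) code  // kept in debug/release builds
  #endif
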
diff --git a/runtime/vm/stub_code_arm64.cc b/runtime/vm/stub_code_arm64.cc
index 97a20c8..04dd0f4 100644
--- a/runtime/vm/stub_code_arm64.cc
+++ b/runtime/vm/stub_code_arm64.cc
@@ -652,9 +652,9 @@
   __ b(&slow_case, GT);
 
   const intptr_t cid = kArrayCid;
-  __ MaybeTraceAllocation(kArrayCid, R4, &slow_case);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid, R4, &slow_case));
 
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ LoadIsolate(R8);
   __ ldr(R8, Address(R8, Isolate::heap_offset()));
 
@@ -693,7 +693,7 @@
   // R8: heap.
   __ StoreToOffset(R7, R8, Heap::TopOffset(space));
   __ add(R0, R0, Operand(kHeapObjectTag));
-  __ UpdateAllocationStatsWithSize(cid, R3, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R3, space));
 
   // R0: new object start as a tagged pointer.
   // R1: array element type.
@@ -917,12 +917,12 @@
     ASSERT(kSmiTagShift == 1);
     __ andi(R2, R2, Immediate(~(kObjectAlignment - 1)));
 
-    __ MaybeTraceAllocation(kContextCid, R4, &slow_case);
+    NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid, R4, &slow_case));
     // Now allocate the object.
     // R1: number of context variables.
     // R2: object size.
     const intptr_t cid = kContextCid;
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     __ LoadIsolate(R5);
     __ ldr(R5, Address(R5, Isolate::heap_offset()));
     __ ldr(R0, Address(R5, Heap::TopOffset(space)));
@@ -950,7 +950,7 @@
     // R5: heap.
     __ str(R3, Address(R5, Heap::TopOffset(space)));
     __ add(R0, R0, Operand(kHeapObjectTag));
-    __ UpdateAllocationStatsWithSize(cid, R2, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R2, space));
 
     // Calculate the size tag.
     // R0: new object.
@@ -1108,7 +1108,7 @@
     // Allocate the object and update top to point to
     // next object start and initialize the allocated object.
     // R1: instantiated type arguments (if is_cls_parameterized).
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     __ ldr(R5, Address(THR, Thread::heap_offset()));
     __ ldr(R2, Address(R5, Heap::TopOffset(space)));
     __ AddImmediate(R3, R2, instance_size);
@@ -1124,7 +1124,7 @@
       __ b(&slow_case, CS);  // Unsigned higher or equal.
     }
     __ str(R3, Address(R5, Heap::TopOffset(space)));
-    __ UpdateAllocationStats(cls.id(), space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), space));
 
     // R2: new object start.
     // R3: next object start.
diff --git a/runtime/vm/stub_code_dbc.cc b/runtime/vm/stub_code_dbc.cc
index 36e44b3c..66a46bd 100644
--- a/runtime/vm/stub_code_dbc.cc
+++ b/runtime/vm/stub_code_dbc.cc
@@ -64,13 +64,17 @@
 }
 
 
+void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) {
+  __ Trap();
+}
+
+
 // Print the stop message.
 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) {
   OS::Print("Stop message: %s\n", message);
 }
 END_LEAF_RUNTIME_ENTRY
 
-
 }  // namespace dart
 
 #endif  // defined TARGET_ARCH_DBC
diff --git a/runtime/vm/stub_code_ia32.cc b/runtime/vm/stub_code_ia32.cc
index 836e4b5..139a4a2 100644
--- a/runtime/vm/stub_code_ia32.cc
+++ b/runtime/vm/stub_code_ia32.cc
@@ -323,8 +323,8 @@
   __ jmp(&loop_condition, Assembler::kNearJump);
   __ Bind(&loop);
   __ movl(EDI, Address(EBX, 0));
-  // No generational barrier needed, since array is in new space.
-  __ InitializeFieldNoBarrier(EAX, Address(ECX, 0), EDI);
+  // Generational barrier is needed, array is not necessarily in new space.
+  __ StoreIntoObject(EAX, Address(ECX, 0), EDI);
   __ AddImmediate(ECX, Immediate(kWordSize));
   __ AddImmediate(EBX, Immediate(-kWordSize));
   __ Bind(&loop_condition);
@@ -561,10 +561,10 @@
   __ cmpl(EDX, max_len);
   __ j(GREATER, &slow_case);
 
-  __ MaybeTraceAllocation(kArrayCid,
-                          EAX,
-                          &slow_case,
-                          Assembler::kFarJump);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid,
+                                         EAX,
+                                         &slow_case,
+                                         Assembler::kFarJump));
 
   const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1;
   __ leal(EBX, Address(EDX, TIMES_2, fixed_size));  // EDX is Smi.
@@ -576,7 +576,7 @@
   // EBX: allocation size.
 
   const intptr_t cid = kArrayCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ movl(EDI, Address(THR, Thread::heap_offset()));
   __ movl(EAX, Address(EDI, Heap::TopOffset(space)));
   __ addl(EBX, EAX);
@@ -596,7 +596,7 @@
   __ movl(Address(EDI, Heap::TopOffset(space)), EBX);
   __ subl(EBX, EAX);
   __ addl(EAX, Immediate(kHeapObjectTag));
-  __ UpdateAllocationStatsWithSize(cid, EBX, EDI, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, EBX, EDI, space));
 
   // Initialize the tags.
   // EAX: new object start as a tagged pointer.
@@ -624,12 +624,13 @@
   // ECX: array element type.
   // EDX: Array length as Smi (preserved).
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(EAX,
+  // No generational barrier needed, since we store into a new object.
+  __ StoreIntoObjectNoBarrier(EAX,
                               FieldAddress(EAX, Array::type_arguments_offset()),
                               ECX);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(EAX,
+  __ StoreIntoObjectNoBarrier(EAX,
                               FieldAddress(EAX, Array::length_offset()),
                               EDX);
 
@@ -648,7 +649,7 @@
   __ cmpl(EDI, EBX);
   __ j(ABOVE_EQUAL, &done, Assembler::kNearJump);
   // No generational barrier needed, since we are storing null.
-  __ InitializeFieldNoBarrier(EAX, Address(EDI, 0), Object::null_object());
+  __ StoreIntoObjectNoBarrier(EAX, Address(EDI, 0), Object::null_object());
   __ addl(EDI, Immediate(kWordSize));
   __ jmp(&init_loop, Assembler::kNearJump);
   __ Bind(&done);
@@ -798,15 +799,15 @@
     __ leal(EBX, Address(EDX, TIMES_4, fixed_size));
     __ andl(EBX, Immediate(-kObjectAlignment));
 
-    __ MaybeTraceAllocation(kContextCid,
-                            EAX,
-                            &slow_case,
-                            Assembler::kFarJump);
+    NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid,
+                                           EAX,
+                                           &slow_case,
+                                           Assembler::kFarJump));
 
     // Now allocate the object.
     // EDX: number of context variables.
     const intptr_t cid = kContextCid;
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     __ movl(ECX, Address(THR, Thread::heap_offset()));
     __ movl(EAX, Address(ECX, Heap::TopOffset(space)));
     __ addl(EBX, EAX);
@@ -836,7 +837,7 @@
     __ subl(EBX, EAX);
     __ addl(EAX, Immediate(kHeapObjectTag));
     // Generate isolate-independent code to allow sharing between isolates.
-    __ UpdateAllocationStatsWithSize(cid, EBX, EDI, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, EBX, EDI, space));
 
     // Calculate the size tag.
     // EAX: new object.
@@ -872,7 +873,7 @@
     // EAX: new object.
     // EDX: number of context variables.
     // No generational barrier needed, since we are storing null.
-    __ InitializeFieldNoBarrier(EAX,
+    __ StoreIntoObjectNoBarrier(EAX,
                                 FieldAddress(EAX, Context::parent_offset()),
                                 Object::null_object());
 
@@ -887,7 +888,7 @@
       __ Bind(&loop);
       __ decl(EDX);
       // No generational barrier needed, since we are storing null.
-      __ InitializeFieldNoBarrier(EAX,
+      __ StoreIntoObjectNoBarrier(EAX,
                                   Address(EBX, EDX, TIMES_4, 0),
                                   Object::null_object());
       __ Bind(&entry);
@@ -1018,7 +1019,7 @@
     // Allocate the object and update top to point to
     // next object start and initialize the allocated object.
     // EDX: instantiated type arguments (if is_cls_parameterized).
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     __ movl(EDI, Address(THR, Thread::heap_offset()));
     __ movl(EAX, Address(EDI, Heap::TopOffset(space)));
     __ leal(EBX, Address(EAX, instance_size));
@@ -1033,7 +1034,7 @@
       __ j(ABOVE_EQUAL, &slow_case);
     }
     __ movl(Address(EDI, Heap::TopOffset(space)), EBX);
-    __ UpdateAllocationStats(cls.id(), ECX, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), ECX, space));
 
     // EAX: new object start (untagged).
     // EBX: next object start.
@@ -1058,7 +1059,7 @@
       for (intptr_t current_offset = Instance::NextFieldOffset();
            current_offset < instance_size;
            current_offset += kWordSize) {
-        __ InitializeFieldNoBarrier(EAX,
+        __ StoreIntoObjectNoBarrier(EAX,
                                     FieldAddress(EAX, current_offset),
                                     Object::null_object());
       }
@@ -1074,7 +1075,7 @@
       __ Bind(&init_loop);
       __ cmpl(ECX, EBX);
       __ j(ABOVE_EQUAL, &done, Assembler::kNearJump);
-      __ InitializeFieldNoBarrier(EAX,
+      __ StoreIntoObjectNoBarrier(EAX,
                                   Address(ECX, 0),
                                   Object::null_object());
       __ addl(ECX, Immediate(kWordSize));
@@ -1082,11 +1083,11 @@
       __ Bind(&done);
     }
     if (is_cls_parameterized) {
+      // EAX: new object (tagged).
       // EDX: new object type arguments.
       // Set the type arguments in the new object.
       intptr_t offset = cls.type_arguments_field_offset();
-      // TODO(koda): Figure out why previous content is sometimes null here.
-      __ InitializeFieldNoBarrier(EAX, FieldAddress(EAX, offset), EDX);
+      __ StoreIntoObjectNoBarrier(EAX, FieldAddress(EAX, offset), EDX);
     }
     // Done allocating and initializing the instance.
     // EAX: new object (tagged).
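The barrier changes in these stubs follow one rule: a store into an object that may live in old space needs the generational barrier (StoreIntoObject), while stores into a freshly allocated new-space object, or stores of null, can skip it (StoreIntoObjectNoBarrier). A minimal sketch of that decision, with hypothetical IsOldObject/IsNewObject/RememberInStoreBuffer stand-ins:

  struct Object;  // opaque placeholder for this sketch

  // Hypothetical stand-ins for the VM's heap queries and store buffer.
  bool IsOldObject(const Object* obj);
  bool IsNewObject(const Object* obj);
  void RememberInStoreBuffer(Object* obj);

  // Only a store that may create an old->new pointer needs remembering.
  void StoreWithBarrier(Object* container, Object** slot, Object* value) {
    *slot = value;
    if (IsOldObject(container) && IsNewObject(value)) {
      RememberInStoreBuffer(container);
    }
  }
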
diff --git a/runtime/vm/stub_code_mips.cc b/runtime/vm/stub_code_mips.cc
index 27aa782..ef36894 100644
--- a/runtime/vm/stub_code_mips.cc
+++ b/runtime/vm/stub_code_mips.cc
@@ -656,7 +656,7 @@
   __ BranchUnsignedGreater(T3, Immediate(max_len), &slow_case);
 
   const intptr_t cid = kArrayCid;
-  __ MaybeTraceAllocation(kArrayCid, T4, &slow_case);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid, T4, &slow_case));
 
   const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1;
   __ LoadImmediate(T2, fixed_size);
@@ -668,7 +668,7 @@
 
   // T2: Allocation size.
 
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ LoadIsolate(T3);
   __ lw(T3, Address(T3, Isolate::heap_offset()));
   // Potential new object start.
@@ -690,7 +690,7 @@
   // T3: heap.
   __ sw(T1, Address(T3, Heap::TopOffset(space)));
   __ addiu(T0, T0, Immediate(kHeapObjectTag));
-  __ UpdateAllocationStatsWithSize(cid, T2, T4, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, T2, T4, space));
 
   // Initialize the tags.
   // T0: new object start as a tagged pointer.
@@ -935,12 +935,12 @@
     __ LoadImmediate(T0, ~((kObjectAlignment) - 1));
     __ and_(T2, T2, T0);
 
-    __ MaybeTraceAllocation(kContextCid, T4, &slow_case);
+    NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid, T4, &slow_case));
     // Now allocate the object.
     // T1: number of context variables.
     // T2: object size.
     const intptr_t cid = kContextCid;
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     __ LoadIsolate(T5);
     __ lw(T5, Address(T5, Isolate::heap_offset()));
     __ lw(V0, Address(T5, Heap::TopOffset(space)));
@@ -968,7 +968,7 @@
     // T5: heap.
     __ sw(T3, Address(T5, Heap::TopOffset(space)));
     __ addiu(V0, V0, Immediate(kHeapObjectTag));
-    __ UpdateAllocationStatsWithSize(cid, T2, T5, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, T2, T5, space));
 
     // Calculate the size tag.
     // V0: new object.
@@ -1135,7 +1135,7 @@
     // Allocate the object and update top to point to
     // next object start and initialize the allocated object.
     // T1: instantiated type arguments (if is_cls_parameterized).
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     __ lw(T5, Address(THR, Thread::heap_offset()));
     __ lw(T2, Address(T5, Heap::TopOffset(space)));
     __ LoadImmediate(T4, instance_size);
@@ -1153,7 +1153,7 @@
     // Successfully allocated the object(s), now update top to point to
     // next object start and initialize the object.
     __ sw(T3, Address(T5, Heap::TopOffset(space)));
-    __ UpdateAllocationStats(cls.id(), T5, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), T5, space));
 
     // T2: new object start.
     // T3: next object start.
diff --git a/runtime/vm/stub_code_x64.cc b/runtime/vm/stub_code_x64.cc
index d66c125..45edaf4 100644
--- a/runtime/vm/stub_code_x64.cc
+++ b/runtime/vm/stub_code_x64.cc
@@ -339,8 +339,8 @@
   __ jmp(&loop_condition, kJumpLength);
   __ Bind(&loop);
   __ movq(RDI, Address(R12, 0));
-  // No generational barrier needed, since array is in new space.
-  __ InitializeFieldNoBarrier(RAX, Address(RBX, 0), RDI);
+  // Generational barrier is needed, array is not necessarily in new space.
+  __ StoreIntoObject(RAX, Address(RBX, 0), RDI);
   __ addq(RBX, Immediate(kWordSize));
   __ subq(R12, Immediate(kWordSize));
   __ Bind(&loop_condition);
@@ -588,9 +588,9 @@
   __ j(GREATER, &slow_case);
 
   // Check for allocation tracing.
-  __ MaybeTraceAllocation(kArrayCid,
-                          &slow_case,
-                          Assembler::kFarJump);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid,
+                                         &slow_case,
+                                         Assembler::kFarJump));
 
   const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1;
   __ leaq(RDI, Address(RDI, TIMES_4, fixed_size));  // RDI is a Smi.
@@ -598,7 +598,7 @@
   __ andq(RDI, Immediate(-kObjectAlignment));
 
   const intptr_t cid = kArrayCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ movq(R13, Address(THR, Thread::heap_offset()));
   __ movq(RAX, Address(R13, Heap::TopOffset(space)));
 
@@ -619,7 +619,7 @@
   // next object start and initialize the object.
   __ movq(Address(R13, Heap::TopOffset(space)), RCX);
   __ addq(RAX, Immediate(kHeapObjectTag));
-  __ UpdateAllocationStatsWithSize(cid, RDI, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, RDI, space));
   // Initialize the tags.
   // RAX: new object start as a tagged pointer.
   // RDI: allocation size.
@@ -641,12 +641,13 @@
 
   // RAX: new object start as a tagged pointer.
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(RAX,
+  // No generational barrier needed, since we store into a new object.
+  __ StoreIntoObjectNoBarrier(RAX,
                               FieldAddress(RAX, Array::type_arguments_offset()),
                               RBX);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(RAX,
+  __ StoreIntoObjectNoBarrier(RAX,
                               FieldAddress(RAX, Array::length_offset()),
                               R10);
 
@@ -668,7 +669,7 @@
 #endif  // DEBUG
   __ j(ABOVE_EQUAL, &done, kJumpLength);
   // No generational barrier needed, since we are storing null.
-  __ InitializeFieldNoBarrier(RAX, Address(RDI, 0), R12);
+  __ StoreIntoObjectNoBarrier(RAX, Address(RDI, 0), R12);
   __ addq(RDI, Immediate(kWordSize));
   __ jmp(&init_loop, kJumpLength);
   __ Bind(&done);
@@ -843,14 +844,14 @@
     __ andq(R13, Immediate(-kObjectAlignment));
 
     // Check for allocation tracing.
-    __ MaybeTraceAllocation(kContextCid,
-                            &slow_case,
-                            Assembler::kFarJump);
+    NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid,
+                                           &slow_case,
+                                           Assembler::kFarJump));
 
     // Now allocate the object.
     // R10: number of context variables.
     const intptr_t cid = kContextCid;
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     __ movq(RCX, Address(THR, Thread::heap_offset()));
     __ movq(RAX, Address(RCX, Heap::TopOffset(space)));
     __ addq(R13, RAX);
@@ -877,7 +878,7 @@
     __ subq(R13, RAX);
     __ addq(RAX, Immediate(kHeapObjectTag));
     // Generate isolate-independent code to allow sharing between isolates.
-    __ UpdateAllocationStatsWithSize(cid, R13, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R13, space));
 
     // Calculate the size tag.
     // RAX: new object.
@@ -913,7 +914,7 @@
     // RAX: new object.
     // R10: number of context variables.
     // No generational barrier needed, since we are storing null.
-    __ InitializeFieldNoBarrier(RAX,
+    __ StoreIntoObjectNoBarrier(RAX,
                                 FieldAddress(RAX, Context::parent_offset()),
                                 R9);
 
@@ -932,7 +933,7 @@
       __ Bind(&loop);
       __ decq(R10);
       // No generational barrier needed, since we are storing null.
-      __ InitializeFieldNoBarrier(RAX,
+      __ StoreIntoObjectNoBarrier(RAX,
                                   Address(R13, R10, TIMES_8, 0),
                                   R9);
       __ Bind(&entry);
@@ -1053,7 +1054,7 @@
     // Allocate the object and update top to point to
     // next object start and initialize the allocated object.
     // RDX: instantiated type arguments (if is_cls_parameterized).
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     __ movq(RCX, Address(THR, Thread::heap_offset()));
     __ movq(RAX, Address(RCX, Heap::TopOffset(space)));
     __ leaq(RBX, Address(RAX, instance_size));
@@ -1068,7 +1069,7 @@
       __ j(ABOVE_EQUAL, &slow_case);
     }
     __ movq(Address(RCX, Heap::TopOffset(space)), RBX);
-    __ UpdateAllocationStats(cls.id(), space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), space));
 
     // RAX: new object start (untagged).
     // RBX: next object start.
@@ -1093,7 +1094,7 @@
       for (intptr_t current_offset = Instance::NextFieldOffset();
            current_offset < instance_size;
            current_offset += kWordSize) {
-        __ InitializeFieldNoBarrier(RAX,
+        __ StoreIntoObjectNoBarrier(RAX,
                                     FieldAddress(RAX, current_offset),
                                     R9);
       }
@@ -1114,16 +1115,17 @@
       static const bool kJumpLength = Assembler::kNearJump;
 #endif  // DEBUG
       __ j(ABOVE_EQUAL, &done, kJumpLength);
-      __ InitializeFieldNoBarrier(RAX, Address(RCX, 0), R9);
+      __ StoreIntoObjectNoBarrier(RAX, Address(RCX, 0), R9);
       __ addq(RCX, Immediate(kWordSize));
       __ jmp(&init_loop, Assembler::kNearJump);
       __ Bind(&done);
     }
     if (is_cls_parameterized) {
+      // RAX: new object (tagged).
       // RDX: new object type arguments.
       // Set the type arguments in the new object.
       intptr_t offset = cls.type_arguments_field_offset();
-      __ InitializeFieldNoBarrier(RAX, FieldAddress(RAX, offset), RDX);
+      __ StoreIntoObjectNoBarrier(RAX, FieldAddress(RAX, offset), RDX);
     }
     // Done allocating and initializing the instance.
     // RAX: new object (tagged).
diff --git a/runtime/vm/symbols.h b/runtime/vm/symbols.h
index 547c2e5..5e6a165 100644
--- a/runtime/vm/symbols.h
+++ b/runtime/vm/symbols.h
@@ -676,6 +676,8 @@
   friend class String;
   friend class SnapshotReader;
   friend class SnapshotWriter;
+  friend class Serializer;
+  friend class Deserializer;
   friend class ApiMessageReader;
 
   DISALLOW_COPY_AND_ASSIGN(Symbols);
diff --git a/runtime/vm/thread_interrupter_fuchsia.cc b/runtime/vm/thread_interrupter_fuchsia.cc
new file mode 100644
index 0000000..d089211
--- /dev/null
+++ b/runtime/vm/thread_interrupter_fuchsia.cc
@@ -0,0 +1,30 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "platform/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/thread_interrupter.h"
+
+#include "platform/assert.h"
+
+namespace dart {
+
+void ThreadInterrupter::InterruptThread(OSThread* thread) {
+  UNIMPLEMENTED();
+}
+
+
+void ThreadInterrupter::InstallSignalHandler() {
+  UNIMPLEMENTED();
+}
+
+
+void ThreadInterrupter::RemoveSignalHandler() {
+  UNIMPLEMENTED();
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/timeline.cc b/runtime/vm/timeline.cc
index dbe9ef0..f4c62a8 100644
--- a/runtime/vm/timeline.cc
+++ b/runtime/vm/timeline.cc
@@ -2,6 +2,8 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
+#ifndef PRODUCT
+
 #include <cstdlib>
 
 #include "vm/atomic.h"
@@ -16,8 +18,6 @@
 
 namespace dart {
 
-#ifndef PRODUCT
-
 DEFINE_FLAG(bool, complete_timeline, false, "Record the complete timeline");
 DEFINE_FLAG(bool, startup_timeline, false, "Record the startup timeline");
 DEFINE_FLAG(bool, trace_timeline, false,
@@ -1692,6 +1692,6 @@
   return r;
 }
 
-#endif  // !PRODUCT
-
 }  // namespace dart
+
+#endif  // !PRODUCT
diff --git a/runtime/vm/verified_memory.cc b/runtime/vm/verified_memory.cc
deleted file mode 100644
index a3a3d9e..0000000
--- a/runtime/vm/verified_memory.cc
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-#include "vm/verified_memory.h"
-
-namespace dart {
-
-#if defined(DEBUG)
-
-DEFINE_FLAG(bool, verified_mem, false,
-            "Enable write-barrier verification mode (slow, DEBUG only).");
-DEFINE_FLAG(int, verified_mem_max_reserve_mb, (kWordSize <= 4) ? 16 : 32,
-            "When verified_mem is true, largest supported reservation (MB).");
-
-
-VirtualMemory* VerifiedMemory::ReserveInternal(intptr_t size) {
-  if (size > offset()) {
-    FATAL1("Requested reservation of %" Pd " bytes exceeds the limit. "
-           "Use --verified_mem_max_reserve_mb to increase it.", size);
-  }
-  VirtualMemory* result = VirtualMemory::Reserve(size + offset());
-  if (result != NULL) {
-    // Commit the offset part of the reservation (writable, not executable).
-    result->Commit(result->start() + offset(), size, /* executable = */ false);
-    // Truncate without unmapping, so that the returned object looks like
-    // a normal 'size' bytes reservation (but VirtualMemory will correctly
-    // unmap the entire original reservation on destruction).
-    result->Truncate(size, /* try_unmap = */ false);
-  }
-  return result;
-}
-
-#endif  // DEBUG
-
-}  // namespace dart
diff --git a/runtime/vm/verified_memory.h b/runtime/vm/verified_memory.h
deleted file mode 100644
index db292cc..0000000
--- a/runtime/vm/verified_memory.h
+++ /dev/null
@@ -1,90 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-#ifndef VM_VERIFIED_MEMORY_H_
-#define VM_VERIFIED_MEMORY_H_
-
-#include "vm/allocation.h"
-#include "vm/flags.h"
-#include "vm/virtual_memory.h"
-
-namespace dart {
-
-#if defined(DEBUG)
-DECLARE_FLAG(bool, verified_mem);
-DECLARE_FLAG(int, verified_mem_max_reserve_mb);
-#endif
-
-
-// A wrapper around VirtualMemory for verifying that a particular class of
-// memory writes are only performed through a particular interface.
-//
-// The main use case is verifying that storing pointers into objects is only
-// performed by code aware of the GC write barrier.
-//
-// NOTE: Verification is enabled only if 'verified_mem' is true, and this flag
-// only exists in DEBUG builds.
-class VerifiedMemory : public AllStatic {
- public:
-  // Reserves a block of memory for which all methods in this class may
-  // be called. Returns NULL if out of memory.
-  static VirtualMemory* Reserve(intptr_t size) {
-    return enabled() ? ReserveInternal(size) : VirtualMemory::Reserve(size);
-  }
-
-  // Verifies that [start, start + size) has only been mutated through
-  // methods in this class (or explicitly accepted by calling Accept).
-  static void Verify(uword start, intptr_t size) {
-    if (!enabled()) return;
-    ASSERT(size <= offset());
-    ASSERT(memcmp(reinterpret_cast<void*>(start + offset()),
-                  reinterpret_cast<void*>(start),
-                  size) == 0);
-  }
-
-  // Assigns value to *ptr after verifying previous content at that location.
-  template<typename T>
-  static void Write(T* ptr, const T& value) {
-    if (enabled()) {
-      uword addr = reinterpret_cast<uword>(ptr);
-      Verify(addr, sizeof(T));
-      T* offset_ptr = reinterpret_cast<T*>(addr + offset());
-      *offset_ptr = value;
-    }
-    *ptr = value;
-  }
-
-  // Accepts the current state of [start, start + size), even if it has been
-  // mutated by other means.
-  static void Accept(uword start, intptr_t size) {
-    if (!enabled()) return;
-    ASSERT(size <= offset());
-    memmove(reinterpret_cast<void*>(start + offset()),
-            reinterpret_cast<void*>(start),
-            size);
-  }
-
- private:
-#if defined(DEBUG)
-  static bool enabled() { return FLAG_verified_mem; }
-  static intptr_t offset() { return FLAG_verified_mem_max_reserve_mb * MB; }
-  static VirtualMemory* ReserveInternal(intptr_t size);
-#else
-  // In release mode, most code in this class is optimized away.
-  static bool enabled() { return false; }
-  static intptr_t offset() { UNREACHABLE(); return -1; }
-  static VirtualMemory* ReserveInternal(intptr_t size) {
-    UNREACHABLE();
-    return NULL;
-  }
-#endif
-
-  friend class Assembler;  // To use enabled/offset when generating code.
-  friend class FlowGraphCompiler;  // To compute edge counter code size.
-  friend class Intrinsifier;  // To know whether a jump is near or far.
-};
-
-}  // namespace dart
-
-#endif  // VM_VERIFIED_MEMORY_H_
diff --git a/runtime/vm/verifier.cc b/runtime/vm/verifier.cc
index 5491f1d..18a7874 100644
--- a/runtime/vm/verifier.cc
+++ b/runtime/vm/verifier.cc
@@ -48,7 +48,6 @@
 
 void VerifyPointersVisitor::VisitPointers(RawObject** first, RawObject** last) {
   for (RawObject** current = first; current <= last; current++) {
-    VerifiedMemory::Verify(reinterpret_cast<uword>(current), kWordSize);
     RawObject* raw_obj = *current;
     if (raw_obj->IsHeapObject()) {
       if (!allocated_set_->Contains(raw_obj)) {
diff --git a/runtime/vm/virtual_memory_fuchsia.cc b/runtime/vm/virtual_memory_fuchsia.cc
new file mode 100644
index 0000000..da32045
--- /dev/null
+++ b/runtime/vm/virtual_memory_fuchsia.cc
@@ -0,0 +1,55 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/virtual_memory.h"
+
+#include <unistd.h>  // NOLINT
+
+#include "platform/assert.h"
+#include "vm/os.h"
+
+namespace dart {
+
+uword VirtualMemory::page_size_ = 0;
+
+
+void VirtualMemory::InitOnce() {
+  page_size_ = getpagesize();
+}
+
+
+VirtualMemory* VirtualMemory::ReserveInternal(intptr_t size) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+VirtualMemory::~VirtualMemory() {
+  UNIMPLEMENTED();
+}
+
+
+bool VirtualMemory::FreeSubSegment(void* address, intptr_t size) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+
+bool VirtualMemory::Commit(uword addr, intptr_t size, bool executable) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+
+bool VirtualMemory::Protect(void* address, intptr_t size, Protection mode) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/vm_sources.gypi b/runtime/vm/vm_sources.gypi
index 36d9534..156b120 100644
--- a/runtime/vm/vm_sources.gypi
+++ b/runtime/vm/vm_sources.gypi
@@ -43,6 +43,7 @@
     'ast_transformer.h',
     'atomic.h',
     'atomic_android.h',
+    'atomic_fuchsia.h',
     'atomic_linux.h',
     'atomic_macos.h',
     'atomic_simulator.h',
@@ -80,6 +81,8 @@
     'class_finalizer_test.cc',
     'class_table.cc',
     'class_table.h',
+    'clustered_snapshot.cc',
+    'clustered_snapshot.h',
     'code_descriptors.cc',
     'code_descriptors.h',
     'code_descriptors_test.cc',
@@ -125,6 +128,7 @@
     'cpuid.cc',
     'cpuinfo.h',
     'cpuinfo_android.cc',
+    'cpuinfo_fuchsia.cc',
     'cpuinfo_linux.cc',
     'cpuinfo_macos.cc',
     'cpuinfo_test.cc',
@@ -156,8 +160,6 @@
     'deferred_objects.h',
     'deopt_instructions.cc',
     'deopt_instructions.h',
-    'dev_fs.cc',
-    'dev_fs.h',
     'disassembler.cc',
     'disassembler.h',
     'disassembler_arm.cc',
@@ -305,6 +307,7 @@
     'native_message_handler.h',
     'native_symbol.h',
     'native_symbol_android.cc',
+    'native_symbol_fuchsia.cc',
     'native_symbol_linux.cc',
     'native_symbol_macos.cc',
     'native_symbol_win.cc',
@@ -331,6 +334,7 @@
     'object_x64_test.cc',
     'os.h',
     'os_android.cc',
+    'os_fuchsia.cc',
     'os_linux.cc',
     'os_macos.cc',
     'os_test.cc',
@@ -338,6 +342,8 @@
     'os_thread.h',
     'os_thread_android.cc',
     'os_thread_android.h',
+    'os_thread_fuchsia.cc',
+    'os_thread_fuchsia.h',
     'os_thread_linux.cc',
     'os_thread_linux.h',
     'os_thread_macos.cc',
@@ -424,6 +430,7 @@
     'service_isolate.h',
     'service_test.cc',
     'signal_handler_android.cc',
+    'signal_handler_fuchsia.cc',
     'signal_handler_linux.cc',
     'signal_handler_macos.cc',
     'signal_handler_win.cc',
@@ -479,6 +486,7 @@
     'thread_interrupter.cc',
     'thread_interrupter.h',
     'thread_interrupter_android.cc',
+    'thread_interrupter_fuchsia.cc',
     'thread_interrupter_linux.cc',
     'thread_interrupter_macos.cc',
     'thread_interrupter_win.cc',
@@ -513,14 +521,12 @@
     'uri.h',
     'uri_test.cc',
     'utils_test.cc',
-    'verified_memory.cc',
-    'verified_memory.h',
-    'verified_memory_test.cc',
     'verifier.cc',
     'verifier.h',
     'virtual_memory.cc',
     'virtual_memory.h',
     'virtual_memory_android.cc',
+    'virtual_memory_fuchsia.cc',
     'virtual_memory_linux.cc',
     'virtual_memory_macos.cc',
     'virtual_memory_test.cc',
diff --git a/sdk/lib/_internal/js_runtime/lib/core_patch.dart b/sdk/lib/_internal/js_runtime/lib/core_patch.dart
index 9004574..129ce01 100644
--- a/sdk/lib/_internal/js_runtime/lib/core_patch.dart
+++ b/sdk/lib/_internal/js_runtime/lib/core_patch.dart
@@ -587,15 +587,17 @@
 @patch
 class Uri {
   @patch
-  static bool get _isWindows => false;
-
-  @patch
   static Uri get base {
     String uri = Primitives.currentUri();
     if (uri != null) return Uri.parse(uri);
     throw new UnsupportedError("'Uri.base' is not supported");
   }
+}
 
+@patch
+class _Uri {
+  @patch
+  static bool get _isWindows => false;
 
   // Matches a String that _uriEncodes to itself regardless of the kind of
   // component.  This corresponds to [_unreservedTable], i.e. characters that
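
The hunk above moves the platform-specific `_isWindows` getter from the
patched `Uri` class into a new patched `_Uri` class: as the
`sdk/lib/core/uri.dart` diff further below shows, `Uri` becomes an abstract
interface whose constructors redirect to the `_Uri` implementation class.
A minimal sketch of that interface/implementation pattern, with hypothetical
names (`Resource` and `_Resource` are illustrative, not SDK classes):

  // Sketch only: an abstract interface whose factory constructor
  // redirects to a private implementation class.
  abstract class Resource {
    factory Resource(String name) = _Resource;

    String get name;
  }

  class _Resource implements Resource {
    final String name;
    _Resource(this.name);
  }

  void main() {
    Resource r = new Resource("example");
    print(r.name);  // Prints: example
  }
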
diff --git a/sdk/lib/async/stream.dart b/sdk/lib/async/stream.dart
index eca6c34..7661d9f 100644
--- a/sdk/lib/async/stream.dart
+++ b/sdk/lib/async/stream.dart
@@ -316,26 +316,37 @@
   /**
    * Adds a subscription to this stream.
    *
-   * On each data event from this stream, the subscriber's [onData] handler
-   * is called. If [onData] is null, nothing happens.
+   * Returns a [StreamSubscription] which handles events from the stream using
+   * the provided [onData], [onError] and [onDone] handlers.
+   * The handlers can be changed on the subscription, but they start out
+   * as the provided functions.
    *
-   * On errors from this stream, the [onError] handler is given a
-   * object describing the error.
+   * On each data event from this stream, the subscriber's [onData] handler
+   * is called. If [onData] is `null`, nothing happens.
+   *
+   * On errors from this stream, the [onError] handler is called with the
+   * error object and possibly a stack trace.
    *
    * The [onError] callback must be of type `void onError(error)` or
    * `void onError(error, StackTrace stackTrace)`. If [onError] accepts
-   * two arguments it is called with the stack trace (which could be `null` if
-   * the stream itself received an error without stack trace).
+   * two arguments it is called with the error object and the stack trace
+   * (which could be `null` if the stream itself received an error without
+   * stack trace).
    * Otherwise it is called with just the error object.
    * If [onError] is omitted, any errors on the stream are considered unhandled,
    * and will be passed to the current [Zone]'s error handler.
    * By default unhandled async errors are treated
    * as if they were uncaught top-level errors.
    *
-   * If this stream closes, the [onDone] handler is called.
+   * If this stream closes and sends a done event, the [onDone] handler is
+   * called. If [onDone] is `null`, nothing happens.
    *
-   * If [cancelOnError] is true, the subscription is ended when
-   * the first error is reported. The default is false.
+   * If [cancelOnError] is true, the subscription is automatically cancelled
+   * when the first error event is delivered. The default is `false`.
+   *
+   * While a subscription is paused, or when it has been cancelled,
+   * the subscription doesn't receive events and none of the
+   * event handler functions are called.
    */
   StreamSubscription<T> listen(void onData(T event),
                                { Function onError,
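
The rewritten `listen` documentation above spells out the handler contract.
A small usage sketch consistent with that contract (illustrative only):

  import 'dart:async';

  void main() {
    var controller = new StreamController<int>();
    controller.stream.listen(
        (int event) => print('data: $event'),  // onData
        onError: (error, StackTrace stackTrace) {
          // Two-argument form; stackTrace may be null if the stream
          // received an error without a stack trace.
          print('error: $error');
        },
        onDone: () => print('done'),
        cancelOnError: false);  // Keep the subscription alive after errors.

    controller.add(1);
    controller.addError(new StateError('oops'));
    controller.add(2);   // Still delivered, since cancelOnError is false.
    controller.close();  // Triggers the onDone handler.
  }
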
diff --git a/sdk/lib/async/timer.dart b/sdk/lib/async/timer.dart
index 1bbb65b..135c868 100644
--- a/sdk/lib/async/timer.dart
+++ b/sdk/lib/async/timer.dart
@@ -4,6 +4,84 @@
 
 part of dart.async;
 
+abstract class _TimerTask implements Timer {
+  final Zone _zone;
+  final Timer _nativeTimer;
+
+  _TimerTask(this._nativeTimer, this._zone);
+
+  void cancel() {
+    _nativeTimer.cancel();
+  }
+
+  bool get isActive => _nativeTimer.isActive;
+}
+
+class _SingleShotTimerTask extends _TimerTask {
+  // TODO(floitsch): the generic argument should be 'void'.
+  final ZoneCallback<dynamic> _callback;
+
+  _SingleShotTimerTask(Timer timer, this._callback, Zone zone)
+      : super(timer, zone);
+}
+
+class _PeriodicTimerTask extends _TimerTask {
+  // TODO(floitsch): the first generic argument should be 'void'.
+  final ZoneUnaryCallback<dynamic, Timer> _callback;
+
+  _PeriodicTimerTask(Timer timer, this._callback, Zone zone)
+      : super(timer, zone);
+}
+
+/**
+ * A task specification for a single-shot timer.
+ *
+ * *Experimental*. Might disappear without notice.
+ */
+class SingleShotTimerTaskSpecification implements TaskSpecification {
+  static const String specificationName = "dart.async.timer";
+
+  /** The duration after which the timer should invoke the [callback]. */
+  final Duration duration;
+
+  /** The callback that should be run when the timer triggers. */
+  // TODO(floitsch): the generic argument should be void.
+  final ZoneCallback<dynamic> callback;
+
+  SingleShotTimerTaskSpecification(this.duration, void this.callback());
+
+  @override
+  String get name => specificationName;
+
+  @override
+  bool get isOneShot => true;
+}
+
+/**
+ * A task specification for a periodic timer.
+ *
+ * *Experimental*. Might disappear without notice.
+ */
+class PeriodicTimerTaskSpecification implements TaskSpecification {
+  static const String specificationName = "dart.async.periodic-timer";
+
+  /** The interval at which the periodic timer should invoke the [callback]. */
+  final Duration duration;
+
+  /** The callback that should be run when the timer triggers. */
+  // TODO(floitsch): the first generic argument should be void.
+  final ZoneUnaryCallback<dynamic, Timer> callback;
+
+  PeriodicTimerTaskSpecification(
+      this.duration, void this.callback(Timer timer));
+
+  @override
+  String get name => specificationName;
+
+  @override
+  bool get isOneShot => false;
+}
+
 /**
  * A count-down timer that can be configured to fire once or repeatedly.
  *
@@ -47,10 +125,15 @@
     if (Zone.current == Zone.ROOT) {
       // No need to bind the callback. We know that the root's timer will
       // be invoked in the root zone.
-      return Zone.current.createTimer(duration, callback);
+      return Timer._createTimer(duration, callback);
     }
-    return Zone.current.createTimer(
-        duration, Zone.current.bindCallback(callback, runGuarded: true));
+    return Zone.current.createTimer(duration, callback);
+  }
+
+  factory Timer._task(Zone zone, Duration duration, void callback()) {
+    SingleShotTimerTaskSpecification specification =
+        new SingleShotTimerTaskSpecification(duration, callback);
+    return zone.createTask(_createSingleShotTimerTask, specification);
   }
 
   /**
@@ -70,17 +153,65 @@
    * scheduled for - even if the actual callback was delayed.
    */
   factory Timer.periodic(Duration duration,
-                         void callback(Timer timer)) {
+      void callback(Timer timer)) {
     if (Zone.current == Zone.ROOT) {
       // No need to bind the callback. We know that the root's timer will
       // be invoked in the root zone.
-      return Zone.current.createPeriodicTimer(duration, callback);
+      return Timer._createPeriodicTimer(duration, callback);
     }
+    return Zone.current.createPeriodicTimer(duration, callback);
+  }
+
+  factory Timer._periodicTask(Zone zone, Duration duration,
+      void callback(Timer timer)) {
+    PeriodicTimerTaskSpecification specification =
+        new PeriodicTimerTaskSpecification(duration, callback);
+    return zone.createTask(_createPeriodicTimerTask, specification);
+  }
+
+  static Timer _createSingleShotTimerTask(
+      SingleShotTimerTaskSpecification specification, Zone zone) {
+    ZoneCallback registeredCallback = identical(_ROOT_ZONE, zone)
+        ? specification.callback
+        : zone.registerCallback(specification.callback);
+
+    _TimerTask timerTask;
+
+    Timer nativeTimer = Timer._createTimer(specification.duration, () {
+      timerTask._zone.runTask(_runSingleShotCallback, timerTask, null);
+    });
+
+    timerTask = new _SingleShotTimerTask(nativeTimer, registeredCallback, zone);
+    return timerTask;
+  }
+
+  static void _runSingleShotCallback(_SingleShotTimerTask timerTask, Object _) {
+    timerTask._callback();
+  }
+
+  static Timer _createPeriodicTimerTask(
+      PeriodicTimerTaskSpecification specification, Zone zone) {
     // TODO(floitsch): the return type should be 'void', and the type
     // should be inferred.
-    var boundCallback = Zone.current.bindUnaryCallback/*<dynamic, Timer>*/(
-        callback, runGuarded: true);
-    return Zone.current.createPeriodicTimer(duration, boundCallback);
+    ZoneUnaryCallback<dynamic, Timer> registeredCallback =
+        identical(_ROOT_ZONE, zone)
+        ? specification.callback
+        : zone.registerUnaryCallback/*<dynamic, Timer>*/(
+            specification.callback);
+
+    _TimerTask timerTask;
+
+    Timer nativeTimer =
+        Timer._createPeriodicTimer(specification.duration, (Timer _) {
+      timerTask._zone.runTask(_runPeriodicCallback, timerTask, null);
+    });
+
+    timerTask = new _PeriodicTimerTask(nativeTimer, registeredCallback, zone);
+    return timerTask;
+  }
+
+  static void _runPeriodicCallback(_PeriodicTimerTask timerTask, Object _) {
+    timerTask._callback(timerTask);
   }
 
   /**
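
With the changes above, a timer created outside the root zone is routed
through `Zone.current.createTimer`, and the root implementation now wraps the
native timer in a task (`Timer._task`, which calls `zone.createTask` with a
`SingleShotTimerTaskSpecification`). The observable behavior is intended to
be unchanged: the callback still fires in the zone that created the timer. A
minimal sketch of that behavior (illustrative only):

  import 'dart:async';

  void main() {
    runZoned(() {
      // Created in a child zone, so this goes through
      // Zone.current.createTimer and ends up as a single-shot timer task.
      new Timer(const Duration(milliseconds: 10), () {
        // The callback runs in the creating (child) zone.
        print('fired in: ${Zone.current['name']}');  // fired in: child
      });
    }, zoneValues: {'name': 'child'});
  }
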
diff --git a/sdk/lib/async/zone.dart b/sdk/lib/async/zone.dart
index 24f83f8..e0c3e4b 100644
--- a/sdk/lib/async/zone.dart
+++ b/sdk/lib/async/zone.dart
@@ -8,6 +8,13 @@
 typedef R ZoneUnaryCallback<R, T>(T arg);
 typedef R ZoneBinaryCallback<R, T1, T2>(T1 arg1, T2 arg2);
 
+/// *Experimental*. Might disappear without warning.
+typedef T TaskCreate<T, S extends TaskSpecification>(
+    S specification, Zone zone);
+/// *Experimental*. Might disappear without warning.
+typedef void TaskRun<T, A>(T task, A arg);
+
+
 // TODO(floitsch): we are abusing generic typedefs as typedefs for generic
 // functions.
 /*ABUSE*/
@@ -33,19 +40,31 @@
     Zone self, ZoneDelegate parent, Zone zone, R f(T1 arg1, T2 arg2));
 typedef AsyncError ErrorCallbackHandler(Zone self, ZoneDelegate parent,
     Zone zone, Object error, StackTrace stackTrace);
+/// *Experimental*. Might disappear without warning.
+/*ABUSE*/
+typedef T CreateTaskHandler<T, S extends TaskSpecification>(
+    Zone self, ZoneDelegate parent, Zone zone,
+    TaskCreate<T, S> create, S taskSpecification);
+/// *Experimental*. Might disappear without warning.
+/*ABUSE*/
+typedef void RunTaskHandler<T, A>(Zone self, ZoneDelegate parent, Zone zone,
+    TaskRun<T, A> run, T task, A arg);
 typedef void ScheduleMicrotaskHandler(
     Zone self, ZoneDelegate parent, Zone zone, void f());
-typedef Timer CreateTimerHandler(
-    Zone self, ZoneDelegate parent, Zone zone, Duration duration, void f());
-typedef Timer CreatePeriodicTimerHandler(
-    Zone self, ZoneDelegate parent, Zone zone,
-    Duration period, void f(Timer timer));
 typedef void PrintHandler(
     Zone self, ZoneDelegate parent, Zone zone, String line);
 typedef Zone ForkHandler(Zone self, ZoneDelegate parent, Zone zone,
                          ZoneSpecification specification,
                          Map zoneValues);
 
+// The following typedef declarations are used by functionality that
+// will be removed and replaced by tasks if the task experiment is successful.
+typedef Timer CreateTimerHandler(
+    Zone self, ZoneDelegate parent, Zone zone, Duration duration, void f());
+typedef Timer CreatePeriodicTimerHandler(
+    Zone self, ZoneDelegate parent, Zone zone,
+    Duration period, void f(Timer timer));
+
 /** Pair of error and stack trace. Returned by [Zone.errorCallback]. */
 class AsyncError implements Error {
   final Object error;
@@ -56,10 +75,41 @@
   String toString() => '$error';
 }
 
+/**
+ * A task specification contains the necessary information to create a task.
+ *
+ * See [Zone.createTask] for how a specification is used to create a task.
+ *
+ * Task specifications should be public, and it should be possible for
+ * users to create new instances. That is, custom zones should be able to
+ * replace an existing specification with a modified one.
+ *
+ * *Experimental*. This class might disappear without warning.
+ */
+abstract class TaskSpecification {
+  /**
+   * Description of the task.
+   *
+   * This string is unused by the root zone, but might be used for debugging
+   * and testing. As such, it should be relatively unique in its category.
+   *
+   * As a general guideline we recommend: "package-name.library.action".
+   */
+  String get name;
+
+  /**
+   * Whether the scheduled task triggers at most once.
+   *
+   * If the task is not a one-shot task, it may need to be canceled to prevent
+   * further iterations of the task.
+   */
+  bool get isOneShot;
+}
 
 class _ZoneFunction<T extends Function> {
   final _Zone zone;
   final T function;
+
   const _ZoneFunction(this.zone, this.function);
 }
 
@@ -85,6 +135,9 @@
 abstract class ZoneSpecification {
   /**
    * Creates a specification with the provided handlers.
+   *
+   * The task-related parameters ([createTask] and [runTask]) are experimental
+   * and might be removed without warning.
    */
   const factory ZoneSpecification({
       HandleUncaughtErrorHandler handleUncaughtError,
@@ -96,7 +149,11 @@
       RegisterBinaryCallbackHandler registerBinaryCallback,
       ErrorCallbackHandler errorCallback,
       ScheduleMicrotaskHandler scheduleMicrotask,
+      CreateTaskHandler createTask,
+      RunTaskHandler runTask,
+      // TODO(floitsch): mark as deprecated once tasks are non-experimental.
       CreateTimerHandler createTimer,
+      // TODO(floitsch): mark as deprecated once tasks are non-experimental.
       CreatePeriodicTimerHandler createPeriodicTimer,
       PrintHandler print,
       ForkHandler fork
@@ -105,6 +162,9 @@
   /**
    * Creates a specification from [other] with the provided handlers overriding
    * the ones in [other].
+   *
+   * The task-related parameters ([createTask] and [runTask]) are experimental
+   * and might be removed without warning.
    */
   factory ZoneSpecification.from(ZoneSpecification other, {
       HandleUncaughtErrorHandler handleUncaughtError: null,
@@ -116,7 +176,11 @@
       RegisterBinaryCallbackHandler registerBinaryCallback: null,
       ErrorCallbackHandler errorCallback: null,
       ScheduleMicrotaskHandler scheduleMicrotask: null,
+      CreateTaskHandler createTask: null,
+      RunTaskHandler runTask: null,
+      // TODO(floitsch): mark as deprecated once tasks are non-experimental.
       CreateTimerHandler createTimer: null,
+      // TODO(floitsch): mark as deprecated once tasks are non-experimental.
       CreatePeriodicTimerHandler createPeriodicTimer: null,
       PrintHandler print: null,
       ForkHandler fork: null
@@ -132,11 +196,14 @@
       registerBinaryCallback: registerBinaryCallback ??
                               other.registerBinaryCallback,
       errorCallback: errorCallback ?? other.errorCallback,
+
+      createTask: createTask ?? other.createTask,
+      runTask: runTask ?? other.runTask,
+      print : print ?? other.print,
+      fork: fork ?? other.fork,
       scheduleMicrotask: scheduleMicrotask ?? other.scheduleMicrotask,
       createTimer : createTimer ?? other.createTimer,
-      createPeriodicTimer: createPeriodicTimer ?? other.createPeriodicTimer,
-      print : print ?? other.print,
-      fork: fork ?? other.fork);
+      createPeriodicTimer: createPeriodicTimer ?? other.createPeriodicTimer);
   }
 
   HandleUncaughtErrorHandler get handleUncaughtError;
@@ -148,10 +215,17 @@
   RegisterBinaryCallbackHandler get registerBinaryCallback;
   ErrorCallbackHandler get errorCallback;
   ScheduleMicrotaskHandler get scheduleMicrotask;
-  CreateTimerHandler get createTimer;
-  CreatePeriodicTimerHandler get createPeriodicTimer;
+  /// *Experimental*. Might disappear without warning.
+  CreateTaskHandler get createTask;
+  /// *Experimental*. Might disappear without warning.
+  RunTaskHandler get runTask;
   PrintHandler get print;
   ForkHandler get fork;
+
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  CreateTimerHandler get createTimer;
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  CreatePeriodicTimerHandler get createPeriodicTimer;
 }
 
 /**
@@ -172,10 +246,14 @@
     this.registerBinaryCallback: null,
     this.errorCallback: null,
     this.scheduleMicrotask: null,
-    this.createTimer: null,
-    this.createPeriodicTimer: null,
+    this.createTask: null,
+    this.runTask: null,
     this.print: null,
-    this.fork: null
+    this.fork: null,
+    // TODO(floitsch): deprecate once tasks are non-experimental.
+    this.createTimer: null,
+    // TODO(floitsch): deprecate once tasks are non-experimental.
+    this.createPeriodicTimer: null
   });
 
   final HandleUncaughtErrorHandler handleUncaughtError;
@@ -187,10 +265,15 @@
   final RegisterBinaryCallbackHandler registerBinaryCallback;
   final ErrorCallbackHandler errorCallback;
   final ScheduleMicrotaskHandler scheduleMicrotask;
-  final CreateTimerHandler createTimer;
-  final CreatePeriodicTimerHandler createPeriodicTimer;
+  final CreateTaskHandler createTask;
+  final RunTaskHandler runTask;
   final PrintHandler print;
   final ForkHandler fork;
+
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  final CreateTimerHandler createTimer;
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  final CreatePeriodicTimerHandler createPeriodicTimer;
 }
 
 /**
@@ -217,10 +300,23 @@
       Zone zone, /*=R*/ f(/*=T1*/ arg1, /*=T2*/ arg2));
   AsyncError errorCallback(Zone zone, Object error, StackTrace stackTrace);
   void scheduleMicrotask(Zone zone, void f());
-  Timer createTimer(Zone zone, Duration duration, void f());
-  Timer createPeriodicTimer(Zone zone, Duration period, void f(Timer timer));
+
+  /// *Experimental*. Might disappear without notice.
+  Object/*=T*/ createTask/*<T, S extends TaskSpecification>*/(
+      Zone zone, TaskCreate/*<T, S>*/ create,
+      TaskSpecification/*=S*/ specification);
+  /// *Experimental*. Might disappear without notice.
+  void runTask/*<T, A>*/(
+      Zone zone, TaskRun/*<T, A>*/ run, Object/*=T*/ task,
+      Object/*=A*/ argument);
+
   void print(Zone zone, String line);
   Zone fork(Zone zone, ZoneSpecification specification, Map zoneValues);
+
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  Timer createTimer(Zone zone, Duration duration, void f());
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  Timer createPeriodicTimer(Zone zone, Duration period, void f(Timer timer));
 }
 
 /**
@@ -411,13 +507,102 @@
   void scheduleMicrotask(void f());
 
   /**
+   * Creates a task in the current zone.
+   *
+   * A task represents an asynchronous operation or process that reports back
+   * through the event loop.
+   *
+   * This function allows the zone to intercept the initialization of the
+   * task; the [runTask] function is invoked when the task reports back.
+   *
+   * By default, in the root zone, the [create] function is invoked with the
+   * [specification] as argument. It returns a task object which is used for all
+   * future interactions between the zone and the task. The object is
+   * a unique instance representing the task. It is generally returned to
+   * whoever initiated the task.
+   * For example, the HTML library uses the returned [StreamSubscription] as
+   * task object when users register an event listener.
+   *
+   * Tasks are created when the program starts an operation that reports back
+   * through the event loop. For example, timers and HTTP requests both
+   * return through the event loop and are therefore tasks.
+   *
+   * If the [create] function is not invoked (because a custom zone has
+   * replaced or intercepted it), then the operation is *not* started. This
+   * means that a custom zone can intercept tasks, like HTTP requests.
+   *
+   * A task goes through the following steps:
+   * - a user invokes a library function that should eventually return through
+   *   the event loop.
+   * - the library function creates a [TaskSpecification] that contains the
+   *   necessary information to start the operation, and invokes
+   *   `Zone.current.createTask` with the specification and a [create] closure.
+   *   The closure, when invoked, uses the specification to start the operation
+   *   (usually by interacting with the underlying system, or as a native
+   *   extension), and returns a task object that identifies the running task.
+   * - custom zones handle the request and (unless completely intercepted and
+   *   aborted) end up calling the root zone's [createTask], which runs the
+   *   provided `create` closure (which may have been replaced at this point).
+   * - later, the asynchronous operation returns through the event loop.
+   *   It invokes [Zone.runTask] on the zone in which the task should run
+   *   (and which was originally passed to the `create` function by
+   *   `createTask`). The [runTask] function receives the
+   *   task object, a `run` function and an argument. As before, custom zones
+   *   may intercept this call. Eventually (unless aborted), the `run` function
+   *   is invoked. This last step may happen multiple times for tasks that are
+   *   not one-shot tasks (see [TaskSpecification.isOneShot]).
+   *
+   * Custom zones may replace the [specification] with a different one, thus
+   * modifying the task parameters. An operation that wishes to be an
+   * interceptable task must publicly specify the types that intercepting code
+   * sees:
+   * - The specification type (extending [TaskSpecification]) which holds the
+   *   information available when intercepting the `createTask` call.
+   * - The task object type, returned by `createTask` and [create]. This object
+   *   may simply be typed as [Object].
+   * - The argument type, if [runTask] takes a meaningful argument.
+   *
+   * *Experimental*. Might disappear without notice.
+   */
+  Object/*=T*/ createTask/*<T, S extends TaskSpecification>*/(
+      /*=T*/ create(TaskSpecification/*=S*/ specification, Zone zone),
+      TaskSpecification/*=S*/ specification);
+
+  /**
+   * Runs a task callback.
+   *
+   * This function is invoked when an operation, started through [createTask],
+   * generates an event.
+   *
+   * Generally, tasks schedule Dart code in the global event loop when the
+   * [createTask] function is invoked. Since the
+   * event loop does not expect any return value from the code it runs, the
+   * [runTask] function is a void function.
+   *
+   * The [task] object must be the same as the one created with [createTask].
+   *
+   * It is good practice that task operations provide a meaningful [argument],
+   * so that custom zones can interact with it. They might want to log or
+   * replace the argument before calling the [run] function.
+   *
+   * See [createTask].
+   *
+   * *Experimental*. Might disappear without notice.
+   */
+  void runTask/*<T, A>*/(
+      /*=T*/ run(/*=T*/ task, /*=A*/ argument), Object/*=T*/ task,
+      Object/*=A*/ argument);
+
+  /**
    * Creates a Timer where the callback is executed in this zone.
    */
+  // TODO(floitsch): deprecate once tasks are non-experimental.
   Timer createTimer(Duration duration, void callback());
 
   /**
    * Creates a periodic Timer where the callback is executed in this zone.
    */
+  // TODO(floitsch): deprecate once tasks are non-experimental.
   Timer createPeriodicTimer(Duration period, void callback(Timer timer));
 
   /**
@@ -523,7 +708,7 @@
     // TODO(floitsch): make this a generic method call on '<R>' once it's
     // supported. Remove the unnecessary cast.
     return handler(implZone, _parentDelegate(implZone), zone, f)
-        as Object/*=ZoneCallback<R>*/;
+        as dynamic/*=ZoneCallback<R>*/;
   }
 
   ZoneUnaryCallback/*<R, T>*/ registerUnaryCallback/*<R, T>*/(
@@ -534,7 +719,7 @@
     // TODO(floitsch): make this a generic method call on '<R, T>' once it's
     // supported. Remove the unnecessary cast.
     return handler(implZone, _parentDelegate(implZone), zone, f)
-        as Object/*=ZoneUnaryCallback<R, T>*/;
+        as dynamic/*=ZoneUnaryCallback<R, T>*/;
   }
 
   ZoneBinaryCallback/*<R, T1, T2>*/ registerBinaryCallback/*<R, T1, T2>*/(
@@ -545,7 +730,7 @@
     // TODO(floitsch): make this a generic method call on '<R, T1, T2>' once
     // it's supported. Remove the unnecessary cast.
     return handler(implZone, _parentDelegate(implZone), zone, f)
-        as Object/*=ZoneBinaryCallback<R, T1, T2>*/;
+        as dynamic/*=ZoneBinaryCallback<R, T1, T2>*/;
   }
 
   AsyncError errorCallback(Zone zone, Object error, StackTrace stackTrace) {
@@ -564,18 +749,25 @@
     handler(implZone, _parentDelegate(implZone), zone, f);
   }
 
-  Timer createTimer(Zone zone, Duration duration, void f()) {
-    var implementation = _delegationTarget._createTimer;
+  Object/*=T*/ createTask/*<T, S extends TaskSpecification>*/(
+      Zone zone, TaskCreate/*<T, S>*/ create, TaskSpecification/*=S*/ specification) {
+    var implementation = _delegationTarget._createTask;
     _Zone implZone = implementation.zone;
-    CreateTimerHandler handler = implementation.function;
-    return handler(implZone, _parentDelegate(implZone), zone, duration, f);
+    // TODO(floitsch): make the handler call a generic method call on '<T, S>'
+    // once it's supported. Remove the unnecessary cast.
+    var handler =
+        implementation.function as CreateTaskHandler/*<T, S>*/;
+    return handler(
+        implZone, _parentDelegate(implZone), zone, create, specification);
   }
 
-  Timer createPeriodicTimer(Zone zone, Duration period, void f(Timer timer)) {
-    var implementation = _delegationTarget._createPeriodicTimer;
+  void runTask/*<T, A>*/(Zone zone, TaskRun run, Object /*=T*/ task,
+      Object /*=A*/ argument) {
+    var implementation = _delegationTarget._runTask;
     _Zone implZone = implementation.zone;
-    CreatePeriodicTimerHandler handler = implementation.function;
-    return handler(implZone, _parentDelegate(implZone), zone, period, f);
+    RunTaskHandler handler = implementation.function;
+    // TODO(floitsch): make this a generic call on '<T, A>'.
+    handler(implZone, _parentDelegate(implZone), zone, run, task, argument);
   }
 
   void print(Zone zone, String line) {
@@ -593,6 +785,22 @@
     return handler(
         implZone, _parentDelegate(implZone), zone, specification, zoneValues);
   }
+
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  Timer createTimer(Zone zone, Duration duration, void f()) {
+    var implementation = _delegationTarget._createTimer;
+    _Zone implZone = implementation.zone;
+    CreateTimerHandler handler = implementation.function;
+    return handler(implZone, _parentDelegate(implZone), zone, duration, f);
+  }
+
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  Timer createPeriodicTimer(Zone zone, Duration period, void f(Timer timer)) {
+    var implementation = _delegationTarget._createPeriodicTimer;
+    _Zone implZone = implementation.zone;
+    CreatePeriodicTimerHandler handler = implementation.function;
+    return handler(implZone, _parentDelegate(implZone), zone, period, f);
+  }
 }
 
 
@@ -610,11 +818,17 @@
   _ZoneFunction<RegisterBinaryCallbackHandler> get _registerBinaryCallback;
   _ZoneFunction<ErrorCallbackHandler> get _errorCallback;
   _ZoneFunction<ScheduleMicrotaskHandler> get _scheduleMicrotask;
-  _ZoneFunction<CreateTimerHandler> get _createTimer;
-  _ZoneFunction<CreatePeriodicTimerHandler> get _createPeriodicTimer;
+  _ZoneFunction<CreateTaskHandler> get _createTask;
+  _ZoneFunction<RunTaskHandler> get _runTask;
   _ZoneFunction<PrintHandler> get _print;
   _ZoneFunction<ForkHandler> get _fork;
   _ZoneFunction<HandleUncaughtErrorHandler> get _handleUncaughtError;
+
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  _ZoneFunction<CreateTimerHandler> get _createTimer;
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  _ZoneFunction<CreatePeriodicTimerHandler> get _createPeriodicTimer;
+
   _Zone get parent;
   ZoneDelegate get _delegate;
   Map get _map;
@@ -636,12 +850,17 @@
   _ZoneFunction<RegisterBinaryCallbackHandler> _registerBinaryCallback;
   _ZoneFunction<ErrorCallbackHandler> _errorCallback;
   _ZoneFunction<ScheduleMicrotaskHandler> _scheduleMicrotask;
-  _ZoneFunction<CreateTimerHandler> _createTimer;
-  _ZoneFunction<CreatePeriodicTimerHandler> _createPeriodicTimer;
+  _ZoneFunction<CreateTaskHandler> _createTask;
+  _ZoneFunction<RunTaskHandler> _runTask;
   _ZoneFunction<PrintHandler> _print;
   _ZoneFunction<ForkHandler> _fork;
   _ZoneFunction<HandleUncaughtErrorHandler> _handleUncaughtError;
 
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  _ZoneFunction<CreateTimerHandler> _createTimer;
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  _ZoneFunction<CreatePeriodicTimerHandler> _createPeriodicTimer;
+
   // A cached delegate to this zone.
   ZoneDelegate _delegateCache;
 
@@ -692,13 +911,14 @@
         ? new _ZoneFunction<ScheduleMicrotaskHandler>(
             this, specification.scheduleMicrotask)
         : parent._scheduleMicrotask;
-    _createTimer = (specification.createTimer != null)
-        ? new _ZoneFunction<CreateTimerHandler>(this, specification.createTimer)
-        : parent._createTimer;
-    _createPeriodicTimer = (specification.createPeriodicTimer != null)
-        ? new _ZoneFunction<CreatePeriodicTimerHandler>(
-            this, specification.createPeriodicTimer)
-        : parent._createPeriodicTimer;
+    _createTask = (specification.createTask != null)
+        ? new _ZoneFunction<CreateTaskHandler>(
+            this, specification.createTask)
+        : parent._createTask;
+    _runTask = (specification.runTask != null)
+        ? new _ZoneFunction<RunTaskHandler>(
+            this, specification.runTask)
+        : parent._runTask;
     _print = (specification.print != null)
         ? new _ZoneFunction<PrintHandler>(this, specification.print)
         : parent._print;
@@ -709,6 +929,16 @@
         ? new _ZoneFunction<HandleUncaughtErrorHandler>(
             this, specification.handleUncaughtError)
         : parent._handleUncaughtError;
+
+    // Deprecated fields, once tasks are non-experimental.
+    _createTimer = (specification.createTimer != null)
+        ? new _ZoneFunction<CreateTimerHandler>(
+            this, specification.createTimer)
+        : parent._createTimer;
+    _createPeriodicTimer = (specification.createPeriodicTimer != null)
+        ? new _ZoneFunction<CreatePeriodicTimerHandler>(
+            this, specification.createPeriodicTimer)
+        : parent._createPeriodicTimer;
   }
 
   /**
@@ -859,7 +1089,7 @@
     // TODO(floitsch): make this a generic method call on '<R>' once it's
     // supported. Remove the unnecessary cast.
     return handler(implementation.zone, parentDelegate, this, callback)
-        as Object/*=ZoneCallback<R>*/;
+        as dynamic/*=ZoneCallback<R>*/;
   }
 
   ZoneUnaryCallback/*<R, T>*/ registerUnaryCallback/*<R, T>*/(
@@ -871,7 +1101,7 @@
     // TODO(floitsch): make this a generic method call on '<R, T>' once it's
     // supported. Remove the unnecessary cast.
     return handler(implementation.zone, parentDelegate, this, callback)
-        as Object/*=ZoneUnaryCallback<R, T>*/;
+        as dynamic/*=ZoneUnaryCallback<R, T>*/;
   }
 
   ZoneBinaryCallback/*<R, T1, T2>*/ registerBinaryCallback/*<R, T1, T2>*/(
@@ -883,7 +1113,7 @@
     // TODO(floitsch): make this a generic method call on '<R, T1, T2>' once
     // it's supported. Remove the unnecessary cast.
     return handler(implementation.zone, parentDelegate, this, callback)
-        as Object/*=ZoneBinaryCallback<R, T1, T2>*/;
+        as dynamic/*=ZoneBinaryCallback<R, T1, T2>*/;
   }
 
   AsyncError errorCallback(Object error, StackTrace stackTrace) {
@@ -902,24 +1132,29 @@
     assert(implementation != null);
     ZoneDelegate parentDelegate = _parentDelegate(implementation.zone);
     ScheduleMicrotaskHandler handler = implementation.function;
-    return handler(implementation.zone, parentDelegate, this, f);
+    handler(implementation.zone, parentDelegate, this, f);
   }
 
-  Timer createTimer(Duration duration, void f()) {
-    var implementation = this._createTimer;
-    assert(implementation != null);
+  Object/*=T*/ createTask/*<T, S extends TaskSpecification>*/(
+      TaskCreate/*<T, S>*/ create, TaskSpecification/*=S*/ specification) {
+    var implementation = this._createTask;
     ZoneDelegate parentDelegate = _parentDelegate(implementation.zone);
-    CreateTimerHandler handler = implementation.function;
-    return handler(implementation.zone, parentDelegate, this, duration, f);
-  }
-
-  Timer createPeriodicTimer(Duration duration, void f(Timer timer)) {
-    var implementation = this._createPeriodicTimer;
-    assert(implementation != null);
-    ZoneDelegate parentDelegate = _parentDelegate(implementation.zone);
-    CreatePeriodicTimerHandler handler = implementation.function;
+    // TODO(floitsch): make the handler call a generic method call on '<T, S>'
+    // once it's supported. Remove the unnecessary cast.
+    var handler =
+        implementation.function as CreateTaskHandler/*<T, S>*/;
     return handler(
-        implementation.zone, parentDelegate, this, duration, f);
+        implementation.zone, parentDelegate, this, create, specification);
+  }
+
+  void runTask/*<T, A>*/(
+      TaskRun/*<T, A>*/ run, Object/*=T*/ task, Object/*=A*/ arg1) {
+    var implementation = this._runTask;
+    ZoneDelegate parentDelegate = _parentDelegate(implementation.zone);
+    RunTaskHandler handler = implementation.function;
+    // TODO(floitsch): make this a generic method call on '<T, A>' once it's
+    // supported.
+    handler(implementation.zone, parentDelegate, this, run, task, arg1);
   }
 
   void print(String line) {
@@ -929,6 +1164,25 @@
     PrintHandler handler = implementation.function;
     return handler(implementation.zone, parentDelegate, this, line);
   }
+
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  Timer createTimer(Duration duration, void f()) {
+    var implementation = this._createTimer;
+    assert(implementation != null);
+    ZoneDelegate parentDelegate = _parentDelegate(implementation.zone);
+    CreateTimerHandler handler = implementation.function;
+    return handler(implementation.zone, parentDelegate, this, duration, f);
+  }
+
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  Timer createPeriodicTimer(Duration duration, void f(Timer timer)) {
+    var implementation = this._createPeriodicTimer;
+    assert(implementation != null);
+    ZoneDelegate parentDelegate = _parentDelegate(implementation.zone);
+    CreatePeriodicTimerHandler handler = implementation.function;
+    return handler(
+        implementation.zone, parentDelegate, this, duration, f);
+  }
 }
 
 /*=R*/ _rootHandleUncaughtError/*<R>*/(
@@ -1006,22 +1260,39 @@
   _scheduleAsyncCallback(f);
 }
 
+Object/*=T*/ _rootCreateTask/*<T, S extends TaskSpecification>*/(
+    Zone self, ZoneDelegate parent, Zone zone,
+    TaskCreate/*<T, S>*/ create, TaskSpecification/*=S*/ specification) {
+  return create(specification, zone);
+}
+
+void _rootRunTask/*<T, A>*/(
+    Zone self, ZoneDelegate parent, Zone zone, TaskRun run/*<T, A>*/,
+    Object/*=T*/ task, Object/*=A*/ arg) {
+  if (Zone._current == zone) {
+    run(task, arg);
+    return;
+  }
+
+  Zone old = Zone._enter(zone);
+  try {
+    run(task, arg);
+  } catch (e, s) {
+    zone.handleUncaughtError/*<dynamic>*/(e, s);
+  } finally {
+    Zone._leave(old);
+  }
+}
+
 Timer _rootCreateTimer(Zone self, ZoneDelegate parent, Zone zone,
                        Duration duration, void callback()) {
-  if (!identical(_ROOT_ZONE, zone)) {
-    callback = zone.bindCallback(callback);
-  }
-  return Timer._createTimer(duration, callback);
+  return new Timer._task(zone, duration, callback);
 }
 
 Timer _rootCreatePeriodicTimer(
     Zone self, ZoneDelegate parent, Zone zone,
     Duration duration, void callback(Timer timer)) {
-  if (!identical(_ROOT_ZONE, zone)) {
-    // TODO(floitsch): the return type should be 'void'.
-    callback = zone.bindUnaryCallback/*<dynamic, Timer>*/(callback);
-  }
-  return Timer._createPeriodicTimer(duration, callback);
+  return new Timer._periodicTask(zone, duration, callback);
 }
 
 void _rootPrint(Zone self, ZoneDelegate parent, Zone zone, String line) {
@@ -1082,10 +1353,10 @@
   _ZoneFunction<ScheduleMicrotaskHandler> get _scheduleMicrotask =>
       const _ZoneFunction<ScheduleMicrotaskHandler>(
           _ROOT_ZONE, _rootScheduleMicrotask);
-  _ZoneFunction<CreateTimerHandler> get _createTimer =>
-      const _ZoneFunction<CreateTimerHandler>(_ROOT_ZONE, _rootCreateTimer);
-  _ZoneFunction<CreatePeriodicTimerHandler> get _createPeriodicTimer =>
-      const _ZoneFunction<CreatePeriodicTimerHandler>(_ROOT_ZONE, _rootCreatePeriodicTimer);
+  _ZoneFunction<CreateTaskHandler> get _createTask =>
+      const _ZoneFunction<CreateTaskHandler>(_ROOT_ZONE, _rootCreateTask);
+  _ZoneFunction<RunTaskHandler> get _runTask =>
+      const _ZoneFunction<RunTaskHandler>(_ROOT_ZONE, _rootRunTask);
   _ZoneFunction<PrintHandler> get _print =>
       const _ZoneFunction<PrintHandler>(_ROOT_ZONE, _rootPrint);
   _ZoneFunction<ForkHandler> get _fork =>
@@ -1094,6 +1365,14 @@
       const _ZoneFunction<HandleUncaughtErrorHandler>(
           _ROOT_ZONE, _rootHandleUncaughtError);
 
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  _ZoneFunction<CreateTimerHandler> get _createTimer =>
+      const _ZoneFunction<CreateTimerHandler>(_ROOT_ZONE, _rootCreateTimer);
+  // TODO(floitsch): deprecate once tasks are non-experimental.
+  _ZoneFunction<CreatePeriodicTimerHandler> get _createPeriodicTimer =>
+      const _ZoneFunction<CreatePeriodicTimerHandler>(
+          _ROOT_ZONE, _rootCreatePeriodicTimer);
+
   // The parent zone.
   _Zone get parent => null;
 
@@ -1225,6 +1504,16 @@
     _rootScheduleMicrotask(null, null, this, f);
   }
 
+  Object/*=T*/ createTask/*<T, S extends TaskSpecification>*/(
+      TaskCreate/*<T, S>*/ create, TaskSpecification/*=S*/ specification) {
+    return _rootCreateTask/*<T, S>*/(null, null, this, create, specification);
+  }
+
+  void runTask/*<T, A>*/(
+      TaskRun/*<T, A>*/ run, Object/*=T*/ task, Object/*=A*/ arg) {
+    _rootRunTask/*<T, A>*/(null, null, this, run, task, arg);
+  }
+
   Timer createTimer(Duration duration, void f()) {
     return Timer._createTimer(duration, f);
   }
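
A sketch of how a custom zone might observe the experimental
`createTask`/`runTask` hooks introduced above. The API is explicitly marked
experimental and might disappear, so this is illustrative only; the handler
signatures follow the `CreateTaskHandler` and `RunTaskHandler` typedefs from
this diff:

  import 'dart:async';

  void main() {
    var spec = new ZoneSpecification(
        createTask: (Zone self, ZoneDelegate parent, Zone zone,
            TaskCreate create, TaskSpecification specification) {
          print('creating task: ${specification.name} '
                '(one-shot: ${specification.isOneShot})');
          // Delegate to the parent zone; not calling parent.createTask
          // would abort the operation entirely.
          return parent.createTask(zone, create, specification);
        },
        runTask: (Zone self, ZoneDelegate parent, Zone zone,
            TaskRun run, Object task, Object arg) {
          print('task callback about to run');
          parent.runTask(zone, run, task, arg);
        });

    runZoned(() {
      // With this patch, the timer below is created and run as a task,
      // so both handlers above fire.
      new Timer(const Duration(milliseconds: 5), () => print('tick'));
    }, zoneSpecification: spec);
  }
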
diff --git a/sdk/lib/core/uri.dart b/sdk/lib/core/uri.dart
index 5718ca8..17f290d 100644
--- a/sdk/lib/core/uri.dart
+++ b/sdk/lib/core/uri.dart
@@ -4,6 +4,24 @@
 
 part of dart.core;
 
+// Frequently used character codes.
+const int _SPACE = 0x20;
+const int _PERCENT = 0x25;
+const int _PLUS = 0x2B;
+const int _DOT = 0x2E;
+const int _SLASH = 0x2F;
+const int _COLON = 0x3A;
+const int _UPPER_CASE_A = 0x41;
+const int _UPPER_CASE_Z = 0x5A;
+const int _LEFT_BRACKET = 0x5B;
+const int _BACKSLASH = 0x5C;
+const int _RIGHT_BRACKET = 0x5D;
+const int _LOWER_CASE_A = 0x61;
+const int _LOWER_CASE_F = 0x66;
+const int _LOWER_CASE_Z = 0x7A;
+
+const String _hexDigits = "0123456789ABCDEF";
+
 /**
  * A parsed URI, such as a URL.
  *
@@ -15,77 +33,17 @@
  * [uris]: https://www.dartlang.org/docs/dart-up-and-running/ch03.html#uris
  * [libtour]: https://www.dartlang.org/docs/dart-up-and-running/contents/ch03.html
  */
-class Uri {
+abstract class Uri {
   /**
-   * The scheme component of the URI.
+   * Returns the natural base URI for the current platform.
    *
-   * Returns the empty string if there is no scheme component.
+   * When running in a browser this is the URL of the current page
+   * (from `window.location.href`).
    *
-   * A URI scheme is case insensitive.
-   * The returned scheme is canonicalized to lowercase letters.
+   * When not running in a browser this is the file URI referencing
+   * the current working directory.
    */
-  // We represent the missing scheme as an empty string.
-  // A valid scheme cannot be empty.
-  final String scheme;
-
-  /**
-   * The user-info part of the authority.
-   *
-   * Does not distinguish between an empty user-info and an absent one.
-   * The value is always non-null.
-   * Is considered absent if [_host] is `null`.
-   */
-  final String _userInfo;
-
-  /**
-   * The host name of the URI.
-   *
-   * Set to `null` if there is no authority in the URI.
-   * The host name is the only mandatory part of an authority, so we use
-   * it to mark whether an authority part was present or not.
-   */
-  final String _host;
-
-  /**
-   * The port number part of the authority.
-   *
-   * The port. Set to null if there is no port. Normalized to null if
-   * the port is the default port for the scheme.
-   */
-  int _port;
-
-  /**
-   * The path of the URI.
-   *
-   * Always non-null.
-   */
-  String _path;
-
-  // The query content, or null if there is no query.
-  final String _query;
-
-  // The fragment content, or null if there is no fragment.
-  final String _fragment;
-
-  /**
-   * Cache the computed return value of [pathSegements].
-   */
-  List<String> _pathSegments;
-
-  /**
-   * Cache the computed return value of [queryParameters].
-   */
-  Map<String, String> _queryParameters;
-  Map<String, List<String>> _queryParameterLists;
-
-  /// Internal non-verifying constructor. Only call with validated arguments.
-  Uri._internal(this.scheme,
-                this._userInfo,
-                this._host,
-                this._port,
-                this._path,
-                this._query,
-                this._fragment);
+  external static Uri get base;
 
   /**
    * Creates a new URI from its components.
@@ -158,39 +116,15 @@
    * general delimiters, are escaped if necessary.
    * If `fragment` is omitted or `null`, the URI has no fragment part.
    */
-  factory Uri({String scheme : "",
-               String userInfo : "",
+  factory Uri({String scheme,
+               String userInfo,
                String host,
                int port,
                String path,
                Iterable<String> pathSegments,
                String query,
                Map<String, dynamic/*String|Iterable<String>*/> queryParameters,
-               String fragment}) {
-    scheme = _makeScheme(scheme, 0, _stringOrNullLength(scheme));
-    userInfo = _makeUserInfo(userInfo, 0, _stringOrNullLength(userInfo));
-    host = _makeHost(host, 0, _stringOrNullLength(host), false);
-    // Special case this constructor for backwards compatibility.
-    if (query == "") query = null;
-    query = _makeQuery(query, 0, _stringOrNullLength(query), queryParameters);
-    fragment = _makeFragment(fragment, 0, _stringOrNullLength(fragment));
-    port = _makePort(port, scheme);
-    bool isFile = (scheme == "file");
-    if (host == null &&
-        (userInfo.isNotEmpty || port != null || isFile)) {
-      host = "";
-    }
-    bool hasAuthority = (host != null);
-    path = _makePath(path, 0, _stringOrNullLength(path), pathSegments,
-                     scheme, hasAuthority);
-    if (scheme.isEmpty && host == null && !path.startsWith('/')) {
-      path = _normalizeRelativePath(path);
-    } else {
-      path = _removeDotSegments(path);
-    }
-    return new Uri._internal(scheme, userInfo, host, port,
-                             path, query, fragment);
-  }
+               String fragment}) = _Uri;
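+
+  // A usage sketch (illustrative values): each component is individually
+  // encoded and normalized by the constructor.
+  //
+  //     var uri = new Uri(scheme: "http", host: "example.org",
+  //                       pathSegments: ["a b"], queryParameters: {"x": "1"});
+  //     print(uri);  // http://example.org/a%20b?x=1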
 
   /**
    * Creates a new `http` URI from authority, path and query.
@@ -227,9 +161,7 @@
    */
   factory Uri.http(String authority,
                    String unencodedPath,
-                   [Map<String, String> queryParameters]) {
-    return _makeHttpUri("http", authority, unencodedPath, queryParameters);
-  }
+                   [Map<String, String> queryParameters]) = _Uri.http;
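+
+  // For example (illustrative authority and path):
+  //
+  //     var uri = new Uri.http("example.org:8080", "/search", {"q": "dart"});
+  //     print(uri);  // http://example.org:8080/search?q=dart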
 
   /**
    * Creates a new `https` URI from authority, path and query.
@@ -239,424 +171,7 @@
    */
   factory Uri.https(String authority,
                     String unencodedPath,
-                    [Map<String, String> queryParameters]) {
-    return _makeHttpUri("https", authority, unencodedPath, queryParameters);
-  }
-
-  /**
-   * Returns the authority component.
-   *
-   * The authority is formatted from the [userInfo], [host] and [port]
-   * parts.
-   *
-   * Returns the empty string if there is no authority component.
-   */
-  String get authority {
-    if (!hasAuthority) return "";
-    var sb = new StringBuffer();
-    _writeAuthority(sb);
-    return sb.toString();
-  }
-
-  /**
-   * Returns the user info part of the authority component.
-   *
-   * Returns the empty string if there is no user info in the
-   * authority component.
-   */
-  String get userInfo => _userInfo;
-
-  /**
-   * Returns the host part of the authority component.
-   *
-   * Returns the empty string if there is no authority component and
-   * hence no host.
-   *
-   * If the host is an IP version 6 address, the surrounding `[` and `]` is
-   * removed.
-   *
-   * The host string is case-insensitive.
-   * The returned host name is canonicalized to lower-case
-   * with upper-case percent-escapes.
-   */
-  String get host {
-    if (_host == null) return "";
-    if (_host.startsWith('[')) {
-      return _host.substring(1, _host.length - 1);
-    }
-    return _host;
-  }
-
-  /**
-   * Returns the port part of the authority component.
-   *
-   * Returns the defualt port if there is no port number in the authority
-   * component. That's 80 for http, 443 for https, and 0 for everything else.
-   */
-  int get port {
-    if (_port == null) return _defaultPort(scheme);
-    return _port;
-  }
-
-  // The default port for the scheme of this Uri..
-  static int _defaultPort(String scheme) {
-    if (scheme == "http") return 80;
-    if (scheme == "https") return 443;
-    return 0;
-  }
-
-  /**
-   * Returns the path component.
-   *
-   * The returned path is encoded. To get direct access to the decoded
-   * path use [pathSegments].
-   *
-   * Returns the empty string if there is no path component.
-   */
-  String get path => _path;
-
-  /**
-   * Returns the query component. The returned query is encoded. To get
-   * direct access to the decoded query use [queryParameters].
-   *
-   * Returns the empty string if there is no query component.
-   */
-  String get query => (_query == null) ? "" : _query;
-
-  /**
-   * Returns the fragment identifier component.
-   *
-   * Returns the empty string if there is no fragment identifier
-   * component.
-   */
-  String get fragment => (_fragment == null) ? "" : _fragment;
-
-  /**
-   * Creates a new `Uri` object by parsing a URI string.
-   *
-   * If [start] and [end] are provided, only the substring from `start`
-   * to `end` is parsed as a URI.
-   *
-   * If the string is not valid as a URI or URI reference,
-   * a [FormatException] is thrown.
-   */
-  static Uri parse(String uri, [int start = 0, int end]) {
-    // This parsing will not validate percent-encoding, IPv6, etc.
-    // When done splitting into parts, it will call, e.g., [_makeFragment]
-    // to do the final parsing.
-    //
-    // Important parts of the RFC 3986 used here:
-    // URI           = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
-    //
-    // hier-part     = "//" authority path-abempty
-    //               / path-absolute
-    //               / path-rootless
-    //               / path-empty
-    //
-    // URI-reference = URI / relative-ref
-    //
-    // absolute-URI  = scheme ":" hier-part [ "?" query ]
-    //
-    // relative-ref  = relative-part [ "?" query ] [ "#" fragment ]
-    //
-    // relative-part = "//" authority path-abempty
-    //               / path-absolute
-    //               / path-noscheme
-    //               / path-empty
-    //
-    // scheme        = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
-    //
-    // authority     = [ userinfo "@" ] host [ ":" port ]
-    // userinfo      = *( unreserved / pct-encoded / sub-delims / ":" )
-    // host          = IP-literal / IPv4address / reg-name
-    // port          = *DIGIT
-    // reg-name      = *( unreserved / pct-encoded / sub-delims )
-    //
-    // path          = path-abempty    ; begins with "/" or is empty
-    //               / path-absolute   ; begins with "/" but not "//"
-    //               / path-noscheme   ; begins with a non-colon segment
-    //               / path-rootless   ; begins with a segment
-    //               / path-empty      ; zero characters
-    //
-    // path-abempty  = *( "/" segment )
-    // path-absolute = "/" [ segment-nz *( "/" segment ) ]
-    // path-noscheme = segment-nz-nc *( "/" segment )
-    // path-rootless = segment-nz *( "/" segment )
-    // path-empty    = 0<pchar>
-    //
-    // segment       = *pchar
-    // segment-nz    = 1*pchar
-    // segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" )
-    //               ; non-zero-length segment without any colon ":"
-    //
-    // pchar         = unreserved / pct-encoded / sub-delims / ":" / "@"
-    //
-    // query         = *( pchar / "/" / "?" )
-    //
-    // fragment      = *( pchar / "/" / "?" )
-    const int EOI = -1;
-
-    String scheme = "";
-    String userinfo = "";
-    String host = null;
-    int port = null;
-    String path = null;
-    String query = null;
-    String fragment = null;
-    if (end == null) end = uri.length;
-
-    int index = start;
-    int pathStart = start;
-    // End of input-marker.
-    int char = EOI;
-
-    void parseAuth() {
-      if (index == end) {
-        char = EOI;
-        return;
-      }
-      int authStart = index;
-      int lastColon = -1;
-      int lastAt = -1;
-      char = uri.codeUnitAt(index);
-      while (index < end) {
-        char = uri.codeUnitAt(index);
-        if (char == _SLASH || char == _QUESTION || char == _NUMBER_SIGN) {
-          break;
-        }
-        if (char == _AT_SIGN) {
-          lastAt = index;
-          lastColon = -1;
-        } else if (char == _COLON) {
-          lastColon = index;
-        } else if (char == _LEFT_BRACKET) {
-          lastColon = -1;
-          int endBracket = uri.indexOf(']', index + 1);
-          if (endBracket == -1) {
-            index = end;
-            char = EOI;
-            break;
-          } else {
-            index = endBracket;
-          }
-        }
-        index++;
-        char = EOI;
-      }
-      int hostStart = authStart;
-      int hostEnd = index;
-      if (lastAt >= 0) {
-        userinfo = _makeUserInfo(uri, authStart, lastAt);
-        hostStart = lastAt + 1;
-      }
-      if (lastColon >= 0) {
-        int portNumber;
-        if (lastColon + 1 < index) {
-          portNumber = 0;
-          for (int i = lastColon + 1; i < index; i++) {
-            int digit = uri.codeUnitAt(i);
-            if (_ZERO > digit || _NINE < digit) {
-              _fail(uri, i, "Invalid port number");
-            }
-            portNumber = portNumber * 10 + (digit - _ZERO);
-          }
-        }
-        port = _makePort(portNumber, scheme);
-        hostEnd = lastColon;
-      }
-      host = _makeHost(uri, hostStart, hostEnd, true);
-      if (index < end) {
-        char = uri.codeUnitAt(index);
-      }
-    }
-
-    // When reaching path parsing, the current character is known to not
-    // be part of the path.
-    const int NOT_IN_PATH = 0;
-    // When reaching path parsing, the current character is part
-    // of the a non-empty path.
-    const int IN_PATH = 1;
-    // When reaching authority parsing, authority is possible.
-    // This is only true at start or right after scheme.
-    const int ALLOW_AUTH = 2;
-
-    // Current state.
-    // Initialized to the default value that is used when exiting the
-    // scheme loop by reaching the end of input.
-    // All other breaks set their own state.
-    int state = NOT_IN_PATH;
-    int i = index;  // Temporary alias for index to avoid bug 19550 in dart2js.
-    while (i < end) {
-      char = uri.codeUnitAt(i);
-      if (char == _QUESTION || char == _NUMBER_SIGN) {
-        state = NOT_IN_PATH;
-        break;
-      }
-      if (char == _SLASH) {
-        state = (i == start) ? ALLOW_AUTH : IN_PATH;
-        break;
-      }
-      if (char == _COLON) {
-        if (i == start) _fail(uri, start, "Invalid empty scheme");
-        scheme = _makeScheme(uri, start, i);
-        i++;
-        if (scheme == "data") {
-          // This generates a URI that is (potentially) not path normalized.
-          // Applying part normalization to a non-hierarchial URI isn't
-          // meaningful.
-          return UriData._parse(uri, i, null).uri;
-        }
-        pathStart = i;
-        if (i == end) {
-          char = EOI;
-          state = NOT_IN_PATH;
-        } else {
-          char = uri.codeUnitAt(i);
-          if (char == _QUESTION || char == _NUMBER_SIGN) {
-            state = NOT_IN_PATH;
-          } else if (char == _SLASH) {
-            state = ALLOW_AUTH;
-          } else {
-            state = IN_PATH;
-          }
-        }
-        break;
-      }
-      i++;
-      char = EOI;
-    }
-    index = i;  // Remove alias when bug is fixed.
-
-    if (state == ALLOW_AUTH) {
-      assert(char == _SLASH);
-      // Have seen one slash either at start or right after scheme.
-      // If two slashes, it's an authority, otherwise it's just the path.
-      index++;
-      if (index == end) {
-        char = EOI;
-        state = NOT_IN_PATH;
-      } else {
-        char = uri.codeUnitAt(index);
-        if (char == _SLASH) {
-          index++;
-          parseAuth();
-          pathStart = index;
-        }
-        if (char == _QUESTION || char == _NUMBER_SIGN || char == EOI) {
-          state = NOT_IN_PATH;
-        } else {
-          state = IN_PATH;
-        }
-      }
-    }
-
-    assert(state == IN_PATH || state == NOT_IN_PATH);
-    if (state == IN_PATH) {
-      // Characters from pathStart to index (inclusive) are known
-      // to be part of the path.
-      while (++index < end) {
-        char = uri.codeUnitAt(index);
-        if (char == _QUESTION || char == _NUMBER_SIGN) {
-          break;
-        }
-        char = EOI;
-      }
-      state = NOT_IN_PATH;
-    }
-
-    assert(state == NOT_IN_PATH);
-    bool hasAuthority = (host != null);
-    path = _makePath(uri, pathStart, index, null, scheme, hasAuthority);
-
-    if (char == _QUESTION) {
-      int numberSignIndex = -1;
-      for (int i = index + 1; i < end; i++) {
-        if (uri.codeUnitAt(i) == _NUMBER_SIGN) {
-          numberSignIndex = i;
-          break;
-        }
-      }
-      if (numberSignIndex < 0) {
-        query = _makeQuery(uri, index + 1, end, null);
-      } else {
-        query = _makeQuery(uri, index + 1, numberSignIndex, null);
-        fragment = _makeFragment(uri, numberSignIndex + 1, end);
-      }
-    } else if (char == _NUMBER_SIGN) {
-      fragment = _makeFragment(uri, index + 1, end);
-    }
-    return new Uri._internal(scheme,
-                             userinfo,
-                             host,
-                             port,
-                             path,
-                             query,
-                             fragment);
-  }
-
-  // Report a parse failure.
-  static void _fail(String uri, int index, String message) {
-    throw new FormatException(message, uri, index);
-  }
-
-  static Uri _makeHttpUri(String scheme,
-                          String authority,
-                          String unencodedPath,
-                          Map<String, String> queryParameters) {
-    var userInfo = "";
-    var host = null;
-    var port = null;
-
-    if (authority != null && authority.isNotEmpty) {
-      var hostStart = 0;
-      // Split off the user info.
-      bool hasUserInfo = false;
-      for (int i = 0; i < authority.length; i++) {
-        if (authority.codeUnitAt(i) == _AT_SIGN) {
-          hasUserInfo = true;
-          userInfo = authority.substring(0, i);
-          hostStart = i + 1;
-          break;
-        }
-      }
-      var hostEnd = hostStart;
-      if (hostStart < authority.length &&
-          authority.codeUnitAt(hostStart) == _LEFT_BRACKET) {
-        // IPv6 host.
-        for (; hostEnd < authority.length; hostEnd++) {
-          if (authority.codeUnitAt(hostEnd) == _RIGHT_BRACKET) break;
-        }
-        if (hostEnd == authority.length) {
-          throw new FormatException("Invalid IPv6 host entry.",
-                                    authority, hostStart);
-        }
-        parseIPv6Address(authority, hostStart + 1, hostEnd);
-        hostEnd++;  // Skip the closing bracket.
-        if (hostEnd != authority.length &&
-            authority.codeUnitAt(hostEnd) != _COLON) {
-          throw new FormatException("Invalid end of authority",
-                                    authority, hostEnd);
-        }
-      }
-      // Split host and port.
-      bool hasPort = false;
-      for (; hostEnd < authority.length; hostEnd++) {
-        if (authority.codeUnitAt(hostEnd) == _COLON) {
-          var portString = authority.substring(hostEnd + 1);
-          // We allow the empty port - falling back to initial value.
-          if (portString.isNotEmpty) port = int.parse(portString);
-          break;
-        }
-      }
-      host = authority.substring(hostStart, hostEnd);
-    }
-    return new Uri(scheme: scheme,
-                   userInfo: userInfo,
-                   host: host,
-                   port: port,
-                   pathSegments: unencodedPath.split("/"),
-                   queryParameters: queryParameters);
-  }
+                    [Map<String, String> queryParameters]) = _Uri.https;
 
   /**
    * Creates a new file URI from an absolute or relative file path.
@@ -739,11 +254,7 @@
    *
    * If the path passed is not a legal file path [ArgumentError] is thrown.
    */
-  factory Uri.file(String path, {bool windows}) {
-    windows = (windows == null) ? Uri._isWindows : windows;
-    return windows ? _makeWindowsFileUrl(path, false)
-                   : _makeFileUri(path, false);
-  }
+  factory Uri.file(String path, {bool windows}) = _Uri.file;
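+
+  // For example (illustrative paths; `windows` selects the path semantics):
+  //
+  //     new Uri.file("/home/user/notes.txt");
+  //     // -> file:///home/user/notes.txt
+  //     new Uri.file(r"C:\data\notes.txt", windows: true);
+  //     // -> file:///C:/data/notes.txt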
 
   /**
    * Like [Uri.file] except that a non-empty URI path ends in a slash.
@@ -752,11 +263,7 @@
    * then a slash is added to the returned URI's path.
    * In all other cases, the result is the same as returned by `Uri.file`.
    */
-  factory Uri.directory(String path, {bool windows}) {
-    windows = (windows == null) ? Uri._isWindows : windows;
-    return windows ? _makeWindowsFileUrl(path, true)
-                   : _makeFileUri(path, true);
-  }
+  factory Uri.directory(String path, {bool windows}) = _Uri.directory;
 
   /**
    * Creates a `data:` URI containing the [content] string.
@@ -827,16 +334,1280 @@
   }
 
   /**
-   * Returns the natural base URI for the current platform.
+   * The scheme component of the URI.
    *
-   * When running in a browser this is the current URL (from
-   * `window.location.href`).
+   * Returns the empty string if there is no scheme component.
    *
-   * When not running in a browser this is the file URI referencing
-   * the current working directory.
+   * A URI scheme is case insensitive.
+   * The returned scheme is canonicalized to lowercase letters.
    */
-  external static Uri get base;
+  String get scheme;
 
+  /**
+   * Returns the authority component.
+   *
+   * The authority is formatted from the [userInfo], [host] and [port]
+   * parts.
+   *
+   * Returns the empty string if there is no authority component.
+   */
+  String get authority;
+
+  /**
+   * Returns the user info part of the authority component.
+   *
+   * Returns the empty string if there is no user info in the
+   * authority component.
+   */
+  String get userInfo;
+
+  /**
+   * Returns the host part of the authority component.
+   *
+   * Returns the empty string if there is no authority component and
+   * hence no host.
+   *
+   * If the host is an IP version 6 address, the surrounding `[` and `]` is
+   * removed.
+   *
+   * The host string is case-insensitive.
+   * The returned host name is canonicalized to lower-case
+   * with upper-case percent-escapes.
+   */
+  String get host;
+
+  /**
+   * Returns the port part of the authority component.
+   *
+   * Returns the default port if there is no port number in the authority
+   * component. That's 80 for http, 443 for https, and 0 for everything else.
+   */
+  int get port;
+
+  /**
+   * Returns the path component.
+   *
+   * The returned path is encoded. To get direct access to the decoded
+   * path use [pathSegments].
+   *
+   * Returns the empty string if there is no path component.
+   */
+  String get path;
+
+  /**
+   * Returns the query component. The returned query is encoded. To get
+   * direct access to the decoded query use [queryParameters].
+   *
+   * Returns the empty string if there is no query component.
+   */
+  String get query;
+
+  /**
+   * Returns the fragment identifier component.
+   *
+   * Returns the empty string if there is no fragment identifier
+   * component.
+   */
+  String get fragment;
+
+  /**
+   * Returns the URI path split into its segments. Each of the segments in the
+   * returned list has been decoded. If the path is empty, the empty list is
+   * returned. A leading slash `/` does not affect the segments returned.
+   *
+   * The returned list is unmodifiable and will throw [UnsupportedError] on any
+   * calls that would mutate it.
+   */
+  List<String> get pathSegments;
+
+  /**
+   * Returns the URI query split into a map according to the rules
+   * specified for FORM post in the [HTML 4.01 specification section
+   * 17.13.4](http://www.w3.org/TR/REC-html40/interact/forms.html#h-17.13.4 "HTML 4.01 section 17.13.4").
+   * Each key and value in the returned map has been decoded.
+   * If there is no query the empty map is returned.
+   *
+   * Keys in the query string that have no value are mapped to the
+   * empty string.
+   * If a key occurs more than once in the query string, it is mapped to
+   * an arbitrary one of its values.
+   * The [queryParametersAll] getter can provide a map
+   * that maps keys to all of their values.
+   *
+   * The returned map is unmodifiable.
+   */
+  Map<String, String> get queryParameters;
+
+  /**
+   * Returns the URI query split into a map according to the rules
+   * specified for FORM post in the [HTML 4.01 specification section
+   * 17.13.4](http://www.w3.org/TR/REC-html40/interact/forms.html#h-17.13.4 "HTML 4.01 section 17.13.4").
+   * Each key and value in the returned map has been decoded. If there is no
+   * query the empty map is returned.
+   *
+   * Keys are mapped to lists of their values. If a key occurs only once,
+   * its value is a singleton list. If a key occurs with no value, the
+   * empty string is used as the value for that occurrence.
+   *
+   * The returned map and the lists it contains are unmodifiable.
+   */
+  Map<String, List<String>> get queryParametersAll;
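+
+  // A sketch of how the three getters above differ (illustrative URI):
+  //
+  //     var uri = Uri.parse("/a/b%20c?x=1&x=2&y");
+  //     uri.pathSegments;        // ["a", "b c"]
+  //     uri.queryParameters;     // {"x": one of "1"/"2", "y": ""}
+  //     uri.queryParametersAll;  // {"x": ["1", "2"], "y": [""]}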
+
+  /**
+   * Returns whether the URI is absolute.
+   *
+   * A URI is an absolute URI in the sense of RFC 3986 if it has a scheme
+   * and no fragment.
+   */
+  bool get isAbsolute;
+
+  /**
+   * Returns whether the URI has a [scheme] component.
+   */
+  bool get hasScheme => scheme.isNotEmpty;
+
+  /**
+   * Returns whether the URI has an [authority] component.
+   */
+  bool get hasAuthority;
+
+  /**
+   * Returns whether the URI has an explicit port.
+   *
+   * If the port number is the default port number for the scheme
+   * (80 for http, 443 for https, and zero for unrecognized schemes),
+   * the port is made implicit and omitted from the URI.
+   */
+  bool get hasPort;
+
+  /**
+   * Returns whether the URI has a query part.
+   */
+  bool get hasQuery;
+
+  /**
+   * Returns whether the URI has a fragment part.
+   */
+  bool get hasFragment;
+
+  /**
+   * Returns whether the URI has an empty path.
+   */
+  bool get hasEmptyPath;
+
+  /**
+   * Returns whether the URI has an absolute path (starting with '/').
+   */
+  bool get hasAbsolutePath;
+
+  /**
+   * Returns the origin of the URI in the form scheme://host:port for the
+   * schemes http and https.
+   *
+   * It is an error if the scheme is not "http" or "https".
+   *
+   * See: http://www.w3.org/TR/2011/WD-html5-20110405/origin-0.html#origin
+   */
+  String get origin;
+
+  /**
+   * Returns the file path from a file URI.
+   *
+   * The returned path has either Windows or non-Windows
+   * semantics.
+   *
+   * For non-Windows semantics the slash ("/") is used to separate
+   * path segments.
+   *
+   * For Windows semantics the backslash ("\") separator is used to
+   * separate path segments.
+   *
+   * If the URI is absolute the path starts with a path separator
+   * unless Windows semantics is used and the first path segment is a
+   * drive letter. When Windows semantics is used, a host component in
+   * the URI is interpreted as a file server and a UNC path is
+   * returned.
+   *
+   * The default for whether to use Windows or non-Windows semantics
+   * is determined from the platform Dart is running on. When running in
+   * the standalone VM this is detected by the VM based on the
+   * operating system. When running in a browser non-Windows semantics
+   * is always used.
+   *
+   * To override the automatic detection of which semantics to use pass
+   * a value for [windows]. Passing `true` will use Windows
+   * semantics and passing `false` will use non-Windows semantics.
+   *
+   * If the URI ends with a slash (i.e. the last path component is
+   * empty) the returned file path will also end with a slash.
+   *
+   * With Windows semantics URIs starting with a drive letter cannot
+   * be relative to the current drive on the designated drive. That is,
+   * for the URI `file:///c:abc`, calling `toFilePath` throws because a
+   * path segment cannot contain a colon on Windows.
+   *
+   * Examples using non-Windows semantics (result of calling
+   * toFilePath in comment):
+   *
+   *     Uri.parse("xxx/yyy");  // xxx/yyy
+   *     Uri.parse("xxx/yyy/");  // xxx/yyy/
+   *     Uri.parse("file:///xxx/yyy");  // /xxx/yyy
+   *     Uri.parse("file:///xxx/yyy/");  // /xxx/yyy/
+   *     Uri.parse("file:///C:");  // /C:
+   *     Uri.parse("file:///C:a");  // /C:a
+   *
+   * Examples using Windows semantics (result of calling toFilePath
+   * in comment):
+   *
+   *     Uri.parse("xxx/yyy");  // xxx\yyy
+   *     Uri.parse("xxx/yyy/");  // xxx\yyy\
+   *     Uri.parse("file:///xxx/yyy");  // \xxx\yyy
+   *     Uri.parse("file:///xxx/yyy/");  // \xxx\yyy/
+   *     Uri.parse("file:///C:/xxx/yyy");  // C:\xxx\yyy
+   *     Uri.parse("file:C:xxx/yyy");  // Throws as a path segment
+   *                                   // cannot contain colon on Windows.
+   *     Uri.parse("file://server/share/file");  // \\server\share\file
+   *
+   * If the URI is not a file URI calling this throws
+   * [UnsupportedError].
+   *
+   * If the URI cannot be converted to a file path calling this throws
+   * [UnsupportedError].
+   */
+  // TODO(lrn): Deprecate and move functionality to File class or similar.
+  // The core libraries should not worry about the platform.
+  String toFilePath({bool windows});
+
+  /**
+   * Access the structure of a `data:` URI.
+   *
+   * Returns a [UriData] object for `data:` URIs and `null` for all other
+   * URIs.
+   * The [UriData] object can be used to access the media type and data
+   * of a `data:` URI.
+   */
+  UriData get data;
+
+  /// Returns a hash code computed as `toString().hashCode`.
+  ///
+  /// This guarantees that URIs with the same normalized representation
+  /// have the same hash code.
+  int get hashCode;
+
+  /// A URI is equal to another URI with the same normalized representation.
+  bool operator==(Object other);
+
+  /// Returns the normalized string representation of the URI.
+  String toString();
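+
+  // Consequently, two spellings that normalize the same way compare equal
+  // (illustrative):
+  //
+  //     Uri.parse("HTTP://Example.COM/a") == Uri.parse("http://example.com/a")
+  //     // true: the scheme and host are canonicalized to lower case.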
+
+  /**
+   * Returns a new `Uri` based on this one, but with some parts replaced.
+   *
+   * This method takes the same parameters as the [new Uri] constructor,
+   * and they have the same meaning.
+   *
+   * At most one of [path] and [pathSegments] must be provided.
+   * Likewise, at most one of [query] and [queryParameters] must be provided.
+   *
+   * Each part that is not provided will default to the corresponding
+   * value from this `Uri` instead.
+   *
+   * This method is different from [Uri.resolve], which overrides in a
+   * hierarchical manner,
+   * and can instead replace each part of a `Uri` individually.
+   *
+   * Example:
+   *
+   *     Uri uri1 = Uri.parse("a://b@c:4/d/e?f#g");
+   *     Uri uri2 = uri1.replace(scheme: "A", path: "D/E/E", fragment: "G");
+   *     print(uri2);  // prints "a://b@c:4/D/E/E?f#G"
+   *
+   * This method acts similarly to using the `new Uri` constructor with
+   * some of the arguments taken from this `Uri`. Example:
+   *
+   *     Uri uri3 = new Uri(
+   *         scheme: "A",
+   *         userInfo: uri1.userInfo,
+   *         host: uri1.host,
+   *         port: uri1.port,
+   *         path: "D/E/E",
+   *         query: uri1.query,
+   *         fragment: "G");
+   *     print(uri3);  // prints "a://b@c:4/D/E/E?f#G"
+   *     print(uri2 == uri3);  // prints true.
+   *
+   * Using this method can be seen as a shorthand for the `Uri` constructor
+   * call above, but may also be slightly faster because the parts taken
+   * from this `Uri` need not be checked for validity again.
+   */
+  Uri replace({String scheme,
+               String userInfo,
+               String host,
+               int port,
+               String path,
+               Iterable<String> pathSegments,
+               String query,
+               Map<String, dynamic/*String|Iterable<String>*/> queryParameters,
+               String fragment});
+
+  /**
+   * Returns a `Uri` that differs from this only in not having a fragment.
+   *
+   * If this `Uri` does not have a fragment, it is itself returned.
+   */
+  Uri removeFragment();
+
+  /**
+   * Resolve [reference] as a URI relative to `this`.
+   *
+   * First turn [reference] into a URI using [Uri.parse]. Then resolve the
+   * resulting URI relative to `this`.
+   *
+   * Returns the resolved URI.
+   *
+   * See [resolveUri] for details.
+   */
+  Uri resolve(String reference);
+
+  /**
+   * Resolve [reference] as a URI relative to `this`.
+   *
+   * Returns the resolved URI.
+   *
+   * The algorithm "Transform Reference" for resolving a reference is described
+   * in [RFC-3986 Section 5](http://tools.ietf.org/html/rfc3986#section-5 "RFC-3986").
+   *
+   * Updated to handle the case where the base URI is just a relative path -
+   * that is: when it has no scheme or authority and the path does not start
+   * with a slash.
+   * In that case, the paths are combined without removing leading "..", and
+   * an empty path is not converted to "/".
+   */
+  Uri resolveUri(Uri reference);
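+
+  // A resolution sketch (illustrative base URI):
+  //
+  //     var base = Uri.parse("http://example.org/a/b/c?q");
+  //     base.resolve("d");          // http://example.org/a/b/d
+  //     base.resolve("../d");       // http://example.org/a/d
+  //     base.resolve("//other/p");  // http://other/p
+  //     base.resolve("#frag");      // http://example.org/a/b/c?q#frag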
+
+  /**
+   * Returns a URI where the path has been normalized.
+   *
+   * A normalized path does not contain `.` segments or non-leading `..`
+   * segments.
+   * Only a relative path with no scheme or authority may contain
+   * leading `..` segments;
+   * a path that starts with `/` will also drop any leading `..` segments.
+   *
+   * This uses the same normalization strategy as `new Uri().resolve(this)`.
+   *
+   * Does not change any part of the URI except the path.
+   *
+   * A URI created by the `new Uri` constructor always has a normalized
+   * path, so calling this function on such a URI has no effect. A URI
+   * created by [Uri.parse] keeps its dot segments, so normalizing it
+   * can change the path.
+   */
+  Uri normalizePath();
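+
+  // For example (illustrative relative and rooted paths):
+  //
+  //     Uri.parse("a/./b/../c").normalizePath();  // a/c
+  //     Uri.parse("/../a/b").normalizePath();     // /a/b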
+
+  /**
+   * Creates a new `Uri` object by parsing a URI string.
+   *
+   * If [start] and [end] are provided, only the substring from `start`
+   * to `end` is parsed as a URI.
+   *
+   * If the string is not valid as a URI or URI reference,
+   * a [FormatException] is thrown.
+   */
+  static Uri parse(String uri, [int start = 0, int end]) {
+    // This parsing will not validate percent-encoding, IPv6, etc.
+    // When done splitting into parts, it will call, e.g., [_makeFragment]
+    // to do the final parsing.
+    //
+    // Important parts of the RFC 3986 used here:
+    // URI           = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
+    //
+    // hier-part     = "//" authority path-abempty
+    //               / path-absolute
+    //               / path-rootless
+    //               / path-empty
+    //
+    // URI-reference = URI / relative-ref
+    //
+    // absolute-URI  = scheme ":" hier-part [ "?" query ]
+    //
+    // relative-ref  = relative-part [ "?" query ] [ "#" fragment ]
+    //
+    // relative-part = "//" authority path-abempty
+    //               / path-absolute
+    //               / path-noscheme
+    //               / path-empty
+    //
+    // scheme        = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
+    //
+    // authority     = [ userinfo "@" ] host [ ":" port ]
+    // userinfo      = *( unreserved / pct-encoded / sub-delims / ":" )
+    // host          = IP-literal / IPv4address / reg-name
+    // port          = *DIGIT
+    // reg-name      = *( unreserved / pct-encoded / sub-delims )
+    //
+    // path          = path-abempty    ; begins with "/" or is empty
+    //               / path-absolute   ; begins with "/" but not "//"
+    //               / path-noscheme   ; begins with a non-colon segment
+    //               / path-rootless   ; begins with a segment
+    //               / path-empty      ; zero characters
+    //
+    // path-abempty  = *( "/" segment )
+    // path-absolute = "/" [ segment-nz *( "/" segment ) ]
+    // path-noscheme = segment-nz-nc *( "/" segment )
+    // path-rootless = segment-nz *( "/" segment )
+    // path-empty    = 0<pchar>
+    //
+    // segment       = *pchar
+    // segment-nz    = 1*pchar
+    // segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" )
+    //               ; non-zero-length segment without any colon ":"
+    //
+    // pchar         = unreserved / pct-encoded / sub-delims / ":" / "@"
+    //
+    // query         = *( pchar / "/" / "?" )
+    //
+    // fragment      = *( pchar / "/" / "?" )
+    end ??= uri.length;
+
+    // Special-case data: URIs. Ignore case when testing the scheme.
+    if (end >= start + 5) {
+      int dataDelta = _startsWithData(uri, start);
+      if (dataDelta == 0) {
+        // The case is right.
+        if (start > 0 || end < uri.length) uri = uri.substring(start, end);
+        return UriData._parse(uri, 5, null).uri;
+      } else if (dataDelta == 0x20) {
+        return UriData._parse(uri.substring(start + 5, end), 0, null).uri;
+      }
+      // Otherwise the URI doesn't start with "data:" or any case variant of it.
+    }
+
+    // The following index-normalization belongs with the scanning, but is
+    // easier to do here because we already have extracted variables from the
+    // indices list.
+    var indices = new List<int>(8);
+
+    // Set default values for each position.
+    // The value will either be correct in some cases where it isn't set
+    // by the scanner, or it is clearly recognizable as an unset value.
+    indices
+      ..[0] = 0
+      ..[_schemeEndIndex] = start - 1
+      ..[_hostStartIndex] = start - 1
+      ..[_notSimpleIndex] = start - 1
+      ..[_portStartIndex] = start
+      ..[_pathStartIndex] = start
+      ..[_queryStartIndex] = end
+      ..[_fragmentStartIndex] = end;
+    var state = _scan(uri, start, end, _uriStart, indices);
+    // Some states that should be non-simple, but the URI ended early.
+    // Paths that end at a ".." must be normalized to end in "../".
+    if (state >= _nonSimpleEndStates) {
+      indices[_notSimpleIndex] = end;
+    }
+    int schemeEnd = indices[_schemeEndIndex];
+    if (schemeEnd >= start) {
+      // Rescan the scheme part now that we know it's not a path.
+      state = _scan(uri, start, schemeEnd, _schemeStart, indices);
+      if (state == _schemeStart) {
+        // Empty scheme.
+        indices[_notSimpleIndex] = schemeEnd;
+      }
+    }
+    // The returned positions are limited by the scanner's ability to write only
+    // one position per character, and only the current position.
+    // Scanning from left to right, we only know whether something is a scheme
+    // or a path when we see a `:` or `/`, and likewise we only know if the first
+    // `/` is part of the path or is leading an authority component when we see
+    // the next character.
+
+    int hostStart     = indices[_hostStartIndex] + 1;
+    int portStart     = indices[_portStartIndex];
+    int pathStart     = indices[_pathStartIndex];
+    int queryStart    = indices[_queryStartIndex];
+    int fragmentStart = indices[_fragmentStartIndex];
+
+    // We may discover scheme while handling special cases.
+    String scheme;
+
+    // Derive some positions that weren't set to normalize the indices.
+    // If pathStart isn't set (it's before scheme end or host start), then
+    // the path is empty.
+    if (fragmentStart < queryStart) queryStart = fragmentStart;
+    if (pathStart < hostStart || pathStart <= schemeEnd) {
+      pathStart = queryStart;
+    }
+    // If there is an authority with no port, set the port position
+    // to be at the end of the authority (equal to pathStart).
+    // This also handles a ":" in a user-info component incorrectly setting
+    // the port start position.
+    if (portStart < hostStart) portStart = pathStart;
+
+    assert(hostStart == start || schemeEnd <= hostStart);
+    assert(hostStart <= portStart);
+    assert(schemeEnd <= pathStart);
+    assert(portStart <= pathStart);
+    assert(pathStart <= queryStart);
+    assert(queryStart <= fragmentStart);
+
+    bool isSimple = indices[_notSimpleIndex] < start;
+
+    if (isSimple) {
+      // Check/do normalizations that weren't detected by the scanner.
+      // This includes removal of empty port or userInfo,
+      // or scheme specific port and path normalizations.
+      if (hostStart > schemeEnd + 3) {
+        // Always be non-simple if URI contains user-info.
+        // The scanner doesn't set the not-simple position in this case because
+        // it's setting the host-start position instead.
+        isSimple = false;
+      } else if (portStart > start && portStart + 1 == pathStart) {
+        // If the port is empty, it should be omitted.
+        // Pathological case, don't bother correcting it.
+        isSimple = false;
+      } else if (queryStart < end &&
+                 (queryStart == pathStart + 2 &&
+                  uri.startsWith("..", pathStart)) ||
+                 (queryStart > pathStart + 2 &&
+                  uri.startsWith("/..", queryStart - 3))) {
+        // The path ends in a ".." segment. This should be normalized to "../".
+        // We didn't detect this while scanning because a query or fragment was
+        // detected at the same time (which is why we only need to check this
+        // if there is something after the path).
+        isSimple = false;
+      } else {
+        // There are a few scheme-based normalizations that
+        // the scanner couldn't check.
+        // That means that the input is very close to simple, so just do
+        // the normalizations.
+        if (schemeEnd == start + 4) {
+          // Do scheme based normalizations for file, http.
+          if (uri.startsWith("file", start)) {
+            scheme = "file";
+            if (hostStart <= start) {
+              // File URIs should have an authority.
+              // Paths after an authority should be absolute.
+              String schemeAuth = "file://";
+              int delta = 2;
+              if (!uri.startsWith("/", pathStart)) {
+                schemeAuth = "file:///";
+                delta = 3;
+              }
+              uri = schemeAuth + uri.substring(pathStart, end);
+              schemeEnd -= start;
+              hostStart = 7;
+              portStart = 7;
+              pathStart = 7;
+              queryStart += delta - start;
+              fragmentStart += delta - start;
+              start = 0;
+              end = uri.length;
+            } else if (pathStart == queryStart) {
+              // Uri has authority and empty path. Add "/" as path.
+              if (start == 0 && end == uri.length) {
+                uri = uri.replaceRange(pathStart, queryStart, "/");
+                queryStart += 1;
+                fragmentStart += 1;
+                end += 1;
+              } else {
+                uri = "${uri.substring(start, pathStart)}/"
+                      "${uri.substring(queryStart, end)}";
+                schemeEnd -= start;
+                hostStart -= start;
+                portStart -= start;
+                pathStart -= start;
+                queryStart += 1 - start;
+                fragmentStart += 1 - start;
+                start = 0;
+                end = uri.length;
+              }
+            }
+          } else if (uri.startsWith("http", start)) {
+            scheme = "http";
+            // HTTP URIs should not have an explicit port of 80.
+            if (portStart > start && portStart + 3 == pathStart &&
+                uri.startsWith("80", portStart + 1)) {
+              if (start == 0 && end == uri.length) {
+                uri = uri.replaceRange(portStart, pathStart, "");
+                pathStart -= 3;
+                queryStart -= 3;
+                fragmentStart -= 3;
+                end -= 3;
+              } else {
+                uri = uri.substring(start, portStart) +
+                      uri.substring(pathStart, end);
+                schemeEnd -= start;
+                hostStart -= start;
+                portStart -= start;
+                pathStart -= 3 + start;
+                queryStart -= 3 + start;
+                fragmentStart -= 3 + start;
+                start = 0;
+                end = uri.length;
+              }
+            }
+          }
+        } else if (schemeEnd == start + 5 && uri.startsWith("https", start)) {
+          scheme = "https";
+          // HTTPS URIs should not have an explicit port of 443.
+          if (portStart > start && portStart + 4 == pathStart &&
+              uri.startsWith("443", portStart + 1)) {
+            if (start == 0 && end == uri.length) {
+              uri = uri.replaceRange(portStart, pathStart, "");
+              pathStart -= 4;
+              queryStart -= 4;
+              fragmentStart -= 4;
+              end -= 4;  // Removing ":443" drops four characters.
+            } else {
+              uri = uri.substring(start, portStart) +
+                    uri.substring(pathStart, end);
+              schemeEnd -= start;
+              hostStart -= start;
+              portStart -= start;
+              pathStart -= 4 + start;
+              queryStart -= 4 + start;
+              fragmentStart -= 4 + start;
+              start = 0;
+              end = uri.length;
+            }
+          }
+        }
+      }
+    }
+
+    if (isSimple) {
+      if (start > 0 || end < uri.length) {
+        uri = uri.substring(start, end);
+        schemeEnd -= start;
+        hostStart -= start;
+        portStart -= start;
+        pathStart -= start;
+        queryStart -= start;
+        fragmentStart -= start;
+      }
+      return new _SimpleUri(uri, schemeEnd, hostStart, portStart, pathStart,
+                            queryStart, fragmentStart, scheme);
+
+    }
+
+    return new _Uri.notSimple(uri, start, end, schemeEnd, hostStart, portStart,
+                              pathStart, queryStart, fragmentStart, scheme);
+  }
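+
+  // A parsing sketch (illustrative input, following the grammar above):
+  //
+  //     var uri = Uri.parse(
+  //         "http://user@example.org:8042/over/there?name=ferret#nose");
+  //     uri.scheme;    // "http"
+  //     uri.userInfo;  // "user"
+  //     uri.host;      // "example.org"
+  //     uri.port;      // 8042
+  //     uri.path;      // "/over/there"
+  //     uri.query;     // "name=ferret"
+  //     uri.fragment;  // "nose"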
+
+  /**
+   * Encode the string [component] using percent-encoding to make it
+   * safe for literal use as a URI component.
+   *
+   * All characters except uppercase and lowercase letters, digits and
+   * the characters `-_.!~*'()` are percent-encoded. This is the
+   * set of characters specified in RFC 2396 and which is
+   * specified for the `encodeURIComponent` function in ECMA-262 version 5.1.
+   *
+   * When manually encoding path segments or query components remember
+   * to encode each part separately before building the path or query
+   * string.
+   *
+   * For encoding the query part consider using
+   * [encodeQueryComponent].
+   *
+   * To avoid the need for explicitly encoding use the [pathSegments]
+   * and [queryParameters] optional named arguments when constructing
+   * a [Uri].
+   */
+  static String encodeComponent(String component) {
+    return _Uri._uriEncode(_Uri._unreserved2396Table, component, UTF8, false);
+  }
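+
+  // For example (illustrative input):
+  //
+  //     Uri.encodeComponent("a b&c");  // "a%20b%26c"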
+
+  /**
+   * Encode the string [component] according to the HTML 4.01 rules
+   * for encoding the posting of an HTML form as a query string
+   * component.
+   *
+   * The component is first encoded to bytes using [encoding].
+   * The default is to use [UTF8] encoding, which preserves all
+   * the characters that don't need encoding.
+   *
+   * Then the resulting bytes are "percent-encoded". This transforms
+   * spaces (U+0020) to a plus sign ('+') and all bytes that are not
+   * the ASCII decimal digits, letters or one of '-._~' are written as
+   * a percent sign '%' followed by the two-digit hexadecimal
+   * representation of the byte.
+   *
+   * Note that the set of characters which are percent-encoded is a
+   * superset of what HTML 4.01 requires, since it refers to RFC 1738
+   * for reserved characters.
+   *
+   * When manually encoding query components remember to encode each
+   * part separately before building the query string.
+   *
+   * To avoid the need for explicitly encoding the query use the
+   * [queryParameters] optional named arguments when constructing a
+   * [Uri].
+   *
+   * See http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4.2 for more
+   * details.
+   */
+  static String encodeQueryComponent(String component,
+                                     {Encoding encoding: UTF8}) {
+    return _Uri._uriEncode(_Uri._unreservedTable, component, encoding, true);
+  }
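+
+  // Unlike [encodeComponent], a space becomes a plus sign here
+  // (illustrative input):
+  //
+  //     Uri.encodeQueryComponent("a b&c");  // "a+b%26c"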
+
+  /**
+   * Decodes the percent-encoding in [encodedComponent].
+   *
+   * Note that decoding a URI component might change its meaning as
+   * some of the decoded characters could be characters with are
+   * delimiters for a given URI componene type. Always split a URI
+   * component using the delimiters for the component before decoding
+   * the individual parts.
+   *
+   * For handling the [path] and [query] components consider using
+   * [pathSegments] and [queryParameters] to get the separated and
+   * decoded component.
+   */
+  static String decodeComponent(String encodedComponent) {
+    return _Uri._uriDecode(encodedComponent, 0, encodedComponent.length,
+                           UTF8, false);
+  }
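+
+  // For example (illustrative input):
+  //
+  //     Uri.decodeComponent("a%20b%26c");  // "a b&c"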
+
+  /**
+   * Decodes the percent-encoding in [encodedComponent], converting
+   * pluses to spaces.
+   *
+   * It will create a byte-list of the decoded characters, and then use
+   * [encoding] to decode the byte-list to a String. The default encoding is
+   * UTF-8.
+   */
+  static String decodeQueryComponent(
+      String encodedComponent,
+      {Encoding encoding: UTF8}) {
+    return _Uri._uriDecode(encodedComponent, 0, encodedComponent.length,
+                           encoding, true);
+  }
+
+  /**
+   * Encode the string [uri] using percent-encoding to make it
+   * safe for literal use as a full URI.
+   *
+   * All characters except uppercase and lowercase letters, digits and
+   * the characters `!#$&'()*+,-./:;=?@_~` are percent-encoded. This
+   * is the set of characters specified in ECMA-262 version 5.1 for
+   * the encodeURI function.
+   */
+  static String encodeFull(String uri) {
+    return _Uri._uriEncode(_Uri._encodeFullTable, uri, UTF8, false);
+  }
+
+  /**
+   * Decodes the percent-encoding in [uri].
+   *
+   * Note that decoding a full URI might change its meaning as some of
+   * the decoded characters could be reserved characters. In most
+   * cases an encoded URI should be parsed into components using
+   * [Uri.parse] before decoding the separate components.
+   */
+  static String decodeFull(String uri) {
+    return _Uri._uriDecode(uri, 0, uri.length, UTF8, false);
+  }
+
+  /**
+   * Returns the [query] split into a map according to the rules
+   * specified for FORM post in the [HTML 4.01 specification section
+   * 17.13.4](http://www.w3.org/TR/REC-html40/interact/forms.html#h-17.13.4 "HTML 4.01 section 17.13.4").
+   * Each key and value in the returned map has been decoded. If the [query]
+   * is the empty string an empty map is returned.
+   *
+   * Keys in the query string that have no value are mapped to the
+   * empty string.
+   *
+   * Each query component will be decoded using [encoding]. The default encoding
+   * is UTF-8.
+   */
+  static Map<String, String> splitQueryString(String query,
+                                              {Encoding encoding: UTF8}) {
+    return query.split("&").fold({}, (map, element) {
+      int index = element.indexOf("=");
+      if (index == -1) {
+        if (element != "") {
+          map[decodeQueryComponent(element, encoding: encoding)] = "";
+        }
+      } else if (index != 0) {
+        var key = element.substring(0, index);
+        var value = element.substring(index + 1);
+        map[decodeQueryComponent(key, encoding: encoding)] =
+            decodeQueryComponent(value, encoding: encoding);
+      }
+      return map;
+    });
+  }
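+
+  // For example (illustrative query string; a key without `=` maps to ""):
+  //
+  //     Uri.splitQueryString("a=1&b=two%20words&c");
+  //     // {"a": "1", "b": "two words", "c": ""}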
+
+  /**
+   * Parse the [host] as an IP version 4 (IPv4) address, returning the address
+   * as a list of 4 bytes in network byte order (big endian).
+   *
+   * Throws a [FormatException] if [host] is not a valid IPv4 address
+   * representation.
+   */
+  static List<int> parseIPv4Address(String host) =>
+       _parseIPv4Address(host, 0, host.length);
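+
+  // For example (illustrative address):
+  //
+  //     Uri.parseIPv4Address("127.0.0.1");  // [127, 0, 0, 1]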
+
+  /// Implementation of [parseIPv4Address] that can work on a substring.
+  static List<int> _parseIPv4Address(String host, int start, int end) {
+    void error(String msg, int position) {
+      throw new FormatException('Illegal IPv4 address, $msg', host, position);
+    }
+
+    var result = new Uint8List(4);
+    int partIndex = 0;
+    int partStart = start;
+    for (int i = start; i < end; i++) {
+      int char = host.codeUnitAt(i);
+      if (char != _DOT) {
+        if (char ^ 0x30 > 9) {
+          // Fail on a non-digit character.
+          error("invalid character", i);
+        }
+      } else {
+        if (partIndex == 3) {
+          error('IPv4 address should contain exactly 4 parts', i);
+        }
+        int part = int.parse(host.substring(partStart, i));
+        if (part > 255) {
+          error("each part must be in the range 0..255", partStart);
+        }
+        result[partIndex++] = part;
+        partStart = i + 1;
+      }
+    }
+
+    if (partIndex != 3) {
+      error('IPv4 address should contain exactly 4 parts', end);
+    }
+
+    int part = int.parse(host.substring(partStart, end));
+    if (part > 255) {
+      error("each part must be in the range 0..255", partStart);
+    }
+    result[partIndex] = part;
+
+    return result;
+  }
+
+  /**
+   * Parse the [host] as an IP version 6 (IPv6) address, returning the address
+   * as a list of 16 bytes in network byte order (big endian).
+   *
+   * Throws a [FormatException] if [host] is not a valid IPv6 address
+   * representation.
+   *
+   * Acts on the substring from [start] to [end]. If [end] is omitted, it
+   * defaults to the end of the string.
+   *
+   * Some examples of IPv6 addresses:
+   *  * ::1
+   *  * FEDC:BA98:7654:3210:FEDC:BA98:7654:3210
+   *  * 3ffe:2a00:100:7031::1
+   *  * ::FFFF:129.144.52.38
+   *  * 2010:836B:4179::836B:4179
+   */
+  static List<int> parseIPv6Address(String host, [int start = 0, int end]) {
+    if (end == null) end = host.length;
+    // An IPv6 address consists of exactly 8 parts of 1-4 hex digits, separated
+    // by `:`'s, with the following exceptions:
+    //
+    //  - One (and only one) wildcard (`::`) may be present, representing a fill
+    //    of 0's. The IPv6 `::` is thus 16 bytes of `0`.
+    //  - The last two parts may be replaced by an IPv4 "dotted-quad" address.
+
+    // Helper function for reporting a badly formatted IPv6 address.
+    void error(String msg, [position]) {
+      throw new FormatException('Illegal IPv6 address, $msg', host, position);
+    }
+
+    // Parse a hex block.
+    int parseHex(int start, int end) {
+      if (end - start > 4) {
+        error('an IPv6 part can only contain a maximum of 4 hex digits', start);
+      }
+      int value = int.parse(host.substring(start, end), radix: 16);
+      if (value < 0 || value > 0xFFFF) {
+        error('each part must be in the range of `0x0..0xFFFF`', start);
+      }
+      return value;
+    }
+
+    if (end - start < 2) error('address is too short');
+    List<int> parts = [];
+    bool wildcardSeen = false;
+    // Set if seeing a ".", suggesting that there is an IPv4 address.
+    bool seenDot = false;
+    int partStart = start;
+    // Parse all parts, except a potential last one.
+    for (int i = start; i < end; i++) {
+      int char = host.codeUnitAt(i);
+      if (char == _COLON) {
+        if (i == start) {
+          // If we see a `:` in the beginning, expect wildcard.
+          i++;
+          if (host.codeUnitAt(i) != _COLON) {
+            error('invalid start colon.', i);
+          }
+          partStart = i;
+        }
+        if (i == partStart) {
+          // Wildcard. We only allow one.
+          if (wildcardSeen) {
+            error('only one wildcard `::` is allowed', i);
+          }
+          wildcardSeen = true;
+          parts.add(-1);
+        } else {
+          // Found a single colon. Parse [partStart..i] as a hex entry.
+          parts.add(parseHex(partStart, i));
+        }
+        partStart = i + 1;
+      } else if (char == _DOT) {
+        seenDot = true;
+      }
+    }
+    if (parts.length == 0) error('too few parts');
+    bool atEnd = (partStart == end);
+    bool isLastWildcard = (parts.last == -1);
+    if (atEnd && !isLastWildcard) {
+      error('expected a part after last `:`', end);
+    }
+    if (!atEnd) {
+      if (!seenDot) {
+        parts.add(parseHex(partStart, end));
+      } else {
+        List<int> last = _parseIPv4Address(host, partStart, end);
+        parts.add(last[0] << 8 | last[1]);
+        parts.add(last[2] << 8 | last[3]);
+      }
+    }
+    if (wildcardSeen) {
+      if (parts.length > 7) {
+        error('an address with a wildcard must contain at most 7 parts');
+      }
+    } else if (parts.length != 8) {
+      error('an address without a wildcard must contain exactly 8 parts');
+    }
+    List<int> bytes = new Uint8List(16);
+    for (int i = 0, index = 0; i < parts.length; i++) {
+      int value = parts[i];
+      if (value == -1) {
+        int wildCardLength = 9 - parts.length;
+        for (int j = 0; j < wildCardLength; j++) {
+          bytes[index] = 0;
+          bytes[index + 1] = 0;
+          index += 2;
+        }
+      } else {
+        bytes[index] = value >> 8;
+        bytes[index + 1] = value & 0xff;
+        index += 2;
+      }
+    }
+    return bytes;
+  }
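+
+  // For example (illustrative addresses; results are 16-byte lists):
+  //
+  //     Uri.parseIPv6Address("::1");
+  //     // [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]
+  //     Uri.parseIPv6Address("::ffff:127.0.0.1");
+  //     // [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 127, 0, 0, 1]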
+}
+
+class _Uri implements Uri {
+  // We represent the missing scheme as an empty string.
+  // A valid scheme cannot be empty.
+  final String scheme;
+
+  /**
+   * The user-info part of the authority.
+   *
+   * Does not distinguish between an empty user-info and an absent one.
+   * The value is always non-null.
+   * Is considered absent if [_host] is `null`.
+   */
+  final String _userInfo;
+
+  /**
+   * The host name of the URI.
+   *
+   * Set to `null` if there is no authority in the URI.
+   * The host name is the only mandatory part of an authority, so we use
+   * it to mark whether an authority part was present or not.
+   */
+  final String _host;
+
+  /**
+   * The port number part of the authority.
+   *
+   * The port. Set to null if there is no port. Normalized to null if
+   * the port is the default port for the scheme.
+   */
+  int _port;
+
+  /**
+   * The path of the URI.
+   *
+   * Always non-null.
+   */
+  String _path;
+
+  // The query content, or null if there is no query.
+  final String _query;
+
+  // The fragment content, or null if there is no fragment.
+  final String _fragment;
+
+  /**
+   * Cache the computed return value of [pathSegments].
+   */
+  List<String> _pathSegments;
+
+  /**
+   * Cache of the full normalized text representation of the URI.
+   */
+  String _text;
+
+  /**
+   * Cache of the hashCode of [_text].
+   *
+   * Is null until computed.
+   */
+  int _hashCodeCache;
+
+  /**
+   * Cache the computed return value of [queryParameters].
+   */
+  Map<String, String> _queryParameters;
+  Map<String, List<String>> _queryParameterLists;
+
+  /// Internal non-verifying constructor. Only call with validated arguments.
+  _Uri._internal(this.scheme,
+                 this._userInfo,
+                 this._host,
+                 this._port,
+                 this._path,
+                 this._query,
+                 this._fragment);
+
+  /// Create a [_Uri] from parts of [uri].
+  ///
+  /// The parameters specify the start/end of particular components of the URI.
+  /// The [scheme] may contain a string representing a normalized scheme
+  /// component if one has already been discovered.
+  factory _Uri.notSimple(String uri, int start, int end, int schemeEnd,
+                        int hostStart, int portStart, int pathStart,
+                        int queryStart, int fragmentStart, String scheme) {
+    if (scheme == null) {
+      scheme = "";
+      if (schemeEnd > start) {
+        scheme = _makeScheme(uri, start, schemeEnd);
+      } else if (schemeEnd == start) {
+        _fail(uri, start, "Invalid empty scheme");
+      }
+    }
+    String userInfo = "";
+    String host;
+    int port;
+    if (hostStart > start) {
+      int userInfoStart = schemeEnd + 3;
+      if (userInfoStart < hostStart) {
+        userInfo = _makeUserInfo(uri, userInfoStart, hostStart - 1);
+      }
+      host = _makeHost(uri, hostStart, portStart, false);
+      if (portStart + 1 < pathStart) {
+        // Throws a FormatException if the port is not a valid number.
+        port = int.parse(uri.substring(portStart + 1, pathStart), onError: (_) {
+          throw new FormatException("Invalid port", uri, portStart + 1);
+        });
+        port = _makePort(port, scheme);
+      }
+    }
+    String path = _makePath(uri, pathStart, queryStart, null,
+                            scheme, host != null);
+    String query;
+    if (queryStart < fragmentStart) {
+      query = _makeQuery(uri, queryStart + 1, fragmentStart, null);
+    }
+    String fragment;
+    if (fragmentStart < end) {
+      fragment = _makeFragment(uri, fragmentStart + 1, end);
+    }
+    return new _Uri._internal(scheme,
+                              userInfo,
+                              host,
+                              port,
+                              path,
+                              query,
+                              fragment);
+  }
+
+  /// Implementation of [Uri.Uri].
+  factory _Uri({String scheme,
+                String userInfo,
+                String host,
+                int port,
+                String path,
+                Iterable<String> pathSegments,
+                String query,
+                Map<String, dynamic/*String|Iterable<String>*/> queryParameters,
+                String fragment}) {
+    scheme = _makeScheme(scheme, 0, _stringOrNullLength(scheme));
+    userInfo = _makeUserInfo(userInfo, 0, _stringOrNullLength(userInfo));
+    host = _makeHost(host, 0, _stringOrNullLength(host), false);
+    // Special case this constructor for backwards compatibility.
+    if (query == "") query = null;
+    query = _makeQuery(query, 0, _stringOrNullLength(query), queryParameters);
+    fragment = _makeFragment(fragment, 0, _stringOrNullLength(fragment));
+    port = _makePort(port, scheme);
+    bool isFile = (scheme == "file");
+    if (host == null &&
+        (userInfo.isNotEmpty || port != null || isFile)) {
+      host = "";
+    }
+    bool hasAuthority = (host != null);
+    path = _makePath(path, 0, _stringOrNullLength(path), pathSegments,
+                     scheme, hasAuthority);
+    if (scheme.isEmpty && host == null && !path.startsWith('/')) {
+      path = _normalizeRelativePath(path);
+    } else {
+      path = _removeDotSegments(path);
+    }
+    return new _Uri._internal(scheme, userInfo, host, port,
+                              path, query, fragment);
+  }
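+
+  // Example (illustrative) of the normalization choice above:
+  //
+  //     new Uri(path: "../a/./b")             // => "../a/b" (leading ".." kept)
+  //     new Uri(scheme: "s", path: "/a/./b")  // => "s:/a/b" (dot segments removed)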
+
+  /// Implementation of [Uri.http].
+  factory _Uri.http(String authority,
+                    String unencodedPath,
+                    [Map<String, String> queryParameters]) {
+    return _makeHttpUri("http", authority, unencodedPath, queryParameters);
+  }
+
+  /// Implementation of [Uri.https].
+  factory _Uri.https(String authority,
+                     String unencodedPath,
+                     [Map<String, String> queryParameters]) {
+    return _makeHttpUri("https", authority, unencodedPath, queryParameters);
+  }
+
+  String get authority {
+    if (!hasAuthority) return "";
+    var sb = new StringBuffer();
+    _writeAuthority(sb);
+    return sb.toString();
+  }
+
+  String get userInfo => _userInfo;
+
+  String get host {
+    if (_host == null) return "";
+    if (_host.startsWith('[')) {
+      return _host.substring(1, _host.length - 1);
+    }
+    return _host;
+  }
+
+  int get port {
+    if (_port == null) return _defaultPort(scheme);
+    return _port;
+  }
+
+  // The default port for the scheme of this Uri.
+  static int _defaultPort(String scheme) {
+    if (scheme == "http") return 80;
+    if (scheme == "https") return 443;
+    return 0;
+  }
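+
+  // Examples (illustrative):
+  //
+  //     Uri.parse("http://example.com/").port       // => 80, hasPort is false
+  //     Uri.parse("http://example.com:8080/").port  // => 8080, hasPort is true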
+
+  String get path => _path;
+
+  String get query => _query ?? "";
+
+  String get fragment => _fragment ?? "";
+
+  // Report a parse failure.
+  static void _fail(String uri, int index, String message) {
+    throw new FormatException(message, uri, index);
+  }
+
+  static Uri _makeHttpUri(String scheme,
+                          String authority,
+                          String unencodedPath,
+                          Map<String, String> queryParameters) {
+    var userInfo = "";
+    var host = null;
+    var port = null;
+
+    if (authority != null && authority.isNotEmpty) {
+      var hostStart = 0;
+      // Split off the user info.
+      bool hasUserInfo = false;
+      for (int i = 0; i < authority.length; i++) {
+        const int atSign = 0x40;
+        if (authority.codeUnitAt(i) == atSign) {
+          hasUserInfo = true;
+          userInfo = authority.substring(0, i);
+          hostStart = i + 1;
+          break;
+        }
+      }
+      var hostEnd = hostStart;
+      if (hostStart < authority.length &&
+          authority.codeUnitAt(hostStart) == _LEFT_BRACKET) {
+        // IPv6 host.
+        for (; hostEnd < authority.length; hostEnd++) {
+          if (authority.codeUnitAt(hostEnd) == _RIGHT_BRACKET) break;
+        }
+        if (hostEnd == authority.length) {
+          throw new FormatException("Invalid IPv6 host entry.",
+                                    authority, hostStart);
+        }
+        Uri.parseIPv6Address(authority, hostStart + 1, hostEnd);
+        hostEnd++;  // Skip the closing bracket.
+        if (hostEnd != authority.length &&
+            authority.codeUnitAt(hostEnd) != _COLON) {
+          throw new FormatException("Invalid end of authority",
+                                    authority, hostEnd);
+        }
+      }
+      // Split host and port.
+      bool hasPort = false;
+      for (; hostEnd < authority.length; hostEnd++) {
+        if (authority.codeUnitAt(hostEnd) == _COLON) {
+          var portString = authority.substring(hostEnd + 1);
+          // We allow an empty port, falling back to the initial value.
+          if (portString.isNotEmpty) port = int.parse(portString);
+          break;
+        }
+      }
+      host = authority.substring(hostStart, hostEnd);
+    }
+    return new Uri(scheme: scheme,
+                   userInfo: userInfo,
+                   host: host,
+                   port: port,
+                   pathSegments: unencodedPath.split("/"),
+                   queryParameters: queryParameters);
+  }
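+
+  // Example (illustrative) of the authority splitting above:
+  //
+  //     new Uri.http("user@example.com:8080", "/a b")
+  //     // => http://user@example.com:8080/a%20b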
+
+  /// Implementation of [Uri.file].
+  factory _Uri.file(String path, {bool windows}) {
+    windows = (windows == null) ? _Uri._isWindows : windows;
+    return windows ? _makeWindowsFileUrl(path, false)
+                   : _makeFileUri(path, false);
+  }
+
+  /// Implementation of [Uri.directory].
+  factory _Uri.directory(String path, {bool windows}) {
+    windows = (windows == null) ? _Uri._isWindows : windows;
+    return windows ? _makeWindowsFileUrl(path, true)
+                   : _makeFileUri(path, true);
+  }
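+
+  // Examples (illustrative, non-Windows semantics):
+  //
+  //     new Uri.file("/tmp/a.txt")  // => file:///tmp/a.txt
+  //     new Uri.directory("/tmp")   // => file:///tmp/ (trailing slash ensured)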
+
+
+  /// Used internally in path-related constructors.
   external static bool get _isWindows;
 
   static _checkNonWindowsPathReservedCharacters(List<String> segments,
@@ -969,46 +1740,6 @@
     }
   }
 
-  /**
-   * Returns a new `Uri` based on this one, but with some parts replaced.
-   *
-   * This method takes the same parameters as the [new Uri] constructor,
-   * and they have the same meaning.
-   *
-   * At most one of [path] and [pathSegments] must be provided.
-   * Likewise, at most one of [query] and [queryParameters] must be provided.
-   *
-   * Each part that is not provided will default to the corresponding
-   * value from this `Uri` instead.
-   *
-   * This method is different from [Uri.resolve] which overrides in a
-   * hierarchial manner,
-   * and can instead replace each part of a `Uri` individually.
-   *
-   * Example:
-   *
-   *     Uri uri1 = Uri.parse("a://b@c:4/d/e?f#g");
-   *     Uri uri2 = uri1.replace(scheme: "A", path: "D/E/E", fragment: "G");
-   *     print(uri2);  // prints "A://b@c:4/D/E/E/?f#G"
-   *
-   * This method acts similarly to using the `new Uri` constructor with
-   * some of the arguments taken from this `Uri` . Example:
-   *
-   *     Uri uri3 = new Uri(
-   *         scheme: "A",
-   *         userInfo: uri1.userInfo,
-   *         host: uri1.host,
-   *         port: uri1.port,
-   *         path: "D/E/E",
-   *         query: uri1.query,
-   *         fragment: "G");
-   *     print(uri3);  // prints "A://b@c:4/D/E/E/?f#G"
-   *     print(uri2 == uri3);  // prints true.
-   *
-   * Using this method can be seen as a shorthand for the `Uri` constructor
-   * call above, but may also be slightly faster because the parts taken
-   * from this `Uri` need not be checked for validity again.
-   */
   Uri replace({String scheme,
                String userInfo,
                String host,
@@ -1075,29 +1806,16 @@
       fragment = this._fragment;
     }
 
-    return new Uri._internal(
+    return new _Uri._internal(
         scheme, userInfo, host, port, path, query, fragment);
   }
 
-  /**
-   * Returns a `Uri` that differs from this only in not having a fragment.
-   *
-   * If this `Uri` does not have a fragment, it is itself returned.
-   */
   Uri removeFragment() {
     if (!this.hasFragment) return this;
-    return new Uri._internal(scheme, _userInfo, _host, _port,
+    return new _Uri._internal(scheme, _userInfo, _host, _port,
                              _path, _query, null);
   }
 
-  /**
-   * Returns the URI path split into its segments. Each of the segments in the
-   * returned list have been decoded. If the path is empty the empty list will
-   * be returned. A leading slash `/` does not affect the segments returned.
-   *
-   * The returned list is unmodifiable and will throw [UnsupportedError] on any
-   * calls that would mutate it.
-   */
   List<String> get pathSegments {
     var result = _pathSegments;
     if (result != null) return result;
@@ -1114,43 +1832,14 @@
     return result;
   }
 
-  /**
-   * Returns the URI query split into a map according to the rules
-   * specified for FORM post in the [HTML 4.01 specification section
-   * 17.13.4](http://www.w3.org/TR/REC-html40/interact/forms.html#h-17.13.4 "HTML 4.01 section 17.13.4").
-   * Each key and value in the returned map has been decoded.
-   * If there is no query the empty map is returned.
-   *
-   * Keys in the query string that have no value are mapped to the
-   * empty string.
-   * If a key occurs more than once in the query string, it is mapped to
-   * an arbitrary choice of possible value.
-   * The [queryParametersAll] getter can provide a map
-   * that maps keys to all of their values.
-   *
-   * The returned map is unmodifiable.
-   */
   Map<String, String> get queryParameters {
     if (_queryParameters == null) {
       _queryParameters =
-          new UnmodifiableMapView<String, String>(splitQueryString(query));
+          new UnmodifiableMapView<String, String>(Uri.splitQueryString(query));
     }
     return _queryParameters;
   }
 
-  /**
-   * Returns the URI query split into a map according to the rules
-   * specified for FORM post in the [HTML 4.01 specification section
-   * 17.13.4](http://www.w3.org/TR/REC-html40/interact/forms.html#h-17.13.4 "HTML 4.01 section 17.13.4").
-   * Each key and value in the returned map has been decoded. If there is no
-   * query the empty map is returned.
-   *
-   * Keys are mapped to lists of their values. If a key occurs only once,
-   * its value is a singleton list. If a key occurs with no value, the
-   * empty string is used as the value for that occurrence.
-   *
-   * The returned map and the lists it contains are unmodifiable.
-   */
   Map<String, List<String>> get queryParametersAll {
     if (_queryParameterLists == null) {
       Map queryParameterLists = _splitQueryStringAll(query);
@@ -1164,22 +1853,6 @@
     return _queryParameterLists;
   }
 
-  /**
-   * Returns a URI where the path has been normalized.
-   *
-   * A normalized path does not contain `.` segments or non-leading `..`
-   * segments.
-   * Only a relative path with no scheme or authority may contain
-   * leading `..` segments,
-   * a path that starts with `/` will also drop any leading `..` segments.
-   *
-   * This uses the same normalization strategy as `new Uri().resolve(this)`.
-   *
-   * Does not change any part of the URI except the path.
-   *
-   * The default implementation of `Uri` always normalizes paths, so calling
-   * this function has no effect.
-   */
   Uri normalizePath() {
     String path = _normalizePath(_path, scheme, hasAuthority);
     if (identical(path, _path)) return this;
@@ -1212,7 +1885,7 @@
       if (host.codeUnitAt(end - 1) != _RIGHT_BRACKET) {
         _fail(host, start, 'Missing end `]` to match `[` in host');
       }
-      parseIPv6Address(host, start + 1, end - 1);
+      Uri.parseIPv6Address(host, start + 1, end - 1);
       // RFC 5952 requires hex digits to be lower case.
       return host.substring(start, end).toLowerCase();
     }
@@ -1220,7 +1893,7 @@
       // TODO(lrn): skip if too short to be a valid IPv6 address?
       for (int i = start; i < end; i++) {
         if (host.codeUnitAt(i) == _COLON) {
-          parseIPv6Address(host, start, end);
+          Uri.parseIPv6Address(host, start, end);
           return '[$host]';
         }
       }
@@ -1333,6 +2006,17 @@
     }
     scheme = scheme.substring(start, end);
     if (containsUpperCase) scheme = scheme.toLowerCase();
+    return _canonicalizeScheme(scheme);
+  }
+
+  // Canonicalize a few often-used scheme strings.
+  //
+  // This improves memory usage and makes comparison faster.
+  static String _canonicalizeScheme(String scheme) {
+    if (scheme == "http") return "http";
+    if (scheme == "file") return "file";
+    if (scheme == "https") return "https";
+    if (scheme == "package") return "package";
     return scheme;
   }
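+
+  // Example (illustrative): a canonicalized scheme shares one string instance
+  // with the corresponding literal, so comparison can be an identity check:
+  //
+  //     identical(new Uri(scheme: "http", host: "a").scheme, "http")  // => true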
 
@@ -1419,8 +2103,6 @@
     return _normalize(fragment, start, end, _queryCharTable);
   }
 
-  static int _stringOrNullLength(String s) => (s == null) ? 0 : s.length;
-
   /**
    * Performs RFC 3986 Percent-Encoding Normalization.
    *
@@ -1465,10 +2147,11 @@
   // Converts a UTF-16 code-unit to its value as a hex digit.
   // Returns -1 for non-hex digits.
   static int _parseHexDigit(int char) {
-    int digit = char ^ Uri._ZERO;
+    const int zeroDigit = 0x30;
+    int digit = char ^ zeroDigit;
     if (digit <= 9) return digit;
     int lowerCase = char | 0x20;
-    if (Uri._LOWER_CASE_A <= lowerCase && lowerCase <= _LOWER_CASE_F) {
+    if (_LOWER_CASE_A <= lowerCase && lowerCase <= _LOWER_CASE_F) {
       return lowerCase - (_LOWER_CASE_A - 10);
     }
     return -1;
@@ -1554,7 +2237,7 @@
             if (index + 1 < end) {
               int tail = component.codeUnitAt(index + 1);
               if ((tail & 0xFC00) == 0xDC00) {
-                // Tail surrogat.
+                // Tail surrogate.
                 sourceLength = 2;
                 char = 0x10000 | ((char & 0x3ff) << 10) | (tail & 0x3ff);
               }
@@ -1703,34 +2386,10 @@
     return output.join("/");
   }
 
-  /**
-   * Resolve [reference] as an URI relative to `this`.
-   *
-   * First turn [reference] into a URI using [Uri.parse]. Then resolve the
-   * resulting URI relative to `this`.
-   *
-   * Returns the resolved URI.
-   *
-   * See [resolveUri] for details.
-   */
   Uri resolve(String reference) {
     return resolveUri(Uri.parse(reference));
   }
 
-  /**
-   * Resolve [reference] as an URI relative to `this`.
-   *
-   * Returns the resolved URI.
-   *
-   * The algorithm "Transform Reference" for resolving a reference is described
-   * in [RFC-3986 Section 5](http://tools.ietf.org/html/rfc3986#section-5 "RFC-1123").
-   *
-   * Updated to handle the case where the base URI is just a relative path -
-   * that is: when it has no scheme or authority and the path does not start
-   * with a slash.
-   * In that case, the paths are combined without removing leading "..", and
-   * an empty path is not converted to "/".
-   */
   Uri resolveUri(Uri reference) {
     // From RFC 3986.
     String targetScheme;
@@ -1776,11 +2435,17 @@
           } else {
             // This is the RFC 3986 behavior for merging.
             if (this.hasEmptyPath) {
-              if (!this.hasScheme && !this.hasAuthority) {
-                // Keep the path relative if no scheme or authority.
-                targetPath = reference.path;
+              if (!this.hasAuthority) {
+                if (!this.hasScheme) {
+                  // Keep the path relative if no scheme or authority.
+                  targetPath = reference.path;
+                } else {
+                  // Remove leading dot-segments if the path is put
+                  // beneath a scheme.
+                  targetPath = _removeDotSegments(reference.path);
+                }
               } else {
-                // Add path normalization on top of RFC algorithm.
+                // RFC algorithm for base with authority and empty path.
                 targetPath = _removeDotSegments("/" + reference.path);
               }
             } else {
@@ -1788,8 +2453,9 @@
               if (this.hasScheme || this.hasAuthority || this.hasAbsolutePath) {
                 targetPath = _removeDotSegments(mergedPath);
               } else {
-                // Non-RFC 3986 beavior. If both base and reference are relative
-                // path, allow the merged path to start with "..".
+                // Non-RFC 3986 behavior.
+                // If both base and reference are relative paths,
+                // allow the merged path to start with "..".
                 // The RFC only specifies the case where the base has a scheme.
                 targetPath = _normalizeRelativePath(mergedPath);
               }
@@ -1800,63 +2466,29 @@
       }
     }
     String fragment = reference.hasFragment ? reference.fragment : null;
-    return new Uri._internal(targetScheme,
-                             targetUserInfo,
-                             targetHost,
-                             targetPort,
-                             targetPath,
-                             targetQuery,
-                             fragment);
+    return new _Uri._internal(targetScheme,
+                              targetUserInfo,
+                              targetHost,
+                              targetPort,
+                              targetPath,
+                              targetQuery,
+                              fragment);
   }
 
-  /**
-   * Returns whether the URI has a [scheme] component.
-   */
   bool get hasScheme => scheme.isNotEmpty;
 
-  /**
-   * Returns whether the URI has an [authority] component.
-   */
   bool get hasAuthority => _host != null;
 
-  /**
-   * Returns whether the URI has an explicit port.
-   *
-   * If the port number is the default port number
-   * (zero for unrecognized schemes, with http (80) and https (443) being
-   * recognized),
-   * then the port is made implicit and omitted from the URI.
-   */
   bool get hasPort => _port != null;
 
-  /**
-   * Returns whether the URI has a query part.
-   */
   bool get hasQuery => _query != null;
 
-  /**
-   * Returns whether the URI has a fragment part.
-   */
   bool get hasFragment => _fragment != null;
 
-  /**
-   * Returns whether the URI has an empty path.
-   */
   bool get hasEmptyPath => _path.isEmpty;
 
-  /**
-   * Returns whether the URI has an absolute path (starting with '/').
-   */
   bool get hasAbsolutePath => _path.startsWith('/');
 
-  /**
-   * Returns the origin of the URI in the form scheme://host:port for the
-   * schemes http and https.
-   *
-   * It is an error if the scheme is not "http" or "https".
-   *
-   * See: http://www.w3.org/TR/2011/WD-html5-20110405/origin-0.html#origin
-   */
   String get origin {
     if (scheme == "" || _host == null || _host == "") {
       throw new StateError("Cannot use origin without a scheme: $this");
@@ -1869,69 +2501,6 @@
     return "$scheme://$_host:$_port";
   }
 
-  /**
-   * Returns the file path from a file URI.
-   *
-   * The returned path has either Windows or non-Windows
-   * semantics.
-   *
-   * For non-Windows semantics the slash ("/") is used to separate
-   * path segments.
-   *
-   * For Windows semantics the backslash ("\") separator is used to
-   * separate path segments.
-   *
-   * If the URI is absolute the path starts with a path separator
-   * unless Windows semantics is used and the first path segment is a
-   * drive letter. When Windows semantics is used a host component in
-   * the uri in interpreted as a file server and a UNC path is
-   * returned.
-   *
-   * The default for whether to use Windows or non-Windows semantics
-   * determined from the platform Dart is running on. When running in
-   * the standalone VM this is detected by the VM based on the
-   * operating system. When running in a browser non-Windows semantics
-   * is always used.
-   *
-   * To override the automatic detection of which semantics to use pass
-   * a value for [windows]. Passing `true` will use Windows
-   * semantics and passing `false` will use non-Windows semantics.
-   *
-   * If the URI ends with a slash (i.e. the last path component is
-   * empty) the returned file path will also end with a slash.
-   *
-   * With Windows semantics URIs starting with a drive letter cannot
-   * be relative to the current drive on the designated drive. That is
-   * for the URI `file:///c:abc` calling `toFilePath` will throw as a
-   * path segment cannot contain colon on Windows.
-   *
-   * Examples using non-Windows semantics (resulting of calling
-   * toFilePath in comment):
-   *
-   *     Uri.parse("xxx/yyy");  // xxx/yyy
-   *     Uri.parse("xxx/yyy/");  // xxx/yyy/
-   *     Uri.parse("file:///xxx/yyy");  // /xxx/yyy
-   *     Uri.parse("file:///xxx/yyy/");  // /xxx/yyy/
-   *     Uri.parse("file:///C:");  // /C:
-   *     Uri.parse("file:///C:a");  // /C:a
-   *
-   * Examples using Windows semantics (resulting URI in comment):
-   *
-   *     Uri.parse("xxx/yyy");  // xxx\yyy
-   *     Uri.parse("xxx/yyy/");  // xxx\yyy\
-   *     Uri.parse("file:///xxx/yyy");  // \xxx\yyy
-   *     Uri.parse("file:///xxx/yyy/");  // \xxx\yyy/
-   *     Uri.parse("file:///C:/xxx/yyy");  // C:\xxx\yyy
-   *     Uri.parse("file:C:xxx/yyy");  // Throws as a path segment
-   *                                   // cannot contain colon on Windows.
-   *     Uri.parse("file://server/share/file");  // \\server\share\file
-   *
-   * If the URI is not a file URI calling this throws
-   * [UnsupportedError].
-   *
-   * If the URI cannot be converted to a file path calling this throws
-   * [UnsupportedError].
-   */
   String toFilePath({bool windows}) {
     if (scheme != "" && scheme != "file") {
       throw new UnsupportedError(
@@ -1946,25 +2515,27 @@
           "Cannot extract a file path from a URI with a fragment component");
     }
     if (windows == null) windows = _isWindows;
-    return windows ? _toWindowsFilePath() : _toFilePath();
+    return windows ? _toWindowsFilePath(this) : _toFilePath();
   }
 
   String _toFilePath() {
-    if (host != "") {
+    if (hasAuthority && host != "") {
       throw new UnsupportedError(
           "Cannot extract a non-Windows file path from a file URI "
           "with an authority");
     }
+    // Use pathSegments so that any escapes are decoded.
+    var pathSegments = this.pathSegments;
     _checkNonWindowsPathReservedCharacters(pathSegments, false);
     var result = new StringBuffer();
-    if (_isPathAbsolute) result.write("/");
+    if (hasAbsolutePath) result.write("/");
     result.writeAll(pathSegments, "/");
     return result.toString();
   }
 
-  String _toWindowsFilePath() {
+  static String _toWindowsFilePath(Uri uri) {
     bool hasDriveLetter = false;
-    var segments = pathSegments;
+    var segments = uri.pathSegments;
     if (segments.length > 0 &&
         segments[0].length == 2 &&
         segments[0].codeUnitAt(1) == _COLON) {
@@ -1972,23 +2543,25 @@
       _checkWindowsPathReservedCharacters(segments, false, 1);
       hasDriveLetter = true;
     } else {
-      _checkWindowsPathReservedCharacters(segments, false);
+      _checkWindowsPathReservedCharacters(segments, false, 0);
     }
     var result = new StringBuffer();
-    if (_isPathAbsolute && !hasDriveLetter) result.write("\\");
-    if (host != "") {
-      result.write("\\");
-      result.write(host);
-      result.write("\\");
+    if (uri.hasAbsolutePath && !hasDriveLetter) result.write(r"\");
+    if (uri.hasAuthority) {
+      var host = uri.host;
+      if (host.isNotEmpty) {
+        result.write(r"\");
+        result.write(host);
+        result.write(r"\");
+      }
     }
-    result.writeAll(segments, "\\");
-    if (hasDriveLetter && segments.length == 1) result.write("\\");
+    result.writeAll(segments, r"\");
+    if (hasDriveLetter && segments.length == 1) result.write(r"\");
     return result.toString();
   }
 
   bool get _isPathAbsolute {
-    if (path == null || path.isEmpty) return false;
-    return path.startsWith('/');
+    return _path != null && _path.startsWith('/');
   }
 
   void _writeAuthority(StringSink ss) {
@@ -2014,8 +2587,13 @@
   UriData get data => (scheme == "data") ? new UriData.fromUri(this) : null;
 
   String toString() {
+    return _text ??= _initializeText();
+  }
+
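+  // Example (illustrative): the text is computed at most once per instance:
+  //
+  //     var uri = new Uri(scheme: "http", host: "example.com", path: "/");
+  //     identical(uri.toString(), uri.toString())  // => true
+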
+  String _initializeText() {
+    assert(_text == null);
     StringBuffer sb = new StringBuffer();
-    _addIfNonEmpty(sb, scheme, scheme, ':');
+    if (scheme.isNotEmpty) sb..write(scheme)..write(":");
     if (hasAuthority || path.startsWith("//") || (scheme == "file")) {
       // File URIs always have the authority, even if it is empty.
       // The empty URI means "localhost".
@@ -2023,192 +2601,31 @@
       _writeAuthority(sb);
     }
     sb.write(path);
-    if (_query != null) { sb..write("?")..write(_query); }
-    if (_fragment != null) { sb..write("#")..write(_fragment); }
+    if (_query != null) sb..write("?")..write(_query);
+    if (_fragment != null) sb..write("#")..write(_fragment);
     return sb.toString();
   }
 
   bool operator==(other) {
-    if (other is! Uri) return false;
-    Uri uri = other;
-    return scheme       == uri.scheme       &&
-           hasAuthority == uri.hasAuthority &&
-           userInfo     == uri.userInfo     &&
-           host         == uri.host         &&
-           port         == uri.port         &&
-           path         == uri.path         &&
-           hasQuery     == uri.hasQuery     &&
-           query        == uri.query        &&
-           hasFragment  == uri.hasFragment  &&
-           fragment     == uri.fragment;
+    if (identical(this, other)) return true;
+    if (other is Uri) {
+      Uri uri = other;
+      return scheme       == uri.scheme       &&
+             hasAuthority == uri.hasAuthority &&
+             userInfo     == uri.userInfo     &&
+             host         == uri.host         &&
+             port         == uri.port         &&
+             path         == uri.path         &&
+             hasQuery     == uri.hasQuery     &&
+             query        == uri.query        &&
+             hasFragment  == uri.hasFragment  &&
+             fragment     == uri.fragment;
+    }
+    return false;
   }
 
   int get hashCode {
-    int combine(part, current) {
-      // The sum is truncated to 30 bits to make sure it fits into a Smi.
-      return (current * 31 + part.hashCode) & 0x3FFFFFFF;
-    }
-    return combine(scheme, combine(userInfo, combine(host, combine(port,
-        combine(path, combine(query, combine(fragment, 1)))))));
-  }
-
-  static void _addIfNonEmpty(StringBuffer sb, String test,
-                             String first, String second) {
-    if ("" != test) {
-      sb.write(first);
-      sb.write(second);
-    }
-  }
-
-  /**
-   * Encode the string [component] using percent-encoding to make it
-   * safe for literal use as a URI component.
-   *
-   * All characters except uppercase and lowercase letters, digits and
-   * the characters `-_.!~*'()` are percent-encoded. This is the
-   * set of characters specified in RFC 2396 and the which is
-   * specified for the encodeUriComponent in ECMA-262 version 5.1.
-   *
-   * When manually encoding path segments or query components remember
-   * to encode each part separately before building the path or query
-   * string.
-   *
-   * For encoding the query part consider using
-   * [encodeQueryComponent].
-   *
-   * To avoid the need for explicitly encoding use the [pathSegments]
-   * and [queryParameters] optional named arguments when constructing
-   * a [Uri].
-   */
-  static String encodeComponent(String component) {
-    return _uriEncode(_unreserved2396Table, component, UTF8, false);
-  }
-
-  /**
-   * Encode the string [component] according to the HTML 4.01 rules
-   * for encoding the posting of a HTML form as a query string
-   * component.
-   *
-   * Encode the string [component] according to the HTML 4.01 rules
-   * for encoding the posting of a HTML form as a query string
-   * component.
-
-   * The component is first encoded to bytes using [encoding].
-   * The default is to use [UTF8] encoding, which preserves all
-   * the characters that don't need encoding.
-
-   * Then the resulting bytes are "percent-encoded". This transforms
-   * spaces (U+0020) to a plus sign ('+') and all bytes that are not
-   * the ASCII decimal digits, letters or one of '-._~' are written as
-   * a percent sign '%' followed by the two-digit hexadecimal
-   * representation of the byte.
-
-   * Note that the set of characters which are percent-encoded is a
-   * superset of what HTML 4.01 requires, since it refers to RFC 1738
-   * for reserved characters.
-   *
-   * When manually encoding query components remember to encode each
-   * part separately before building the query string.
-   *
-   * To avoid the need for explicitly encoding the query use the
-   * [queryParameters] optional named arguments when constructing a
-   * [Uri].
-   *
-   * See http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4.2 for more
-   * details.
-   */
-  static String encodeQueryComponent(String component,
-                                     {Encoding encoding: UTF8}) {
-    return _uriEncode(_unreservedTable, component, encoding, true);
-  }
-
-  /**
-   * Decodes the percent-encoding in [encodedComponent].
-   *
-   * Note that decoding a URI component might change its meaning as
-   * some of the decoded characters could be characters with are
-   * delimiters for a given URI componene type. Always split a URI
-   * component using the delimiters for the component before decoding
-   * the individual parts.
-   *
-   * For handling the [path] and [query] components consider using
-   * [pathSegments] and [queryParameters] to get the separated and
-   * decoded component.
-   */
-  static String decodeComponent(String encodedComponent) {
-    return _uriDecode(encodedComponent, 0, encodedComponent.length,
-                      UTF8, false);
-  }
-
-  /**
-   * Decodes the percent-encoding in [encodedComponent], converting
-   * pluses to spaces.
-   *
-   * It will create a byte-list of the decoded characters, and then use
-   * [encoding] to decode the byte-list to a String. The default encoding is
-   * UTF-8.
-   */
-  static String decodeQueryComponent(
-      String encodedComponent,
-      {Encoding encoding: UTF8}) {
-    return _uriDecode(encodedComponent, 0, encodedComponent.length,
-                      encoding, true);
-  }
-
-  /**
-   * Encode the string [uri] using percent-encoding to make it
-   * safe for literal use as a full URI.
-   *
-   * All characters except uppercase and lowercase letters, digits and
-   * the characters `!#$&'()*+,-./:;=?@_~` are percent-encoded. This
-   * is the set of characters specified in in ECMA-262 version 5.1 for
-   * the encodeURI function .
-   */
-  static String encodeFull(String uri) {
-    return _uriEncode(_encodeFullTable, uri, UTF8, false);
-  }
-
-  /**
-   * Decodes the percent-encoding in [uri].
-   *
-   * Note that decoding a full URI might change its meaning as some of
-   * the decoded characters could be reserved characters. In most
-   * cases an encoded URI should be parsed into components using
-   * [Uri.parse] before decoding the separate components.
-   */
-  static String decodeFull(String uri) {
-    return _uriDecode(uri, 0, uri.length, UTF8, false);
-  }
-
-  /**
-   * Returns the [query] split into a map according to the rules
-   * specified for FORM post in the [HTML 4.01 specification section
-   * 17.13.4](http://www.w3.org/TR/REC-html40/interact/forms.html#h-17.13.4 "HTML 4.01 section 17.13.4").
-   * Each key and value in the returned map has been decoded. If the [query]
-   * is the empty string an empty map is returned.
-   *
-   * Keys in the query string that have no value are mapped to the
-   * empty string.
-   *
-   * Each query component will be decoded using [encoding]. The default encoding
-   * is UTF-8.
-   */
-  static Map<String, String> splitQueryString(String query,
-                                              {Encoding encoding: UTF8}) {
-    return query.split("&").fold({}, (map, element) {
-      int index = element.indexOf("=");
-      if (index == -1) {
-        if (element != "") {
-          map[decodeQueryComponent(element, encoding: encoding)] = "";
-        }
-      } else if (index != 0) {
-        var key = element.substring(0, index);
-        var value = element.substring(index + 1);
-        map[Uri.decodeQueryComponent(key, encoding: encoding)] =
-            decodeQueryComponent(value, encoding: encoding);
-      }
-      return map;
-    });
+    return _hashCodeCache ??= toString().hashCode;
   }
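+
+  // Illustrative consequence of the caching above: the hash code is that of
+  // the normalized text.
+  //
+  //     var uri = new Uri(scheme: "http", host: "example.com", path: "/");
+  //     uri.hashCode == uri.toString().hashCode  // => true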
 
   static List _createList() => [];
@@ -2251,175 +2668,6 @@
     return result;
   }
 
-  /**
-   * Parse the [host] as an IP version 4 (IPv4) address, returning the address
-   * as a list of 4 bytes in network byte order (big endian).
-   *
-   * Throws a [FormatException] if [host] is not a valid IPv4 address
-   * representation.
-   */
-  static List<int> parseIPv4Address(String host) {
-    void error(String msg) {
-      throw new FormatException('Illegal IPv4 address, $msg');
-    }
-    var bytes = host.split('.');
-    if (bytes.length != 4) {
-      error('IPv4 address should contain exactly 4 parts');
-    }
-    // TODO(ajohnsen): Consider using Uint8List.
-    return bytes
-        .map((byteString) {
-          int byte = int.parse(byteString);
-          if (byte < 0 || byte > 255) {
-            error('each part must be in the range of `0..255`');
-          }
-          return byte;
-        })
-        .toList();
-  }
-
-  /**
-   * Parse the [host] as an IP version 6 (IPv6) address, returning the address
-   * as a list of 16 bytes in network byte order (big endian).
-   *
-   * Throws a [FormatException] if [host] is not a valid IPv6 address
-   * representation.
-   *
-   * Acts on the substring from [start] to [end]. If [end] is omitted, it
-   * defaults ot the end of the string.
-   *
-   * Some examples of IPv6 addresses:
-   *  * ::1
-   *  * FEDC:BA98:7654:3210:FEDC:BA98:7654:3210
-   *  * 3ffe:2a00:100:7031::1
-   *  * ::FFFF:129.144.52.38
-   *  * 2010:836B:4179::836B:4179
-   */
-  static List<int> parseIPv6Address(String host, [int start = 0, int end]) {
-    if (end == null) end = host.length;
-    // An IPv6 address consists of exactly 8 parts of 1-4 hex digits, seperated
-    // by `:`'s, with the following exceptions:
-    //
-    //  - One (and only one) wildcard (`::`) may be present, representing a fill
-    //    of 0's. The IPv6 `::` is thus 16 bytes of `0`.
-    //  - The last two parts may be replaced by an IPv4 address.
-    void error(String msg, [position]) {
-      throw new FormatException('Illegal IPv6 address, $msg', host, position);
-    }
-    int parseHex(int start, int end) {
-      if (end - start > 4) {
-        error('an IPv6 part can only contain a maximum of 4 hex digits', start);
-      }
-      int value = int.parse(host.substring(start, end), radix: 16);
-      if (value < 0 || value > (1 << 16) - 1) {
-        error('each part must be in the range of `0x0..0xFFFF`', start);
-      }
-      return value;
-    }
-    if (host.length < 2) error('address is too short');
-    List<int> parts = [];
-    bool wildcardSeen = false;
-    int partStart = start;
-    // Parse all parts, except a potential last one.
-    for (int i = start; i < end; i++) {
-      if (host.codeUnitAt(i) == _COLON) {
-        if (i == start) {
-          // If we see a `:` in the beginning, expect wildcard.
-          i++;
-          if (host.codeUnitAt(i) != _COLON) {
-            error('invalid start colon.', i);
-          }
-          partStart = i;
-        }
-        if (i == partStart) {
-          // Wildcard. We only allow one.
-          if (wildcardSeen) {
-            error('only one wildcard `::` is allowed', i);
-          }
-          wildcardSeen = true;
-          parts.add(-1);
-        } else {
-          // Found a single colon. Parse [partStart..i] as a hex entry.
-          parts.add(parseHex(partStart, i));
-        }
-        partStart = i + 1;
-      }
-    }
-    if (parts.length == 0) error('too few parts');
-    bool atEnd = (partStart == end);
-    bool isLastWildcard = (parts.last == -1);
-    if (atEnd && !isLastWildcard) {
-      error('expected a part after last `:`', end);
-    }
-    if (!atEnd) {
-      try {
-        parts.add(parseHex(partStart, end));
-      } catch (e) {
-        // Failed to parse the last chunk as hex. Try IPv4.
-        try {
-          List<int> last = parseIPv4Address(host.substring(partStart, end));
-          parts.add(last[0] << 8 | last[1]);
-          parts.add(last[2] << 8 | last[3]);
-        } catch (e) {
-          error('invalid end of IPv6 address.', partStart);
-        }
-      }
-    }
-    if (wildcardSeen) {
-      if (parts.length > 7) {
-        error('an address with a wildcard must have less than 7 parts');
-      }
-    } else if (parts.length != 8) {
-      error('an address without a wildcard must contain exactly 8 parts');
-    }
-    List<int> bytes = new Uint8List(16);
-    for (int i = 0, index = 0; i < parts.length; i++) {
-      int value = parts[i];
-      if (value == -1) {
-        int wildCardLength = 9 - parts.length;
-        for (int j = 0; j < wildCardLength; j++) {
-          bytes[index] = 0;
-          bytes[index + 1] = 0;
-          index += 2;
-        }
-      } else {
-        bytes[index] = value >> 8;
-        bytes[index + 1] = value & 0xff;
-        index += 2;
-      }
-    }
-    return bytes;
-  }
-
-  // Frequently used character codes.
-  static const int _SPACE = 0x20;
-  static const int _DOUBLE_QUOTE = 0x22;
-  static const int _NUMBER_SIGN = 0x23;
-  static const int _PERCENT = 0x25;
-  static const int _ASTERISK = 0x2A;
-  static const int _PLUS = 0x2B;
-  static const int _DOT = 0x2E;
-  static const int _SLASH = 0x2F;
-  static const int _ZERO = 0x30;
-  static const int _NINE = 0x39;
-  static const int _COLON = 0x3A;
-  static const int _LESS = 0x3C;
-  static const int _GREATER = 0x3E;
-  static const int _QUESTION = 0x3F;
-  static const int _AT_SIGN = 0x40;
-  static const int _UPPER_CASE_A = 0x41;
-  static const int _UPPER_CASE_F = 0x46;
-  static const int _UPPER_CASE_Z = 0x5A;
-  static const int _LEFT_BRACKET = 0x5B;
-  static const int _BACKSLASH = 0x5C;
-  static const int _RIGHT_BRACKET = 0x5D;
-  static const int _LOWER_CASE_A = 0x61;
-  static const int _LOWER_CASE_F = 0x66;
-  static const int _LOWER_CASE_Z = 0x7A;
-  static const int _BAR = 0x7C;
-
-  static const String _hexDigits = "0123456789ABCDEF";
-
   external static String _uriEncode(List<int> canonicalTable,
                                     String text,
                                     Encoding encoding,
@@ -2941,13 +3189,13 @@
         throw new ArgumentError.value(mimeType, "mimeType",
                                       "Invalid MIME type");
       }
-      buffer.write(Uri._uriEncode(_tokenCharTable,
-                                  mimeType.substring(0, slashIndex),
-                                  UTF8, false));
+      buffer.write(_Uri._uriEncode(_tokenCharTable,
+                                   mimeType.substring(0, slashIndex),
+                                   UTF8, false));
       buffer.write("/");
-      buffer.write(Uri._uriEncode(_tokenCharTable,
-                                  mimeType.substring(slashIndex + 1),
-                                  UTF8, false));
+      buffer.write(_Uri._uriEncode(_tokenCharTable,
+                                   mimeType.substring(slashIndex + 1),
+                                   UTF8, false));
     }
     if (charsetName != null) {
       if (indices != null) {
@@ -2955,7 +3203,7 @@
                ..add(buffer.length + 8);
       }
       buffer.write(";charset=");
-      buffer.write(Uri._uriEncode(_tokenCharTable, charsetName, UTF8, false));
+      buffer.write(_Uri._uriEncode(_tokenCharTable, charsetName, UTF8, false));
     }
     parameters?.forEach((var key, var value) {
       if (key.isEmpty) {
@@ -2968,10 +3216,10 @@
       if (indices != null) indices.add(buffer.length);
       buffer.write(';');
       // Encode any non-RFC2045-token character and both '%' and '#'.
-      buffer.write(Uri._uriEncode(_tokenCharTable, key, UTF8, false));
+      buffer.write(_Uri._uriEncode(_tokenCharTable, key, UTF8, false));
       if (indices != null) indices.add(buffer.length);
       buffer.write('=');
-      buffer.write(Uri._uriEncode(_tokenCharTable, value, UTF8, false));
+      buffer.write(_Uri._uriEncode(_tokenCharTable, value, UTF8, false));
     });
   }
 
@@ -2988,7 +3236,7 @@
     int slashIndex = -1;
     for (int i = 0; i < mimeType.length; i++) {
       var char = mimeType.codeUnitAt(i);
-      if (char != Uri._SLASH) continue;
+      if (char != _SLASH) continue;
       if (slashIndex < 0) {
         slashIndex = i;
         continue;
@@ -3008,7 +3256,7 @@
    * ````
    *
    * where `type`, `subtype`, `attribute` and `value` are specified in RFC-2045,
-   * and `data` is a sequnce of URI-characters (RFC-2396 `uric`).
+   * and `data` is a sequence of URI-characters (RFC-2396 `uric`).
    *
    * This means that all the characters must be ASCII, but the URI may contain
    * percent-escapes for non-ASCII byte values that need an interpretation
@@ -3019,13 +3267,22 @@
    * and `,` delimiters.
    *
    * Accessing the individual parts may fail later if they turn out to have
-   * content that can't be decoded sucessfully as a string.
+   * content that can't be decoded successfully as a string.
    */
   static UriData parse(String uri) {
-    if (!uri.startsWith("data:")) {
-      throw new FormatException("Does not start with 'data:'", uri, 0);
+    if (uri.length >= 5) {
+      int dataDelta = _startsWithData(uri, 0);
+      if (dataDelta == 0) {
+        // Exact match on "data:".
+        return _parse(uri, 5, null);
+      }
+      if (dataDelta == 0x20) {
+        // Starts with a non-normalized "data" scheme containing upper-case
+        // letters. Parse anyway, but throw away the scheme.
+        return _parse(uri.substring(5), 0, null);
+      }
     }
-    return _parse(uri, 5, null);
+    throw new FormatException("Does not start with 'data:'", uri, 0);
   }
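+
+  // Examples (illustrative): the scheme match is now case-insensitive, so
+  // both of these succeed (the second discards the non-normalized scheme):
+  //
+  //     UriData.parse("data:,Hello");
+  //     UriData.parse("DATA:,Hello");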
 
   /**
@@ -3050,7 +3307,7 @@
     // That's perfectly reasonable - data URIs are not hierarchical,
     // but it may make some consumers stumble.
     // Should we at least do escape normalization?
-    _uriCache = new Uri._internal("data", "", null, null, path, query, null);
+    _uriCache = new _Uri._internal("data", "", null, null, path, query, null);
     return _uriCache;
   }
 
@@ -3075,7 +3332,7 @@
     int start = _separatorIndices[0] + 1;
     int end = _separatorIndices[1];
     if (start == end) return "text/plain";
-    return Uri._uriDecode(_text, start, end, UTF8, false);
+    return _Uri._uriDecode(_text, start, end, UTF8, false);
   }
 
   /**
@@ -3096,8 +3353,8 @@
       var keyStart = _separatorIndices[i] + 1;
       var keyEnd = _separatorIndices[i + 1];
       if (keyEnd == keyStart + 7 && _text.startsWith("charset", keyStart)) {
-        return Uri._uriDecode(_text, keyEnd + 1, _separatorIndices[i + 2],
-                              UTF8, false);
+        return _Uri._uriDecode(_text, keyEnd + 1, _separatorIndices[i + 2],
+                               UTF8, false);
       }
     }
     return "US-ASCII";
@@ -3155,8 +3412,8 @@
         result[index++] = codeUnit;
       } else {
         if (i + 2 < text.length) {
-          var digit1 = Uri._parseHexDigit(text.codeUnitAt(i + 1));
-          var digit2 = Uri._parseHexDigit(text.codeUnitAt(i + 2));
+          var digit1 = _Uri._parseHexDigit(text.codeUnitAt(i + 1));
+          var digit2 = _Uri._parseHexDigit(text.codeUnitAt(i + 2));
           if (digit1 >= 0 && digit2 >= 0) {
             int byte = digit1 * 16 + digit2;
             result[index++] = byte;
@@ -3177,7 +3434,7 @@
    * If the content is Base64 encoded, it will be decoded to bytes and then
    * decoded to a string using [encoding].
    * If encoding is omitted, the value of a `charset` parameter is used
-   * if it is recongized by [Encoding.getByName], otherwise it defaults to
+   * if it is recognized by [Encoding.getByName], otherwise it defaults to
    * the [ASCII] encoding, which is the default encoding for data URIs
    * that do not specify an encoding.
    *
@@ -3199,7 +3456,7 @@
       var converter = BASE64.decoder.fuse(encoding.decoder);
       return converter.convert(text.substring(start));
     }
-    return Uri._uriDecode(text, start, text.length, encoding, false);
+    return _Uri._uriDecode(text, start, text.length, encoding, false);
   }
 
   /**
@@ -3222,8 +3479,8 @@
       var start = _separatorIndices[i - 2] + 1;
       var equals = _separatorIndices[i - 1];
       var end = _separatorIndices[i];
-      String key = Uri._uriDecode(_text, start, equals, UTF8, false);
-      String value = Uri._uriDecode(_text,equals + 1, end, UTF8, false);
+      String key = _Uri._uriDecode(_text, start, equals, UTF8, false);
+      String value = _Uri._uriDecode(_text,equals + 1, end, UTF8, false);
       result[key] = value;
     }
     return result;
@@ -3306,9 +3563,9 @@
           ((canonicalTable[byte >> 4] & (1 << (byte & 0x0f))) != 0)) {
         buffer.writeCharCode(byte);
       } else {
-        buffer.writeCharCode(Uri._PERCENT);
-        buffer.writeCharCode(Uri._hexDigits.codeUnitAt(byte >> 4));
-        buffer.writeCharCode(Uri._hexDigits.codeUnitAt(byte & 0x0f));
+        buffer.writeCharCode(_PERCENT);
+        buffer.writeCharCode(_hexDigits.codeUnitAt(byte >> 4));
+        buffer.writeCharCode(_hexDigits.codeUnitAt(byte & 0x0f));
       }
     }
     if ((byteOr & ~0xFF) != 0) {
@@ -3357,5 +3614,852 @@
   //  mark        =  "-" | "_" | "." | "!" | "~" | "*" | "'" | "(" | ")"
   //
   // This is the same characters as in a URI query (which is URI pchar plus '?')
-  static const _uricTable = Uri._queryCharTable;
+  static const _uricTable = _Uri._queryCharTable;
 }
+
+// --------------------------------------------------------------------
+// Constants used to read the scanner result.
+// The indices point into the table filled by [_scan], which contains
+// recognized positions in the scanned URI.
+// The `0` index is only used internally.
+
+/// Index of the position of the `:` after a scheme.
+const int _schemeEndIndex     = 1;
+/// Index of the position of the character just before the host name.
+const int _hostStartIndex     = 2;
+/// Index of the position of the `:` before a port value.
+const int _portStartIndex     = 3;
+/// Index of the position of the first character of a path.
+const int _pathStartIndex     = 4;
+/// Index of the position of the `?` before a query.
+const int _queryStartIndex    = 5;
+/// Index of the position of the `#` before a fragment.
+const int _fragmentStartIndex = 6;
+/// Index of a position where the URI was determined to be "non-simple".
+const int _notSimpleIndex     = 7;
+
+// Initial state for scanner.
+const int _uriStart           = 00;
+
+// If scanning of a URI terminates in this state or above,
+// the URI is considered non-simple.
+const int _nonSimpleEndStates = 14;
+
+// Initial state for scheme validation.
+const int _schemeStart        = 20;
+
+/// Transition tables used to scan a URI to determine its structure.
+///
+/// The tables represent a state machine with output.
+///
+/// To scan the URI, start in the [_uriStart] state, then read each character
+/// of the URI in order, from start to end, and for each character perform a
+/// transition to a new state while writing the current position into the output
+/// buffer at a designated index.
+///
+/// Each state, represented by an integer which is an index into
+/// [_scannerTables], has a set of transitions, one for each character.
+/// Each transition is encoded in a byte: the low 5 bits hold the next state
+/// and the top 3 bits hold an index into the output table.
+///
+/// For URI scanning, only characters in the range U+0020 through U+007E are
+/// interesting; all characters outside that range are treated the same.
+/// The tables only contain 96 entries: one for each character in the
+/// interesting range, plus one more representing all values outside the range.
+/// The character entries are stored in one `Uint8List` per state, with the
+/// transition for a character at position `character ^ 0x60`,
+/// which maps the range U+0020 .. U+007F into positions 0 .. 95.
+/// All remaining characters are mapped to position 31 (`0x7f ^ 0x60`) which
+/// represents the transition for all remaining characters.
+final List<Uint8List> _scannerTables = _createTables();
+
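+// A sketch (illustrative; `state`, `char`, `position` and `indices` are
+// stand-in names) of how a single scanner step reads the table for a
+// character in the U+0020..U+007F range:
+//
+//     int transition = _scannerTables[state][char ^ 0x60];
+//     indices[transition >> 5] = position;  // 3-bit write index (top bits).
+//     state = transition & 0x1f;            // 5-bit next state (low bits).
+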
+// ----------------------------------------------------------------------
+// Code to create the URI scanner table.
+
+/// Creates the tables for [_scannerTables] used by [Uri.parse].
+///
+/// See [_scannerTables] for the generated format.
+///
+/// The concrete tables are chosen as a trade-off between the number of states
+/// needed and the precision of the result.
+/// This allows definitely recognizing the general structure of the URI
+/// (presence and location of scheme, user-info, host, port, path, query and
+/// fragment) while at the same time detecting that some components are not
+/// in canonical form (anything containing a `%`, or a host name containing a
+/// capital letter). Since the scanner doesn't know whether something is a
+/// scheme or a path until it sees `:`, or user-info or host until it sees
+/// a `@`, a second pass is needed to validate the scheme, and any user-info
+/// is considered non-canonical by default.
+///
+/// The states (starting from [_uriStart]) write positions while scanning
+/// a string from `start` to `end` as follows:
+///
+/// - [_schemeEndIndex]: Should be initialized to `start-1`.
+///   If the URI has a scheme, it is set to the position of the `:` after
+///   the scheme.
+/// - [_hostStartIndex]: Should be initialized to `start - 1`.
+///   If the URI has an authority, it is set to the character before the
+///   host name - either the second `/` in the `//` leading the authority,
+///   or the `@` after a user-info. Comparing this value to the scheme end
+///   position can be used to detect that there is a user-info component.
+/// - [_portStartIndex]: Should be initialized to `start`.
+///   Set to the position of the last `:` in an authority, and unchanged
+///   if there is no authority or no `:` in an authority.
+///   If this position is after the host start, there is a port, otherwise it
+///   is just marking a colon in the user-info component.
+/// - [_pathStartIndex]: Should be initialized to `start`.
+///   Is set to the first path character unless the path is empty.
+///   If the path is empty, the position is either unchanged (`start`) or
+///   the first slash of an authority. So, if the path start is before a
+///   host start or scheme end, the path is empty.
+/// - [_queryStartIndex]: Should be initialized to `end`.
+///   The position of the `?` leading a query if the URI contains a query.
+/// - [_fragmentStartIndex]: Should be initialized to `end`.
+///   The position of the `#` leading a fragment if the URI contains a fragment.
+/// - [_notSimpleIndex]: Should be initialized to `start - 1`.
+///   Set to another value if the URI is considered "not simple".
+///   This is elaborated below.
+///
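+/// As an illustration of the semantics above, scanning
+/// `"http://host/path?q#f"` from `start = 0` records:
+///
+///     _schemeEndIndex:     4    // the `:` after "http".
+///     _hostStartIndex:     6    // the second `/`, just before "host".
+///     _pathStartIndex:     11   // the `/` starting "/path".
+///     _queryStartIndex:    16   // the `?`.
+///     _fragmentStartIndex: 18   // the `#`.
+///
+/// [_portStartIndex] is left unchanged because there is no port.
+///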
+/// # Simple URIs
+/// A URI is considered "simple" if it is in a normalized form containing no
+/// escapes. This allows us to skip normalization and checking whether escapes
+/// are valid, and to extract components without worrying about unescaping.
+///
+/// The scanner computes a conservative approximation of being "simple".
+/// It rejects any URI with an escape, with a user-info component (mainly
+/// because they are rare and would increase the number of states in the
+/// scanner significantly), with an IPV6 host or with a capital letter in
+/// the scheme or host name (the scheme is handled in a second scan using
+/// a separate two-state table).
+/// Further, paths containing `..` or `.` path segments are considered
+/// non-simple except for pure relative paths (no scheme or authority) starting
+/// with a sequence of "../" segments.
+///
+/// The transition tables cannot detect a trailing ".." in the path,
+/// followed by a query or fragment, because the segment is not known to be
+/// complete until we are past it, and we then need to store the query/fragment
+/// start instead. This case is checked manually post-scanning (such a path
+/// needs to be normalized to end in "../", so the URI shouldn't be considered
+/// simple).
+List<Uint8List> _createTables() {
+  // TODO(lrn): Use a precomputed table.
+
+  // Total number of states for the scanner.
+  const int stateCount         = 22;
+
+  // States used to scan a URI from scratch.
+  const int schemeOrPath       = 01;
+  const int authOrPath         = 02;
+  const int authOrPathSlash    = 03;
+  const int uinfoOrHost0       = 04;
+  const int uinfoOrHost        = 05;
+  const int uinfoOrPort0       = 06;
+  const int uinfoOrPort        = 07;
+  const int ipv6Host           = 08;
+  const int relPathSeg         = 09;
+  const int pathSeg            = 10;
+  const int path               = 11;
+  const int query              = 12;
+  const int fragment           = 13;
+  const int schemeOrPathDot    = 14;
+  const int schemeOrPathDot2   = 15;
+  const int relPathSegDot      = 16;
+  const int relPathSegDot2     = 17;
+  const int pathSegDot         = 18;
+  const int pathSegDot2        = 19;
+
+  // States used to validate a scheme after its end position has been found.
+  const int scheme0            = _schemeStart;
+  const int scheme             = 21;
+
+  // Constants encoding the write-index for the state transition into the top 3
+  // bits of a byte.
+  const int schemeEnd          = _schemeEndIndex     << 5;
+  const int hostStart          = _hostStartIndex     << 5;
+  const int portStart          = _portStartIndex     << 5;
+  const int pathStart          = _pathStartIndex     << 5;
+  const int queryStart         = _queryStartIndex    << 5;
+  const int fragmentStart      = _fragmentStartIndex << 5;
+  const int notSimple          = _notSimpleIndex     << 5;
+
+  /// The `unreserved` characters of RFC 3986.
+  const unreserved =
+      "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-._~"  ;
+  /// The `sub-delim` characters of RFC 3986.
+  const subDelims = r"!$&'()*+,;=";
+  // The `pchar` characters of RFC 3986: characters that may occur in a path,
+  // excluding escapes.
+  const pchar = "$unreserved$subDelims";
+
+  var tables = new List<Uint8List>.generate(stateCount,
+      (_) => new Uint8List(96));
+
+  // Helper function which initializes the table for [state] with a default
+  // transition and returns the table.
+  Uint8List build(state, defaultTransition) =>
+      tables[state]..fillRange(0, 96, defaultTransition);
+
+  // Helper function which sets the transition for each character in [chars]
+  // to [transition] in the [target] table.
+  // The [chars] string must contain only characters in the U+0020 .. U+007E
+  // range.
+  void setChars(Uint8List target, String chars, int transition) {
+    for (int i = 0; i < chars.length; i++) {
+      var char = chars.codeUnitAt(i);
+      target[char ^ 0x60] = transition;
+    }
+  }
+
+  /// Helper function which sets the transition for all characters in the
+  /// range from `range[0]` to `range[1]` to [transition] in the [target] table.
+  ///
+  /// The [range] must be a two-character string where both characters are in
+  /// the U+0020 .. U+007E range and the former character must have a lower
+  /// code point than the latter.
+  void setRange(Uint8List target, String range, int transition) {
+    for (int i = range.codeUnitAt(0), n = range.codeUnitAt(1); i <= n; i++) {
+      target[i ^ 0x60] = transition;
+    }
+  }
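+
+  // For illustration: xor-ing a code unit with 0x60 maps the range
+  // U+0020 .. U+007F onto table slots 0x00 .. 0x5F; e.g. "a" (0x61) becomes
+  // 0x01, "z" (0x7A) becomes 0x1A, " " (0x20) becomes 0x40, and "?" (0x3F)
+  // becomes 0x5F, which is why each state table needs only 96 entries.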
+
+  // Create the transitions for each state.
+  var b;
+
+  // Validate as path; if it is a scheme, we handle it later.
+  b = build(_uriStart, schemeOrPath | notSimple);
+  setChars(b, pchar, schemeOrPath);
+  setChars(b, ".", schemeOrPathDot);
+  setChars(b, ":", authOrPath | schemeEnd);  // Handle later.
+  setChars(b, "/", authOrPathSlash);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(schemeOrPathDot, schemeOrPath | notSimple);
+  setChars(b, pchar, schemeOrPath);
+  setChars(b, ".", schemeOrPathDot2);
+  setChars(b, ":", authOrPath | schemeEnd);
+  setChars(b, "/", pathSeg | notSimple);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(schemeOrPathDot2, schemeOrPath | notSimple);
+  setChars(b, pchar, schemeOrPath);
+  setChars(b, "%", schemeOrPath | notSimple);
+  setChars(b, ":", authOrPath | schemeEnd);
+  setChars(b, "/", relPathSeg);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(schemeOrPath, schemeOrPath | notSimple);
+  setChars(b, pchar, schemeOrPath);
+  setChars(b, ":", authOrPath | schemeEnd);
+  setChars(b, "/", pathSeg);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(authOrPath, path | notSimple);
+  setChars(b, pchar, path | pathStart);
+  setChars(b, "/", authOrPathSlash | pathStart);
+  setChars(b, ".", pathSegDot | pathStart);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(authOrPathSlash, path | notSimple);
+  setChars(b, pchar, path);
+  setChars(b, "/", uinfoOrHost0 | hostStart);
+  setChars(b, ".", pathSegDot);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(uinfoOrHost0, uinfoOrHost | notSimple);
+  setChars(b, pchar, uinfoOrHost);
+  setRange(b, "AZ", uinfoOrHost | notSimple);
+  setChars(b, ":", uinfoOrPort0 | portStart);
+  setChars(b, "@", uinfoOrHost0 | hostStart);
+  setChars(b, "[", ipv6Host | notSimple);
+  setChars(b, "/", pathSeg | pathStart);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(uinfoOrHost, uinfoOrHost | notSimple);
+  setChars(b, pchar, uinfoOrHost);
+  setRange(b, "AZ", uinfoOrHost | notSimple);
+  setChars(b, ":", uinfoOrPort0 | portStart);
+  setChars(b, "@", uinfoOrHost0 | hostStart);
+  setChars(b, "/", pathSeg | pathStart);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(uinfoOrPort0, uinfoOrPort | notSimple);
+  setRange(b, "19", uinfoOrPort);
+  setChars(b, "@", uinfoOrHost0 | hostStart);
+  setChars(b, "/", pathSeg | pathStart);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(uinfoOrPort, uinfoOrPort | notSimple);
+  setRange(b, "09", uinfoOrPort);
+  setChars(b, "@", uinfoOrHost0 | hostStart);
+  setChars(b, "/", pathSeg | pathStart);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(ipv6Host, ipv6Host);
+  setChars(b, "]", uinfoOrHost);
+
+  b = build(relPathSeg, path | notSimple);
+  setChars(b, pchar, path);
+  setChars(b, ".", relPathSegDot);
+  setChars(b, "/", pathSeg | notSimple);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(relPathSegDot, path | notSimple);
+  setChars(b, pchar, path);
+  setChars(b, ".", relPathSegDot2);
+  setChars(b, "/", pathSeg | notSimple);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(relPathSegDot2, path | notSimple);
+  setChars(b, pchar, path);
+  setChars(b, "/", relPathSeg);
+  setChars(b, "?", query | queryStart);  // This should be non-simple.
+  setChars(b, "#", fragment | fragmentStart);  // This should be non-simple.
+
+  b = build(pathSeg, path | notSimple);
+  setChars(b, pchar, path);
+  setChars(b, ".", pathSegDot);
+  setChars(b, "/", pathSeg | notSimple);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(pathSegDot, path | notSimple);
+  setChars(b, pchar, path);
+  setChars(b, ".", pathSegDot2);
+  setChars(b, "/", pathSeg | notSimple);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(pathSegDot2, path | notSimple);
+  setChars(b, pchar, path);
+  setChars(b, "/", pathSeg | notSimple);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(path, path | notSimple);
+  setChars(b, pchar, path);
+  setChars(b, "/", pathSeg);
+  setChars(b, "?", query | queryStart);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(query, query | notSimple);
+  setChars(b, pchar, query);
+  setChars(b, "?", query);
+  setChars(b, "#", fragment | fragmentStart);
+
+  b = build(fragment, fragment | notSimple);
+  setChars(b, pchar, fragment);
+  setChars(b, "?", fragment);
+
+  // A separate two-state validator for lower-case scheme names.
+  // Any non-scheme character or upper-case letter is marked as non-simple.
+  b = build(scheme0, scheme | notSimple);
+  setRange(b, "az", scheme);
+
+  b = build(scheme, scheme | notSimple);
+  setRange(b, "az", scheme);
+  setRange(b, "09", scheme);
+  setChars(b, "+-.", scheme);
+
+  return tables;
+}
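+
+// For illustration: a transition byte packs the next state into its low five
+// bits and the write-index into its top three bits. Assuming, say, that
+// `_queryStartIndex` is 5, the transition `query | queryStart` equals
+// 12 | (5 << 5) = 0xAC, from which the scanner recovers the state as
+// `0xAC & 0x1f == 12` and the write slot as `0xAC >> 5 == 5`.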
+
+// --------------------------------------------------------------------
+// Code that uses the URI scanner table.
+
+/// Scan a string using the [_scannerTables] state machine.
+///
+/// Scans [uri] from [start] to [end], starting in state [state] and
+/// writing output into [indices].
+///
+/// Returns the final state.
+int _scan(String uri, int start, int end, int state, List<int> indices) {
+  var tables = _scannerTables;
+  assert(end <= uri.length);
+  for (int i = start; i < end; i++) {
+    var table = tables[state];
+    // Xor with 0x60 to move range 0x20-0x7f into 0x00-0x5f
+    int char = uri.codeUnitAt(i) ^ 0x60;
+    // Use 0x1f (the table slot of 0x7f) to represent all unhandled characters.
+    if (char > 0x5f) char = 0x1f;
+    int transition = table[char];
+    state = transition & 0x1f;
+    indices[transition >> 5] = i;
+  }
+  return state;
+}
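+
+// For illustration, a sketch of a scan (assuming an eight-slot indices list
+// matching the `_*Index` constants, and the `_uriStart` state used above):
+//
+//   var indices = new List<int>.filled(8, 0);
+//   int finalState = _scan("s://h/p", 0, 7, _uriStart, indices);
+//
+// Each transition writes the current position into the slot selected by its
+// top three bits; slot 0 acts as a scratch slot for transitions that record
+// nothing.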
+
+class _SimpleUri implements Uri {
+  final String _uri;
+  final int _schemeEnd;
+  final int _hostStart;
+  final int _portStart;
+  final int _pathStart;
+  final int _queryStart;
+  final int _fragmentStart;
+  /// The scheme is often used to distinguish URIs.
+  /// To make comparisons more efficient, we cache the value, and
+  /// canonicalize a few known types.
+  String _schemeCache;
+  int _hashCodeCache;
+
+  _SimpleUri(
+      this._uri,
+      this._schemeEnd,
+      this._hostStart,
+      this._portStart,
+      this._pathStart,
+      this._queryStart,
+      this._fragmentStart,
+      this._schemeCache);
+
+  bool get hasScheme => _schemeEnd > 0;
+  bool get hasAuthority => _hostStart > 0;
+  bool get hasUserInfo => _hostStart > _schemeEnd + 4;
+  bool get hasPort => _hostStart > 0 && _portStart + 1 < _pathStart;
+  bool get hasQuery => _queryStart < _fragmentStart;
+  bool get hasFragment => _fragmentStart < _uri.length;
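+
+  // Illustration of the index layout, derived from the getters below: for
+  // the simple URI "http://user@host:8080/p?q#f" the fields would be
+  // _schemeEnd = 4 (the ':'), _hostStart = 12, _portStart = 16 (the ':'
+  // before the port), _pathStart = 21, _queryStart = 23 (the '?') and
+  // _fragmentStart = 25 (the '#'), so `host` is `_uri.substring(12, 16)`
+  // ("host") and `port` parses `_uri.substring(17, 21)` ("8080").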
+
+  bool get _isFile => _schemeEnd == 4 && _uri.startsWith("file");
+  bool get _isHttp => _schemeEnd == 4 && _uri.startsWith("http");
+  bool get _isHttps => _schemeEnd == 5 && _uri.startsWith("https");
+  bool get _isPackage => _schemeEnd == 7 && _uri.startsWith("package");
+  bool _isScheme(String scheme) =>
+    _schemeEnd == scheme.length && _uri.startsWith(scheme);
+
+  bool get hasAbsolutePath => _uri.startsWith("/", _pathStart);
+  bool get hasEmptyPath => _pathStart == _queryStart;
+
+  bool get isAbsolute => hasScheme && !hasFragment;
+
+  String get scheme {
+    if (_schemeEnd <= 0) return "";
+    if (_schemeCache != null) return _schemeCache;
+    if (_isHttp) {
+      _schemeCache = "http";
+    } else if (_isHttps) {
+      _schemeCache = "https";
+    } else if (_isFile) {
+      _schemeCache = "file";
+    } else if (_isPackage) {
+      _schemeCache = "package";
+    } else {
+      _schemeCache = _uri.substring(0, _schemeEnd);
+    }
+    return _schemeCache;
+  }
+  String get authority => _hostStart > 0 ?
+      _uri.substring(_schemeEnd + 3, _pathStart) : "";
+  String get userInfo => (_hostStart > _schemeEnd + 3) ?
+      _uri.substring(_schemeEnd + 3, _hostStart - 1) : "";
+  String get host =>
+      _hostStart > 0 ? _uri.substring(_hostStart, _portStart) : "";
+  int get port {
+    if (hasPort) return int.parse(_uri.substring(_portStart + 1, _pathStart));
+    if (_isHttp) return 80;
+    if (_isHttps) return 443;
+    return 0;
+  }
+  String get path => _uri.substring(_pathStart, _queryStart);
+  String get query => (_queryStart < _fragmentStart) ?
+     _uri.substring(_queryStart + 1, _fragmentStart) : "";
+  String get fragment => (_fragmentStart < _uri.length) ?
+     _uri.substring(_fragmentStart + 1) : "";
+
+  String get origin {
+    // Check original behavior - W3C spec is wonky!
+    bool isHttp = _isHttp;
+    if (_schemeEnd < 0 || _hostStart == _portStart) {
+      throw new StateError("Cannot use origin without a scheme: $this");
+    }
+    if (!isHttp && !_isHttps) {
+      throw new StateError(
+        "Origin is only applicable schemes http and https: $this");
+    }
+    if (_hostStart == _schemeEnd + 3) {
+      return _uri.substring(0, _pathStart);
+    }
+    // Need to drop a non-empty userInfo.
+    return _uri.substring(0, _schemeEnd + 3) +
+           _uri.substring(_hostStart, _pathStart);
+  }
+
+  List<String> get pathSegments {
+    int start = _pathStart;
+    int end = _queryStart;
+    if (_uri.startsWith("/", start)) start++;
+    if (start == end) return const <String>[];
+    List<String> parts = [];
+    for (int i = start; i < end; i++) {
+      var char = _uri.codeUnitAt(i);
+      if (char == _SLASH) {
+        parts.add(_uri.substring(start, i));
+        start = i + 1;
+      }
+    }
+    parts.add(_uri.substring(start, end));
+    return new List<String>.unmodifiable(parts);
+  }
+
+  Map<String, String> get queryParameters {
+    if (!hasQuery) return const <String, String>{};
+    return new UnmodifiableMapView<String, String>(
+        Uri.splitQueryString(query));
+  }
+
+  Map<String, List<String>> get queryParametersAll {
+    if (!hasQuery) return const <String, List<String>>{};
+    Map queryParameterLists = _Uri._splitQueryStringAll(query);
+    for (var key in queryParameterLists.keys) {
+      queryParameterLists[key] =
+          new List<String>.unmodifiable(queryParameterLists[key]);
+    }
+    return new Map<String, List<String>>.unmodifiable(queryParameterLists);
+  }
+
+  bool _isPort(String port) {
+    int portDigitStart = _portStart + 1;
+    return portDigitStart + port.length == _pathStart &&
+           _uri.startsWith(port, portDigitStart);
+  }
+
+  Uri normalizePath() => this;
+
+  Uri removeFragment() {
+    if (!hasFragment) return this;
+    return new _SimpleUri(
+      _uri.substring(0, _fragmentStart),
+      _schemeEnd, _hostStart, _portStart,
+      _pathStart, _queryStart, _fragmentStart, _schemeCache);
+  }
+
+  Uri replace({String scheme,
+               String userInfo,
+               String host,
+               int port,
+               String path,
+               Iterable<String> pathSegments,
+               String query,
+               Map<String, dynamic/*String|Iterable<String>*/> queryParameters,
+               String fragment}) {
+    bool schemeChanged = false;
+    if (scheme != null) {
+      scheme = _Uri._makeScheme(scheme, 0, scheme.length);
+      schemeChanged = !_isScheme(scheme);
+    } else {
+      scheme = this.scheme;
+    }
+    bool isFile = (scheme == "file");
+    if (userInfo != null) {
+      userInfo = _Uri._makeUserInfo(userInfo, 0, userInfo.length);
+    } else if (_hostStart > 0) {
+      userInfo = _uri.substring(_schemeEnd + 3, _hostStart);
+    } else {
+      userInfo = "";
+    }
+    if (port != null) {
+      port = _Uri._makePort(port, scheme);
+    } else {
+      port = this.hasPort ? this.port : null;
+      if (schemeChanged) {
+        // The default port might have changed.
+        port = _Uri._makePort(port, scheme);
+      }
+    }
+    if (host != null) {
+      host = _Uri._makeHost(host, 0, host.length, false);
+    } else if (_hostStart > 0) {
+      host = _uri.substring(_hostStart, _portStart);
+    } else if (userInfo.isNotEmpty || port != null || isFile) {
+      host = "";
+    }
+
+    bool hasAuthority = host != null;
+    if (path != null || pathSegments != null) {
+      path = _Uri._makePath(path, 0, _stringOrNullLength(path), pathSegments,
+                           scheme, hasAuthority);
+    } else {
+      path = _uri.substring(_pathStart, _queryStart);
+      if ((isFile || (hasAuthority && !path.isEmpty)) &&
+          !path.startsWith('/')) {
+        path = "/" + path;
+      }
+    }
+
+    if (query != null || queryParameters != null) {
+      query = _Uri._makeQuery(
+          query, 0, _stringOrNullLength(query), queryParameters);
+    } else if (_queryStart < _fragmentStart) {
+      query = _uri.substring(_queryStart, _fragmentStart);
+    }
+
+    if (fragment != null) {
+      fragment = _Uri._makeFragment(fragment, 0, fragment.length);
+    } else if (_fragmentStart < _uri.length) {
+      fragment = _uri.substring(_fragmentStart);
+    }
+
+    return new _Uri._internal(
+        scheme, userInfo, host, port, path, query, fragment);
+  }
+
+  Uri resolve(String reference) {
+    return resolveUri(Uri.parse(reference));
+  }
+
+  Uri resolveUri(Uri reference) {
+    if (reference is _SimpleUri) {
+      return _simpleMerge(this, reference);
+    }
+    return _toNonSimple().resolveUri(reference);
+  }
+
+  // Merge two simple URIs. This should always result in a prefix of
+  // one concatenated with a suffix of the other, possibly with a `/` in
+  // the middle of two merged paths, which is again simple.
+  // In a few cases there might be a need for extra normalization when
+  // resolving on top of a known scheme.
+  Uri _simpleMerge(_SimpleUri base, _SimpleUri ref) {
+    if (ref.hasScheme) return ref;
+    if (ref.hasAuthority) {
+      if (!base.hasScheme) return ref;
+      bool isSimple = true;
+      if (base._isFile) {
+        isSimple = !ref.hasEmptyPath;
+      } else if (base._isHttp) {
+        isSimple = !ref._isPort("80");
+      } else if (base._isHttps) {
+        isSimple = !ref._isPort("443");
+      }
+      if (isSimple) {
+        var delta = base._schemeEnd + 1;
+        var newUri = base._uri.substring(0, base._schemeEnd + 1) +
+                     ref._uri.substring(ref._schemeEnd + 1);
+        return new _SimpleUri(newUri,
+           base._schemeEnd,
+           ref._hostStart + delta,
+           ref._portStart + delta,
+           ref._pathStart + delta,
+           ref._queryStart + delta,
+           ref._fragmentStart + delta,
+           base._schemeCache);
+      } else {
+        // This will require normalization, so use the _Uri implementation.
+        return _toNonSimple().resolveUri(ref);
+      }
+    }
+    if (ref.hasEmptyPath) {
+      if (ref.hasQuery) {
+        int delta = base._queryStart - ref._queryStart;
+        var newUri = base._uri.substring(0, base._queryStart) +
+                     ref._uri.substring(ref._queryStart);
+        return new _SimpleUri(newUri,
+           base._schemeEnd,
+           base._hostStart,
+           base._portStart,
+           base._pathStart,
+           ref._queryStart + delta,
+           ref._fragmentStart + delta,
+           base._schemeCache);
+      }
+      if (ref.hasFragment) {
+        int delta = base._fragmentStart - ref._fragmentStart;
+        var newUri = base._uri.substring(0, base._fragmentStart) +
+                     ref._uri.substring(ref._fragmentStart);
+        return new _SimpleUri(newUri,
+           base._schemeEnd,
+           base._hostStart,
+           base._portStart,
+           base._pathStart,
+           base._queryStart,
+           ref._fragmentStart + delta,
+           base._schemeCache);
+      }
+      return base.removeFragment();
+    }
+    if (ref.hasAbsolutePath) {
+      var delta = base._pathStart - ref._pathStart;
+      var newUri = base._uri.substring(0, base._pathStart) +
+                   ref._uri.substring(ref._pathStart);
+      return new _SimpleUri(newUri,
+        base._schemeEnd,
+        base._hostStart,
+        base._portStart,
+        base._pathStart,
+        ref._queryStart + delta,
+        ref._fragmentStart + delta,
+        base._schemeCache);
+    }
+    if (base.hasEmptyPath && base.hasAuthority) {
+      // ref has relative non-empty path.
+      // Add a "/" in front, then leading "/../" segments are folded to "/".
+      int refStart = ref._pathStart;
+      while (ref._uri.startsWith("../", refStart)) {
+        refStart += 3;
+      }
+      var delta = base._pathStart - refStart + 1;
+      var newUri = "${base._uri.substring(0, base._pathStart)}/"
+                   "${ref._uri.substring(refStart)}";
+      return new _SimpleUri(newUri,
+        base._schemeEnd,
+        base._hostStart,
+        base._portStart,
+        base._pathStart,
+        ref._queryStart + delta,
+        ref._fragmentStart + delta,
+        base._schemeCache);
+    }
+    // Merge paths.
+    if (base._uri.startsWith("../", base._pathStart)) {
+      // Complex rare case, go slow.
+      return _toNonSimple().resolveUri(ref);
+    }
+
+    // The RFC 3986 algorithm merges the base path without its final segment
+    // (anything after the final "/", or everything if the base path doesn't
+    // contain any "/"), and the reference path.
+    // Then it removes "." and ".." segments using the remove-dot-segments
+    // algorithm.
+    // This code combines the two steps. It is simplified by knowing that
+    // the base path contains no "." or ".." segments, and the reference
+    // path can only contain leading ".." segments.
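+    //
+    // Worked example (a sketch): resolving "../x" against a base path
+    // "a/b/c" skips the leading "../" (backCount becomes 2), then walks the
+    // base path backwards past two '/' separators, leaving "a". With the
+    // inserted "/", the merged result is "a/x", matching RFC 3986's
+    // merge-plus-remove-dot-segments on "a/b/../x".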
+
+    String baseUri = base._uri;
+    String refUri = ref._uri;
+    int baseStart = base._pathStart;
+    int baseEnd = base._queryStart;
+    int refStart = ref._pathStart;
+    int refEnd = ref._queryStart;
+    int backCount = 1;
+
+    // Count leading ".." segments in reference path.
+    while (refStart + 3 <= refEnd && refUri.startsWith("../", refStart)) {
+      refStart += 3;
+      backCount += 1;
+    }
+
+    // Extra slash inserted between base and reference path parts if
+    // the base path contains any slashes.
+    // (We could use a slash from the base path in most cases, but not if
+    // we remove the entire base path).
+    String insert = "";
+    while (baseEnd > baseStart) {
+      baseEnd--;
+      int char = baseUri.codeUnitAt(baseEnd);
+      if (char == _SLASH) {
+        insert = "/";
+        backCount--;
+        if (backCount == 0) break;
+      }
+    }
+    // If the base URI has no scheme or authority (`_pathStart == 0`)
+    // and a relative path, and we reached the beginning of the path,
+    // we have a special case.
+    if (baseEnd == 0 && !base.hasAbsolutePath) {
+      // Non-RFC 3986 behavior when resolving a purely relative path on top of
+      // another relative path: Don't make the result absolute.
+      insert = "";
+    }
+
+    var delta = baseEnd - refStart + insert.length;
+    var newUri = "${base._uri.substring(0, baseEnd)}$insert"
+               "${ref._uri.substring(refStart)}";
+
+    return new _SimpleUri(newUri,
+      base._schemeEnd,
+      base._hostStart,
+      base._portStart,
+      base._pathStart,
+      ref._queryStart + delta,
+      ref._fragmentStart + delta,
+      base._schemeCache);
+  }
+
+  String toFilePath({bool windows}) {
+    if (_schemeEnd >= 0 && !_isFile) {
+      throw new UnsupportedError(
+          "Cannot extract a file path from a $scheme URI");
+    }
+    if (_queryStart < _uri.length) {
+      if (_queryStart < _fragmentStart) {
+        throw new UnsupportedError(
+            "Cannot extract a file path from a URI with a query component");
+      }
+      throw new UnsupportedError(
+          "Cannot extract a file path from a URI with a fragment component");
+    }
+    if (windows == null) windows = _Uri._isWindows;
+    return windows ? _Uri._toWindowsFilePath(this) : _toFilePath();
+  }
+
+  String _toFilePath() {
+    if (_hostStart < _portStart) {
+      // Has authority and non-empty host.
+      throw new UnsupportedError(
+        "Cannot extract a non-Windows file path from a file URI "
+        "with an authority");
+    }
+    return this.path;
+  }
+
+  UriData get data {
+    assert(scheme != "data");
+    return null;
+  }
+
+  int get hashCode => _hashCodeCache ??= _uri.hashCode;
+
+  bool operator==(Object other) {
+    if (identical(this, other)) return true;
+    if (other is Uri) return _uri == other.toString();
+    return false;
+  }
+
+  Uri _toNonSimple() {
+    return new _Uri._internal(
+      this.scheme,
+      this.userInfo,
+      this.hasAuthority ? this.host : null,
+      this.hasPort ? this.port : null,
+      this.path,
+      this.hasQuery ? this.query : null,
+      this.hasFragment ? this.fragment : null
+    );
+  }
+
+  String toString() => _uri;
+}
+
+/// Checks whether [text] starts with "data:" at position [start].
+///
+/// The text must be long enough to allow reading five characters
+/// from the [start] position.
+///
+/// Returns an integer value which is zero if text starts with all-lowercase
+/// "data:" and 0x20 if the text starts with "data:" that isn't all lowercase.
+/// Any other value means the text starts with some other character.
+int _startsWithData(String text, int start) {
+  // Multiply by 3 to prevent a non-colon character from making delta 0x20.
+  int delta = (text.codeUnitAt(start + 4) ^ _COLON) * 3;
+  delta |= text.codeUnitAt(start)     ^ 0x64 /*d*/;
+  delta |= text.codeUnitAt(start + 1) ^ 0x61 /*a*/;
+  delta |= text.codeUnitAt(start + 2) ^ 0x74 /*t*/;
+  delta |= text.codeUnitAt(start + 3) ^ 0x61 /*a*/;
+  return delta;
+}
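+
+// For illustration: `_startsWithData("data:image/png", 0)` yields 0 (all
+// lowercase "data:"), `_startsWithData("DATA:text", 0)` yields 0x20 (each
+// upper-case letter differs from its lower-case form only in bit 0x20), and
+// `_startsWithData("data1...", 0)` yields 0x21 because ('1' ^ ':') * 3 is
+// 0x21, which is neither zero nor 0x20.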
+
+/// Helper function returning the length of a string, or `0` for `null`.
+int _stringOrNullLength(String s) => (s == null) ? 0 : s.length;
diff --git a/sdk/lib/html/dart2js/html_dart2js.dart b/sdk/lib/html/dart2js/html_dart2js.dart
index 0bc3749..c9cd5cb 100644
--- a/sdk/lib/html/dart2js/html_dart2js.dart
+++ b/sdk/lib/html/dart2js/html_dart2js.dart
@@ -19240,6 +19240,109 @@
 // BSD-style license that can be found in the LICENSE file.
 
 
+/**
+ * A task specification for HTTP requests.
+ *
+ * This specification is not available when an HTTP request is sent through
+ * direct use of [HttpRequest.send]. See [HttpRequestSendTaskSpecification].
+ *
+ * A task created from this specification is a `Future<HttpRequest>`.
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+class HttpRequestTaskSpecification extends TaskSpecification {
+  /// The URL of the request.
+  final String url;
+
+  /// The HTTP request method.
+  ///
+  /// By default (when `null`) this is a `"GET"` request. Alternatively, the
+  /// method can be `"POST"`, `"PUT"`, `"DELETE"`, etc.
+  final String method;
+
+  /// Whether the request should send credentials. Credentials are only useful
+  /// for cross-origin requests.
+  ///
+  /// See [HttpRequest.request] for more information.
+  final bool withCredentials;
+
+  /// The desired response format.
+  ///
+  /// Supported types are:
+  /// - `""`: (same as `"text"`),
+  /// - `"arraybuffer"`,
+  /// - `"blob"`,
+  /// - `"document"`,
+  /// - `"json"`,
+  /// - `"text"`
+  ///
+  /// When no value is provided (when equal to `null`), it defaults to `""`.
+  final String responseType;
+
+  /// The desired MIME type.
+  ///
+  /// This overrides the default MIME type which is set up to transfer textual
+  /// data.
+  final String mimeType;
+
+  /// The request headers that should be sent with the request.
+  final Map<String, String> requestHeaders;
+
+  /// The data that is sent with the request.
+  ///
+  /// When data is provided (the value is not `null`), it must be a
+  /// [ByteBuffer], [Blob], [Document], [String], or [FormData].
+  final dynamic sendData;
+
+  /// The function that is invoked on progress updates. This function is
+  /// registered as an event listener on the created [HttpRequest] object, and
+  /// thus has its own task. Further invocations of the progress function do
+  /// *not* use the HTTP request task as task object.
+  ///
+  /// Creating an HTTP request automatically registers the on-progress listener.
+  final ZoneUnaryCallback<dynamic, ProgressEvent> onProgress;
+
+  HttpRequestTaskSpecification(this.url,
+      {String this.method, bool this.withCredentials, String this.responseType,
+      String this.mimeType, Map<String, String> this.requestHeaders,
+      this.sendData,
+      void this.onProgress(ProgressEvent e)});
+
+  String get name => "dart.html.http-request";
+  bool get isOneShot => true;
+}
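+
+// For illustration (a sketch; the URL and payload are made up): a call such
+// as
+//
+//   HttpRequest.request("/data.json", method: "POST", sendData: "payload")
+//
+// builds the equivalent of
+//
+//   new HttpRequestTaskSpecification("/data.json",
+//       method: "POST", sendData: "payload")
+//
+// and passes it to [Zone.createTask], so a non-root zone can observe the
+// request before it is issued.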
+
+/**
+ * A task specification for HTTP requests that are initiated through a direct
+ * invocation of [HttpRequest.send].
+ *
+ * This specification serves as a signal to zones that an HTTP request has been
+ * initiated. The created task is the [request] object itself, and
+ * no callback is ever executed in this task.
+ *
+ * Note that event listeners on the HTTP request are also registered in the
+ * zone (although with their own task creations), and that a zone can thus
+ * detect when the HTTP request returns.
+ *
+ * HTTP requests that are initiated through `request` methods don't use
+ * this class but use [HttpRequestTaskSpecification].
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+class HttpRequestSendTaskSpecification extends TaskSpecification {
+  final HttpRequest request;
+  final dynamic sendData;
+
+  HttpRequestSendTaskSpecification(this.request, this.sendData);
+
+  String get name => "dart.html.http-request-send";
+
+  /**
+   * No callback is ever executed in an HTTP request send task.
+   */
+  bool get isOneShot => false;
+}
+
  /**
   * A client-side XHR request for getting data from a URL,
   * formally known as XMLHttpRequest.
@@ -19428,7 +19531,34 @@
       {String method, bool withCredentials, String responseType,
       String mimeType, Map<String, String> requestHeaders, sendData,
       void onProgress(ProgressEvent e)}) {
+    var spec = new HttpRequestTaskSpecification(
+        url, method: method,
+        withCredentials: withCredentials,
+        responseType: responseType,
+        mimeType: mimeType,
+        requestHeaders: requestHeaders,
+        sendData: sendData,
+        onProgress: onProgress);
+
+    if (identical(Zone.current, Zone.ROOT)) {
+      return _createHttpRequestTask(spec, null);
+    }
+    return Zone.current.createTask(_createHttpRequestTask, spec);
+  }
+
+  static Future<HttpRequest> _createHttpRequestTask(
+      HttpRequestTaskSpecification spec, Zone zone) {
+    String url = spec.url;
+    String method = spec.method;
+    bool withCredentials = spec.withCredentials;
+    String responseType = spec.responseType;
+    String mimeType = spec.mimeType;
+    Map<String, String> requestHeaders = spec.requestHeaders;
+    var sendData = spec.sendData;
+    var onProgress = spec.onProgress;
+
     var completer = new Completer<HttpRequest>();
+    var task = completer.future;
 
     var xhr = new HttpRequest();
     if (method == null) {
@@ -19468,23 +19598,42 @@
       // redirect case will be handled by the browser before it gets to us,
       // so if we see it we should pass it through to the user.
       var unknownRedirect = xhr.status > 307 && xhr.status < 400;
-      
-      if (accepted || fileUri || notModified || unknownRedirect) {
+
+      var isSuccessful = accepted || fileUri || notModified || unknownRedirect;
+
+      if (zone == null && isSuccessful) {
         completer.complete(xhr);
-      } else {
+      } else if (zone == null) {
         completer.completeError(e);
+      } else if (isSuccessful) {
+        zone.runTask((task, value) {
+          completer.complete(value);
+        }, task, xhr);
+      } else {
+        zone.runTask((task, error) {
+          completer.completeError(error);
+        }, task, e);
       }
     });
 
-    xhr.onError.listen(completer.completeError);
-
-    if (sendData != null) {
-      xhr.send(sendData);
+    if (zone == null) {
+      xhr.onError.listen(completer.completeError);
     } else {
-      xhr.send();
+      xhr.onError.listen((error) {
+        zone.runTask((task, error) {
+          completer.completeError(error);
+        }, task, error);
+      });
     }
 
-    return completer.future;
+    if (sendData != null) {
+      // TODO(floitsch): should we go through 'send()' and have nested tasks?
+      xhr._send(sendData);
+    } else {
+      xhr._send();
+    }
+
+    return task;
   }
 
   /**
@@ -19538,6 +19687,9 @@
         return xhr.responseText;
       });
     }
+    // TODO(floitsch): the following code doesn't go through task zones.
+    // Since 'XDomainRequest' is an IE9 feature we should probably just remove
+    // it.
     var completer = new Completer<String>();
     if (method == null) {
       method = 'GET';
@@ -19616,13 +19768,43 @@
    *
    * Note: Most simple HTTP requests can be accomplished using the [getString],
    * [request], [requestCrossOrigin], or [postFormData] methods. Use of this
-   * `open` method is intended only for more complext HTTP requests where
+   * `open` method is intended only for more complex HTTP requests where
    * finer-grained control is needed.
    */
   @DomName('XMLHttpRequest.open')
   @DocsEditable()
   void open(String method, String url, {bool async, String user, String password}) native;
 
+  /**
+   * Sends the request with any given `data`.
+   *
+   * Note: Most simple HTTP requests can be accomplished using the [getString],
+   * [request], [requestCrossOrigin], or [postFormData] methods. Use of this
+   * `send` method is intended only for more complex HTTP requests where
+   * finer-grained control is needed.
+   *
+   * ## Other resources
+   *
+   * * [XMLHttpRequest.send](https://developer.mozilla.org/en-US/docs/DOM/XMLHttpRequest#send%28%29)
+   *   from MDN.
+   */
+  @DomName('XMLHttpRequest.send')
+  @DocsEditable()
+  void send([body_OR_data]) {
+    if (identical(Zone.current, Zone.ROOT)) {
+      _send(body_OR_data);
+    } else {
+      Zone.current.createTask(_createHttpRequestSendTask,
+          new HttpRequestSendTaskSpecification(this, body_OR_data));
+    }
+  }
+
+  static HttpRequest _createHttpRequestSendTask(
+      HttpRequestSendTaskSpecification spec, Zone zone) {
+    spec.request._send(spec.sendData);
+    return spec.request;
+  }
+
   // To suppress missing implicit constructor warnings.
   factory HttpRequest._() { throw new UnsupportedError("Not supported"); }
 
@@ -19893,12 +20075,13 @@
   @SupportedBrowser(SupportedBrowser.SAFARI)
   void overrideMimeType(String mime) native;
 
+  @JSName('send')
   /**
    * Send the request with any given `data`.
    *
    * Note: Most simple HTTP requests can be accomplished using the [getString],
    * [request], [requestCrossOrigin], or [postFormData] methods. Use of this
-   * `send` method is intended only for more complext HTTP requests where
+   * `send` method is intended only for more complex HTTP requests where
    * finer-grained control is needed.
    *
    * ## Other resources
@@ -19908,7 +20091,7 @@
    */
   @DomName('XMLHttpRequest.send')
   @DocsEditable()
-  void send([body_OR_data]) native;
+  void _send([body_OR_data]) native;
 
   /**
    * Sets the value of an HTTP requst header.
@@ -34485,6 +34668,99 @@
 // BSD-style license that can be found in the LICENSE file.
 
 
+typedef void RemoveFrameRequestMapping(int id);
+
+/**
+ * The task object representing animation-frame requests.
+ *
+ * For historical reasons, [Window.requestAnimationFrame] returns an integer
+ * to users. However, zone tasks must be unique objects, and an integer can
+ * therefore not be used as a task object. The [Window] class thus keeps a
+ * mapping from the integer ID to the corresponding task object. All
+ * zone-related operations work on this task object, whereas users of
+ * [Window.requestAnimationFrame] only see the integer ID.
+ *
+ * Since this mapping takes up space, it must be removed when the
+ * animation-frame task has triggered. The default implementation does this
+ * automatically, but intercepting implementations of `requestAnimationFrame`
+ * must make sure to call the [AnimationFrameTask.removeMapping]
+ * function that is provided in the task specification.
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+abstract class AnimationFrameTask {
+  /** The ID that is returned to users. */
+  int get id;
+
+  /** The zone in which the task will run. */
+  Zone get zone;
+
+  /**
+   * Cancels the animation-frame request.
+   *
+   * A call to [Window.cancelAnimationFrame] with an `id` argument equal to [id]
+   * forwards the request to this function.
+   *
+   * Zones that intercept animation-frame requests implement this method so
+   * that they can react to cancelation requests.
+   */
+  void cancel(Window window);
+
+  /**
+   * Maps animation-frame request IDs to their task objects.
+   */
+  static final Map<int, _AnimationFrameTask> _tasks = {};
+
+  /**
+   * Removes the mapping from [id] to [AnimationFrameTask].
+   *
+   * This function must be invoked by user-implemented animation-frame
+   * tasks before running [callback].
+   *
+   * See [AnimationFrameTask].
+   */
+  static void removeMapping(int id) {
+    _tasks.remove(id);
+  }
+}
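+
+// For illustration (a sketch of the obligation described above): an
+// intercepting implementation of `requestAnimationFrame` must mirror the
+// default task-creation function further down in this file:
+//
+//   var id = window._requestAnimationFrame((num time) {
+//     AnimationFrameTask.removeMapping(task.id);  // drop the ID mapping
+//     zone.runTask(run, task, time);              // then run the callback
+//   });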
+
+class _AnimationFrameTask implements AnimationFrameTask {
+  final int id;
+  final Zone zone;
+  final FrameRequestCallback _callback;
+
+  _AnimationFrameTask(this.id, this.zone, this._callback);
+
+  void cancel(Window window) {
+    window._cancelAnimationFrame(this.id);
+  }
+}
+
+/**
+ * The task specification for an animation-frame request.
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+class AnimationFrameRequestSpecification implements TaskSpecification {
+  /**
+   * The window on which [Window.requestAnimationFrame] was invoked.
+   */
+  final Window window;
+
+  /**
+   * The callback that is executed when the animation-frame is ready.
+   *
+   * Note that the callback hasn't been registered in any zone when the `create`
+   * function (passed to [Zone.createTask]) is invoked.
+   */
+  final FrameRequestCallback callback;
+
+  AnimationFrameRequestSpecification(this.window, this.callback);
+
+  String get name => "dart.html.request-animation-frame";
+  bool get isOneShot => true;
+}
+
 @DocsEditable()
 /**
  * Top-level container for the current browser tab or window.
@@ -34540,9 +34816,7 @@
    */
   Future<num> get animationFrame {
     var completer = new Completer<num>.sync();
-    requestAnimationFrame((time) {
-      completer.complete(time);
-    });
+    requestAnimationFrame(completer.complete);
     return completer.future;
   }
 
@@ -34606,6 +34880,7 @@
     JS('void', '#.location = #', this, value);
   }
 
+
   /**
    * Called to draw an animation frame and then request the window to repaint
    * after [callback] has finished (creating the animation).
@@ -34625,7 +34900,30 @@
   @DomName('Window.requestAnimationFrame')
   int requestAnimationFrame(FrameRequestCallback callback) {
     _ensureRequestAnimationFrame();
-    return _requestAnimationFrame(_wrapZone/*<num, dynamic>*/(callback));
+    if (identical(Zone.current, Zone.ROOT)) {
+      return _requestAnimationFrame(callback);
+    }
+    var spec = new AnimationFrameRequestSpecification(this, callback);
+    var task = Zone.current.createTask/*<AnimationFrameTask>*/(
+        _createAnimationFrameTask, spec);
+    AnimationFrameTask._tasks[task.id] = task;
+    return task.id;
+  }
+
+  static _AnimationFrameTask _createAnimationFrameTask(
+      AnimationFrameRequestSpecification spec, Zone zone) {
+    var task;
+    var id = spec.window._requestAnimationFrame((num time) {
+      AnimationFrameTask.removeMapping(task.id);
+      zone.runTask(_runAnimationFrame, task, time);
+    });
+    var callback = zone.registerUnaryCallback(spec.callback);
+    task = new _AnimationFrameTask(id, zone, callback);
+    return task;
+  }
+
+  static void _runAnimationFrame(_AnimationFrameTask task, num time) {
+    task._callback(time);
   }
 
   /**
@@ -34636,9 +34934,16 @@
    * * [Window.cancelAnimationFrame](https://developer.mozilla.org/en-US/docs/Web/API/Window.cancelAnimationFrame)
    *   from MDN.
    */
+  @DomName('Window.cancelAnimationFrame')
   void cancelAnimationFrame(int id) {
     _ensureRequestAnimationFrame();
-    _cancelAnimationFrame(id);
+    var task = AnimationFrameTask._tasks.remove(id);
+    if (task == null) {
+      // Assume that the animation frame request wasn't intercepted by a zone.
+      _cancelAnimationFrame(id);
+      return;
+    }
+    task.cancel(this);
   }
 
   @JSName('requestAnimationFrame')
@@ -34692,7 +34997,6 @@
   @DomName('Window.console')
   Console get console => Console._safeConsole;
 
-
   /**
    * Access a sandboxed file system of the specified `size`. If `persistent` is
    * true, the application will request permission from the user to create
@@ -39964,6 +40268,41 @@
   StreamSubscription<T> capture(void onData(T event));
 }
 
+/// Task specification for DOM Events.
+///
+/// *Experimental*. May disappear without notice.
+class EventSubscriptionSpecification<T extends Event>
+    implements TaskSpecification {
+  @override
+  final String name;
+  @override
+  final bool isOneShot;
+
+  final EventTarget target;
+  /// The event type of the event, for example 'click' for click events.
+  final String eventType;
+  // TODO(floitsch): the first generic argument should be 'void'.
+  final ZoneUnaryCallback<dynamic, T> onData;
+  final bool useCapture;
+
+  EventSubscriptionSpecification({this.name, this.isOneShot, this.target,
+      this.eventType, void this.onData(T event), this.useCapture});
+
+  /// Returns a copy of this instance, with every field for which a non-null
+  /// argument is given replaced by that argument's value.
+  EventSubscriptionSpecification<T> replace(
+      {String name, bool isOneShot, EventTarget target,
+       String eventType, void onData(T event), bool useCapture}) {
+    return new EventSubscriptionSpecification<T>(
+        name: name ?? this.name,
+        isOneShot: isOneShot ?? this.isOneShot,
+        target: target ?? this.target,
+        eventType: eventType ?? this.eventType,
+        onData: onData ?? this.onData,
+        useCapture: useCapture ?? this.useCapture);
+  }
+}
+
 /**
  * Adapter for exposing DOM events as Dart streams.
  */
@@ -39971,8 +40310,16 @@
   final EventTarget _target;
   final String _eventType;
   final bool _useCapture;
+  /// The name that is used in the task specification.
+  final String _name;
+  /// Whether the stream fires at most once.
+  final bool _isOneShot;
 
-  _EventStream(this._target, this._eventType, this._useCapture);
+  _EventStream(this._target, String eventType, this._useCapture,
+      {String name, bool isOneShot: false})
+      : _eventType = eventType,
+        _isOneShot = isOneShot,
+        _name = name ?? "dart.html.event.$eventType";
 
   // DOM events are inherently multi-subscribers.
   Stream<T> asBroadcastStream({void onListen(StreamSubscription<T> subscription),
@@ -39980,13 +40327,31 @@
       => this;
   bool get isBroadcast => true;
 
+  StreamSubscription<T> _listen(
+      void onData(T event), {bool useCapture}) {
+
+    if (identical(Zone.current, Zone.ROOT)) {
+      return new _EventStreamSubscription<T>(
+          this._target, this._eventType, onData, this._useCapture,
+          Zone.current);
+    }
+
+    var specification = new EventSubscriptionSpecification<T>(
+        name: this._name, isOneShot: this._isOneShot,
+        target: this._target, eventType: this._eventType,
+        onData: onData, useCapture: useCapture);
+    // We need to wrap the _createStreamSubscription call, since a tear-off
+    // would not bind the generic type 'T'.
+    return Zone.current.createTask((spec, Zone zone) {
+      return _createStreamSubscription/*<T>*/(spec, zone);
+    }, specification);
+  }
+
   StreamSubscription<T> listen(void onData(T event),
       { Function onError,
         void onDone(),
         bool cancelOnError}) {
-
-    return new _EventStreamSubscription<T>(
-        this._target, this._eventType, onData, this._useCapture);
+    return _listen(onData, useCapture: this._useCapture);
   }
 }
 
@@ -40001,8 +40366,9 @@
  */
 class _ElementEventStreamImpl<T extends Event> extends _EventStream<T>
     implements ElementStream<T> {
-  _ElementEventStreamImpl(target, eventType, useCapture) :
-      super(target, eventType, useCapture);
+  _ElementEventStreamImpl(target, eventType, useCapture,
+      {String name, bool isOneShot: false}) :
+      super(target, eventType, useCapture, name: name, isOneShot: isOneShot);
 
   Stream<T> matches(String selector) => this.where(
       (event) => _matchesWithAncestors(event, selector)).map((e) {
@@ -40010,9 +40376,9 @@
         return e;
       });
 
-  StreamSubscription<T> capture(void onData(T event)) =>
-    new _EventStreamSubscription<T>(
-        this._target, this._eventType, onData, true);
+  StreamSubscription<T> capture(void onData(T event)) {
+    return _listen(onData, useCapture: true);
+  }
 }
 
 /**
@@ -40061,7 +40427,13 @@
   bool get isBroadcast => true;
 }
 
-// We would like this to just be EventListener<T> but that typdef cannot
+StreamSubscription/*<T>*/ _createStreamSubscription/*<T>*/(
+    EventSubscriptionSpecification/*<T>*/ spec, Zone zone) {
+  return new _EventStreamSubscription/*<T>*/(spec.target, spec.eventType,
+      spec.onData, spec.useCapture, zone);
+}
+
+// We would like this to just be EventListener<T> but that typedef cannot
 // use generics until dartbug/26276 is fixed.
 typedef _EventListener<T extends Event>(T event);
 
@@ -40070,15 +40442,19 @@
   EventTarget _target;
   final String _eventType;
   EventListener _onData;
+  EventListener _domCallback;
   final bool _useCapture;
+  final Zone _zone;
 
   // TODO(jacobr): for full strong mode correctness we should write
-  // _onData = onData == null ? null : _wrapZone/*<Event, dynamic>*/((e) => onData(e as T))
+  // _onData = onData == null ? null : _wrapZone/*<dynamic, Event>*/((e) => onData(e as T))
   // but that breaks 114 co19 tests as well as multiple html tests as it is reasonable
   // to pass the wrong type of event object to an event listener as part of a
   // test.
   _EventStreamSubscription(this._target, this._eventType, void onData(T event),
-      this._useCapture) : _onData = _wrapZone/*<Event, dynamic>*/(onData) {
+      this._useCapture, Zone zone)
+      : _zone = zone,
+        _onData = _registerZone/*<dynamic, Event>*/(zone, onData) {
     _tryResume();
   }
 
@@ -40100,7 +40476,7 @@
     }
     // Remove current event listener.
     _unlisten();
-    _onData = _wrapZone/*<Event, dynamic>*/(handleData);
+    _onData = _registerZone/*<dynamic, Event>*/(_zone, handleData);
     _tryResume();
   }
 
@@ -40129,14 +40505,25 @@
   }
 
   void _tryResume() {
-    if (_onData != null && !isPaused) {
-      _target.addEventListener(_eventType, _onData, _useCapture);
+    if (_onData == null || isPaused) return;
+    if (identical(_zone, Zone.ROOT)) {
+      _domCallback = _onData;
+    } else {
+      _domCallback = (event) {
+        _zone.runTask(_runEventNotification, this, event);
+      };
     }
+    _target.addEventListener(_eventType, _domCallback, _useCapture);
+  }
+
+  static void _runEventNotification/*<T>*/(
+      _EventStreamSubscription/*<T>*/ subscription, /*=T*/ event) {
+    subscription._onData(event);
   }
 
   void _unlisten() {
     if (_onData != null) {
-      _target.removeEventListener(_eventType, _onData, _useCapture);
+      _target.removeEventListener(_eventType, _domCallback, _useCapture);
     }
   }
 
@@ -43370,31 +43757,26 @@
 // BSD-style license that can be found in the LICENSE file.
 
 
-// TODO(jacobr): remove these typedefs when dart:async supports generic types.
-typedef R _wrapZoneCallback<A, R>(A a);
-typedef R _wrapZoneBinaryCallback<A, B, R>(A a, B b);
-
-_wrapZoneCallback/*<A, R>*/ _wrapZone/*<A, R>*/(_wrapZoneCallback/*<A, R>*/ callback) {
-  // For performance reasons avoid wrapping if we are in the root zone.
-  if (Zone.current == Zone.ROOT) return callback;
+ZoneUnaryCallback/*<R, T>*/ _registerZone/*<R, T>*/(Zone zone,
+    ZoneUnaryCallback/*<R, T>*/ callback) {
+  // For performance reasons avoid registering if we are in the root zone.
+  if (identical(zone, Zone.ROOT)) return callback;
   if (callback == null) return null;
-  // TODO(jacobr): we cast to _wrapZoneCallback/*<A, R>*/ to hack around missing
-  // generic method support in zones.
-  // ignore: STRONG_MODE_DOWN_CAST_COMPOSITE
-  _wrapZoneCallback/*<A, R>*/ wrapped =
-      Zone.current.bindUnaryCallback(callback, runGuarded: true);
-  return wrapped;
+  return zone.registerUnaryCallback(callback);
 }
 
-_wrapZoneBinaryCallback/*<A, B, R>*/ _wrapBinaryZone/*<A, B, R>*/(_wrapZoneBinaryCallback/*<A, B, R>*/ callback) {
-  if (Zone.current == Zone.ROOT) return callback;
+ZoneUnaryCallback/*<R, T>*/ _wrapZone/*<R, T>*/(ZoneUnaryCallback/*<R, T>*/ callback) {
+  // For performance reasons avoid wrapping if we are in the root zone.
+  if (identical(Zone.current, Zone.ROOT)) return callback;
   if (callback == null) return null;
-  // We cast to _wrapZoneBinaryCallback/*<A, B, R>*/ to hack around missing
-  // generic method support in zones.
-  // ignore: STRONG_MODE_DOWN_CAST_COMPOSITE
-  _wrapZoneBinaryCallback/*<A, B, R>*/ wrapped =
-      Zone.current.bindBinaryCallback(callback, runGuarded: true);
-  return wrapped;
+  return Zone.current.bindUnaryCallback(callback, runGuarded: true);
+}
+
+ZoneBinaryCallback/*<R, A, B>*/ _wrapBinaryZone/*<R, A, B>*/(
+    ZoneBinaryCallback/*<R, A, B>*/ callback) {
+  if (identical(Zone.current, Zone.ROOT)) return callback;
+  if (callback == null) return null;
+  return Zone.current.bindBinaryCallback(callback, runGuarded: true);
 }
 
 /**
diff --git a/sdk/lib/html/dartium/html_dartium.dart b/sdk/lib/html/dartium/html_dartium.dart
index bc92d6d..b9da2c4 100644
--- a/sdk/lib/html/dartium/html_dartium.dart
+++ b/sdk/lib/html/dartium/html_dartium.dart
@@ -20997,6 +20997,109 @@
 // BSD-style license that can be found in the LICENSE file.
 
 
+/**
+ * A task specification for HTTP requests.
+ *
+ * This specification is not available when an HTTP request is sent through
+ * direct use of [HttpRequest.send]. See [HttpRequestSendTaskSpecification].
+ *
+ * A task created from this specification is a `Future<HttpRequest>`.
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+class HttpRequestTaskSpecification extends TaskSpecification {
+  /// The URL of the request.
+  final String url;
+
+  /// The HTTP request method.
+  ///
+  /// By default (when `null`) this is a `"GET"` request. Alternatively, the
+  /// method can be `"POST"`, `"PUT"`, `"DELETE"`, etc.
+  final String method;
+
+  /// Whether the request should send credentials. Credentials are only useful
+  /// for cross-origin requests.
+  ///
+  /// See [HttpRequest.request] for more information.
+  final bool withCredentials;
+
+  /// The desired response format.
+  ///
+  /// Supported types are:
+  /// - `""`: (same as `"text"`),
+  /// - `"arraybuffer"`,
+  /// - `"blob"`,
+  /// - `"document"`,
+  /// - `"json"`,
+  /// - `"text"`
+  ///
+  /// When no value is provided (when equal to `null`), it defaults to `""`.
+  final String responseType;
+
+  /// The desired MIME type.
+  ///
+  /// This overrides the default MIME type which is set up to transfer textual
+  /// data.
+  final String mimeType;
+
+  /// The request headers that should be sent with the request.
+  final Map<String, String> requestHeaders;
+
+  /// The data that is sent with the request.
+  ///
+  /// When data is provided (the value is not `null`), it must be a
+  /// [ByteBuffer], [Blob], [Document], [String], or [FormData].
+  final dynamic sendData;
+
+  /// The function that is invoked on progress updates. This function is
+  /// registered as an event listener on the created [HttpRequest] object, and
+  /// thus has its own task. Further invocations of the progress function do
+  /// *not* use the HTTP request task as task object.
+  ///
+  /// Creating an HTTP request automatically registers the on-progress listener.
+  final ZoneUnaryCallback<dynamic, ProgressEvent> onProgress;
+
+  HttpRequestTaskSpecification(this.url,
+      {String this.method, bool this.withCredentials, String this.responseType,
+      String this.mimeType, Map<String, String> this.requestHeaders,
+      this.sendData,
+      void this.onProgress(ProgressEvent e)});
+
+  String get name => "dart.html.http-request";
+  bool get isOneShot => true;
+}
+
+/**
+ * A task specification for HTTP requests that are initiated through a direct
+ * invocation of [HttpRequest.send].
+ *
+ * This specification serves as a signal to zones that an HTTP request has been
+ * initiated. The created task is the [request] object itself, and
+ * no callback is ever executed in this task.
+ *
+ * Note that event listeners on the HTTP request are also registered in the
+ * zone (although with their own task creations), and that a zone can thus
+ * detect when the HTTP request returns.
+ *
+ * HTTP requests that are initiated through `request` methods don't use
+ * this class but use [HttpRequestTaskSpecification].
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+class HttpRequestSendTaskSpecification extends TaskSpecification {
+  final HttpRequest request;
+  final dynamic sendData;
+
+  HttpRequestSendTaskSpecification(this.request, this.sendData);
+
+  String get name => "dart.html.http-request-send";
+
+  /**
+   * No callback is ever executed in an HTTP request send task.
+   */
+  bool get isOneShot => false;
+}
+
  /**
   * A client-side XHR request for getting data from a URL,
   * formally known as XMLHttpRequest.
@@ -21184,7 +21287,34 @@
       {String method, bool withCredentials, String responseType,
       String mimeType, Map<String, String> requestHeaders, sendData,
       void onProgress(ProgressEvent e)}) {
+    var spec = new HttpRequestTaskSpecification(
+        url, method: method,
+        withCredentials: withCredentials,
+        responseType: responseType,
+        mimeType: mimeType,
+        requestHeaders: requestHeaders,
+        sendData: sendData,
+        onProgress: onProgress);
+
+    if (identical(Zone.current, Zone.ROOT)) {
+      return _createHttpRequestTask(spec, null);
+    }
+    return Zone.current.createTask(_createHttpRequestTask, spec);
+  }
+
+  static Future<HttpRequest> _createHttpRequestTask(
+      HttpRequestTaskSpecification spec, Zone zone) {
+    String url = spec.url;
+    String method = spec.method;
+    bool withCredentials = spec.withCredentials;
+    String responseType = spec.responseType;
+    String mimeType = spec.mimeType;
+    Map<String, String> requestHeaders = spec.requestHeaders;
+    var sendData = spec.sendData;
+    var onProgress = spec.onProgress;
+
     var completer = new Completer<HttpRequest>();
+    var task = completer.future;
 
     var xhr = new HttpRequest();
     if (method == null) {
@@ -21224,23 +21354,42 @@
       // redirect case will be handled by the browser before it gets to us,
       // so if we see it we should pass it through to the user.
       var unknownRedirect = xhr.status > 307 && xhr.status < 400;
-      
-      if (accepted || fileUri || notModified || unknownRedirect) {
+
+      var isSuccessful = accepted || fileUri || notModified || unknownRedirect;
+
+      if (zone == null && isSuccessful) {
         completer.complete(xhr);
-      } else {
+      } else if (zone == null) {
         completer.completeError(e);
+      } else if (isSuccessful) {
+        zone.runTask((task, value) {
+          completer.complete(value);
+        }, task, xhr);
+      } else {
+        zone.runTask((task, error) {
+          completer.completeError(error);
+        }, task, e);
       }
     });
 
-    xhr.onError.listen(completer.completeError);
-
-    if (sendData != null) {
-      xhr.send(sendData);
+    if (zone == null) {
+      xhr.onError.listen(completer.completeError);
     } else {
-      xhr.send();
+      xhr.onError.listen((error) {
+        zone.runTask((task, error) {
+          completer.completeError(error);
+        }, task, error);
+      });
     }
 
-    return completer.future;
+    if (sendData != null) {
+      // TODO(floitsch): should we go through 'send()' and have nested tasks?
+      xhr._send(sendData);
+    } else {
+      xhr._send();
+    }
+
+    return task;
   }
 
   /**
@@ -21290,6 +21439,9 @@
         return xhr.responseText;
       });
     }
+    // TODO(floitsch): the following code doesn't go through task zones.
+    // Since 'XDomainRequest' is an IE9 feature we should probably just remove
+    // it.
   }
 
   /**
@@ -21340,7 +21492,7 @@
    *
    * Note: Most simple HTTP requests can be accomplished using the [getString],
    * [request], [requestCrossOrigin], or [postFormData] methods. Use of this
-   * `open` method is intended only for more complext HTTP requests where
+   * `open` method is intended only for more complex HTTP requests where
    * finer-grained control is needed.
    */
   @DomName('XMLHttpRequest.open')
@@ -21353,6 +21505,36 @@
     }
   }
 
+  /**
+   * Sends the request with any given `data`.
+   *
+   * Note: Most simple HTTP requests can be accomplished using the [getString],
+   * [request], [requestCrossOrigin], or [postFormData] methods. Use of this
+   * `send` method is intended only for more complex HTTP requests where
+   * finer-grained control is needed.
+   *
+   * ## Other resources
+   *
+   * * [XMLHttpRequest.send](https://developer.mozilla.org/en-US/docs/DOM/XMLHttpRequest#send%28%29)
+   *   from MDN.
+   */
+  @DomName('XMLHttpRequest.send')
+  @DocsEditable()
+  void send([body_OR_data]) {
+    if (identical(Zone.current, Zone.ROOT)) {
+      _send(body_OR_data);
+    } else {
+      Zone.current.createTask(_createHttpRequestSendTask,
+          new HttpRequestSendTaskSpecification(this, body_OR_data));
+    }
+  }
+
+  static HttpRequest _createHttpRequestSendTask(
+      HttpRequestSendTaskSpecification spec, Zone zone) {
+    spec.request._send(spec.sendData);
+    return spec.request;
+  }
+
   // To suppress missing implicit constructor warnings.
   factory HttpRequest._() { throw new UnsupportedError("Not supported"); }
 
@@ -21656,7 +21838,7 @@
   @SupportedBrowser(SupportedBrowser.SAFARI)
   void overrideMimeType(String mime) => _blink.BlinkXMLHttpRequest.instance.overrideMimeType_Callback_1_(this, mime);
   
-  void send([body_OR_data]) {
+  void _send([body_OR_data]) {
     if (body_OR_data != null) {
       _blink.BlinkXMLHttpRequest.instance.send_Callback_1_(this, body_OR_data);
       return;
@@ -37662,10 +37844,10 @@
     if ((blob_OR_source_OR_stream is Blob || blob_OR_source_OR_stream == null)) {
       return _blink.BlinkURL.instance.createObjectURL_Callback_1_(blob_OR_source_OR_stream);
     }
-    if ((blob_OR_source_OR_stream is MediaStream)) {
+    if ((blob_OR_source_OR_stream is MediaSource)) {
       return _blink.BlinkURL.instance.createObjectURL_Callback_1_(blob_OR_source_OR_stream);
     }
-    if ((blob_OR_source_OR_stream is MediaSource)) {
+    if ((blob_OR_source_OR_stream is MediaStream)) {
       return _blink.BlinkURL.instance.createObjectURL_Callback_1_(blob_OR_source_OR_stream);
     }
     throw new ArgumentError("Incorrect number or type of arguments");
@@ -39131,6 +39313,99 @@
 // BSD-style license that can be found in the LICENSE file.
 
 
+typedef void RemoveFrameRequestMapping(int id);
+
+/**
+ * The task object representing animation-frame requests.
+ *
+ * For historical reasons, [Window.requestAnimationFrame] returns an integer
+ * to users. However, zone tasks must be unique objects, and an integer can
+ * therefore not be used as a task object. The [Window] class thus keeps a
+ * mapping from the integer ID to the corresponding task object. All
+ * zone-related operations work on this task object, whereas users of
+ * [Window.requestAnimationFrame] only see the integer ID.
+ *
+ * Since this mapping takes up space, it must be removed when the
+ * animation-frame task has triggered. The default implementation does this
+ * automatically, but intercepting implementations of `requestAnimationFrame`
+ * must make sure to call the [AnimationFrameTask.removeMapping]
+ * function that is provided in the task specification.
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+abstract class AnimationFrameTask {
+  /** The ID that is returned to users. */
+  int get id;
+
+  /** The zone in which the task will run. */
+  Zone get zone;
+
+  /**
+   * Cancels the animation-frame request.
+   *
+   * A call to [Window.cancelAnimationFrame] with an `id` argument equal to [id]
+   * forwards the request to this function.
+   *
+   * Zones that intercept animation-frame requests implement this method so
+ * that they can react to cancellation requests.
+   */
+  void cancel(Window window);
+
+  /**
+   * Maps animation-frame request IDs to their task objects.
+   */
+  static final Map<int, _AnimationFrameTask> _tasks = {};
+
+  /**
+   * Removes the mapping from [id] to [AnimationFrameTask].
+   *
+   * This function must be invoked by user-implemented animation-frame
+   * tasks before running the task's callback.
+   *
+   * See [AnimationFrameTask].
+   */
+  static void removeMapping(int id) {
+    _tasks.remove(id);
+  }
+}
+
+class _AnimationFrameTask implements AnimationFrameTask {
+  final int id;
+  final Zone zone;
+  final FrameRequestCallback _callback;
+
+  _AnimationFrameTask(this.id, this.zone, this._callback);
+
+  void cancel(Window window) {
+    window._cancelAnimationFrame(this.id);
+  }
+}
+
+/**
+ * The task specification for an animation-frame request.
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+class AnimationFrameRequestSpecification implements TaskSpecification {
+  /**
+   * The window on which [Window.requestAnimationFrame] was invoked.
+   */
+  final Window window;
+
+  /**
+   * The callback that is executed when the animation-frame is ready.
+   *
+   * Note that the callback hasn't been registered in any zone when the `create`
+   * function (passed to [Zone.createTask]) is invoked.
+   */
+  final FrameRequestCallback callback;
+
+  AnimationFrameRequestSpecification(this.window, this.callback);
+
+  String get name => "dart.html.request-animation-frame";
+  bool get isOneShot => true;
+}
+
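
Taken together, these classes let zones intercept animation-frame requests
while callers keep seeing plain integer ids. A sketch of the observable
behavior, using only the public API introduced above:

  import 'dart:async';
  import 'dart:html';

  main() {
    runZoned(() {
      // In a non-root zone the request becomes a zone task; the returned id
      // stays mapped to that task until the frame fires.
      int id = window.requestAnimationFrame((num time) {
        print('frame at $time');
      });
      print('requested frame $id');
    });
  }
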
 @DocsEditable()
 /**
  * Top-level container for the current browser tab or window.
@@ -39185,12 +39460,11 @@
    */
   Future<num> get animationFrame {
     var completer = new Completer<num>.sync();
-    requestAnimationFrame((time) {
-      completer.complete(time);
-    });
+    requestAnimationFrame(completer.complete);
     return completer.future;
   }
 
+
   /**
    * Called to draw an animation frame and then request the window to repaint
    * after [callback] has finished (creating the animation).
@@ -39209,9 +39483,52 @@
    */
   @DomName('Window.requestAnimationFrame')
   int requestAnimationFrame(FrameRequestCallback callback) {
-    return _requestAnimationFrame(_wrapZone(callback));
+    if (identical(Zone.current, Zone.ROOT)) {
+      return _requestAnimationFrame(callback);
+    }
+    var spec = new AnimationFrameRequestSpecification(this, callback);
+    var task = Zone.current.createTask/*<AnimationFrameTask>*/(
+        _createAnimationFrameTask, spec);
+    AnimationFrameTask._tasks[task.id] = task;
+    return task.id;
   }
 
+  static _AnimationFrameTask _createAnimationFrameTask(
+      AnimationFrameRequestSpecification spec, Zone zone) {
+    var task;
+    var id = spec.window._requestAnimationFrame((num time) {
+      AnimationFrameTask.removeMapping(task.id);
+      zone.runTask(_runAnimationFrame, task, time);
+    });
+    var callback = zone.registerUnaryCallback(spec.callback);
+    task = new _AnimationFrameTask(id, zone, callback);
+    return task;
+  }
+
+  static void _runAnimationFrame(_AnimationFrameTask task, num time) {
+    task._callback(time);
+  }
+
+  /**
+   * Cancels an animation frame request.
+   *
+   * ## Other resources
+   *
+   * * [Window.cancelAnimationFrame](https://developer.mozilla.org/en-US/docs/Web/API/Window.cancelAnimationFrame)
+   *   from MDN.
+   */
+  @DomName('Window.cancelAnimationFrame')
+  void cancelAnimationFrame(int id) {
+    var task = AnimationFrameTask._tasks.remove(id);
+    if (task == null) {
+      // Assume that the animation frame request wasn't intercepted by a zone.
+      _cancelAnimationFrame(id);
+      return;
+    }
+    task.cancel(this);
+  }
+
+
   /**
    * Access a sandboxed file system of the specified `size`. If `persistent` is
    * true, the application will request permission from the user to create
@@ -39978,7 +40295,7 @@
 
   @DomName('Window.cancelAnimationFrame')
   @DocsEditable()
-  void cancelAnimationFrame(int handle) => _blink.BlinkWindow.instance.cancelAnimationFrame_Callback_1_(this, handle);
+  void _cancelAnimationFrame(int handle) => _blink.BlinkWindow.instance.cancelAnimationFrame_Callback_1_(this, handle);
   
   @DomName('Window.close')
   @DocsEditable()
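
`cancelAnimationFrame` is now zone-aware: it first removes a pending task
mapping and only falls back to the renamed native `_cancelAnimationFrame`
when no zone task exists for the id. For example (sketch):

  import 'dart:html';

  main() {
    int id = window.requestAnimationFrame((num time) => print(time));
    // Removes the id->task mapping if present; otherwise cancels natively.
    window.cancelAnimationFrame(id);
  }
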
@@ -44492,6 +44809,41 @@
   StreamSubscription<T> capture(void onData(T event));
 }
 
+/// Task specification for DOM events.
+///
+/// *Experimental*. May disappear without notice.
+class EventSubscriptionSpecification<T extends Event>
+    implements TaskSpecification {
+  @override
+  final String name;
+  @override
+  final bool isOneShot;
+
+  final EventTarget target;
+  /// The event type, for example 'click' for click events.
+  final String eventType;
+  // TODO(floitsch): the first generic argument should be 'void'.
+  final ZoneUnaryCallback<dynamic, T> onData;
+  final bool useCapture;
+
+  EventSubscriptionSpecification({this.name, this.isOneShot, this.target,
+      this.eventType, void this.onData(T event), this.useCapture});
+
+  /// Returns a copy of this instance, with each non-null argument replacing
+  /// the corresponding field.
+  EventSubscriptionSpecification<T> replace(
+      {String name, bool isOneShot, EventTarget target,
+       String eventType, void onData(T event), bool useCapture}) {
+    return new EventSubscriptionSpecification<T>(
+        name: name ?? this.name,
+        isOneShot: isOneShot ?? this.isOneShot,
+        target: target ?? this.target,
+        eventType: eventType ?? this.eventType,
+        onData: onData ?? this.onData,
+        useCapture: useCapture ?? this.useCapture);
+  }
+}
+
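
`replace` gives an intercepting zone a cheap way to delegate a modified
subscription. A sketch of its copy-with semantics (the values are
hypothetical):

  import 'dart:html';

  void demo() {
    var spec = new EventSubscriptionSpecification<MouseEvent>(
        name: 'dart.html.event.click', isOneShot: false,
        target: document.body, eventType: 'click',
        onData: (MouseEvent e) => print(e.client), useCapture: false);
    // Copies the spec, overriding only useCapture.
    var capturing = spec.replace(useCapture: true);
    assert(capturing.eventType == 'click' && capturing.useCapture);
  }
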
 /**
  * Adapter for exposing DOM events as Dart streams.
  */
@@ -44499,8 +44851,16 @@
   final EventTarget _target;
   final String _eventType;
   final bool _useCapture;
+  /// The name that is used in the task specification.
+  final String _name;
+  /// Whether the stream can trigger at most once.
+  final bool _isOneShot;
 
-  _EventStream(this._target, this._eventType, this._useCapture);
+  _EventStream(this._target, String eventType, this._useCapture,
+      {String name, bool isOneShot: false})
+      : _eventType = eventType,
+        _isOneShot = isOneShot,
+        _name = name ?? "dart.html.event.$eventType";
 
   // DOM events are inherently multi-subscribers.
   Stream<T> asBroadcastStream({void onListen(StreamSubscription<T> subscription),
@@ -44508,13 +44868,31 @@
       => this;
   bool get isBroadcast => true;
 
+  StreamSubscription<T> _listen(
+      void onData(T event), {bool useCapture}) {
+
+    if (identical(Zone.current, Zone.ROOT)) {
+      return new _EventStreamSubscription<T>(
+          this._target, this._eventType, onData, this._useCapture,
+          Zone.current);
+    }
+
+    var specification = new EventSubscriptionSpecification<T>(
+        name: this._name, isOneShot: this._isOneShot,
+        target: this._target, eventType: this._eventType,
+        onData: onData, useCapture: useCapture);
+    // We need to wrap the _createStreamSubscription call, since a tear-off
+    // would not bind the generic type 'T'.
+    return Zone.current.createTask((spec, Zone zone) {
+      return _createStreamSubscription/*<T>*/(spec, zone);
+    }, specification);
+  }
+
   StreamSubscription<T> listen(void onData(T event),
       { Function onError,
         void onDone(),
         bool cancelOnError}) {
-
-    return new _EventStreamSubscription<T>(
-        this._target, this._eventType, onData, this._useCapture);
+    return _listen(onData, useCapture: this._useCapture);
   }
 }
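
Only non-root zones pay for task creation when listening; the root zone keeps
the previous direct path. Sketch:

  import 'dart:async';
  import 'dart:html';

  main() {
    // Root zone: the subscription is created directly.
    document.body.onClick.listen((e) => print('clicked'));
    runZoned(() {
      // Non-root zone: the subscription goes through Zone.createTask,
      // described by an EventSubscriptionSpecification.
      document.body.onClick.listen((e) => print('zoned click'));
    });
  }
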
 
@@ -44529,8 +44907,9 @@
  */
 class _ElementEventStreamImpl<T extends Event> extends _EventStream<T>
     implements ElementStream<T> {
-  _ElementEventStreamImpl(target, eventType, useCapture) :
-      super(target, eventType, useCapture);
+  _ElementEventStreamImpl(target, eventType, useCapture,
+      {String name, bool isOneShot: false}) :
+      super(target, eventType, useCapture, name: name, isOneShot: isOneShot);
 
   Stream<T> matches(String selector) => this.where(
       (event) => _matchesWithAncestors(event, selector)).map((e) {
@@ -44538,9 +44917,9 @@
         return e;
       });
 
-  StreamSubscription<T> capture(void onData(T event)) =>
-    new _EventStreamSubscription<T>(
-        this._target, this._eventType, onData, true);
+  StreamSubscription<T> capture(void onData(T event)) {
+    return _listen(onData, useCapture: true);
+  }
 }
 
 /**
@@ -44589,7 +44968,13 @@
   bool get isBroadcast => true;
 }
 
-// We would like this to just be EventListener<T> but that typdef cannot
+StreamSubscription/*<T>*/ _createStreamSubscription/*<T>*/(
+    EventSubscriptionSpecification/*<T>*/ spec, Zone zone) {
+  return new _EventStreamSubscription/*<T>*/(spec.target, spec.eventType,
+      spec.onData, spec.useCapture, zone);
+}
+
+// We would like this to just be EventListener<T> but that typedef cannot
 // use generics until dartbug/26276 is fixed.
 typedef _EventListener<T extends Event>(T event);
 
@@ -44598,15 +44983,19 @@
   EventTarget _target;
   final String _eventType;
   EventListener _onData;
+  EventListener _domCallback;
   final bool _useCapture;
+  final Zone _zone;
 
   // TODO(jacobr): for full strong mode correctness we should write
-  // _onData = onData == null ? null : _wrapZone/*<Event, dynamic>*/((e) => onData(e as T))
+  // _onData = onData == null ? null : _wrapZone/*<dynamic, Event>*/((e) => onData(e as T))
   // but that breaks 114 co19 tests as well as multiple html tests as it is reasonable
   // to pass the wrong type of event object to an event listener as part of a
   // test.
   _EventStreamSubscription(this._target, this._eventType, void onData(T event),
-      this._useCapture) : _onData = _wrapZone/*<Event, dynamic>*/(onData) {
+      this._useCapture, Zone zone)
+      : _zone = zone,
+        _onData = _registerZone/*<dynamic, Event>*/(zone, onData) {
     _tryResume();
   }
 
@@ -44628,7 +45017,7 @@
     }
     // Remove current event listener.
     _unlisten();
-    _onData = _wrapZone/*<Event, dynamic>*/(handleData);
+    _onData = _registerZone/*<dynamic, Event>*/(_zone, handleData);
     _tryResume();
   }
 
@@ -44657,14 +45046,25 @@
   }
 
   void _tryResume() {
-    if (_onData != null && !isPaused) {
-      _target.addEventListener(_eventType, _onData, _useCapture);
+    if (_onData == null || isPaused) return;
+    if (identical(_zone, Zone.ROOT)) {
+      _domCallback = _onData;
+    } else {
+      _domCallback = (event) {
+        _zone.runTask(_runEventNotification, this, event);
+      };
     }
+    _target.addEventListener(_eventType, _domCallback, _useCapture);
+  }
+
+  static void _runEventNotification/*<T>*/(
+      _EventStreamSubscription/*<T>*/ subscription, /*=T*/ event) {
+    subscription._onData(event);
   }
 
   void _unlisten() {
     if (_onData != null) {
-      _target.removeEventListener(_eventType, _onData, _useCapture);
+      _target.removeEventListener(_eventType, _domCallback, _useCapture);
     }
   }
 
@@ -47856,31 +48256,26 @@
 // BSD-style license that can be found in the LICENSE file.
 
 
-// TODO(jacobr): remove these typedefs when dart:async supports generic types.
-typedef R _wrapZoneCallback<A, R>(A a);
-typedef R _wrapZoneBinaryCallback<A, B, R>(A a, B b);
-
-_wrapZoneCallback/*<A, R>*/ _wrapZone/*<A, R>*/(_wrapZoneCallback/*<A, R>*/ callback) {
-  // For performance reasons avoid wrapping if we are in the root zone.
-  if (Zone.current == Zone.ROOT) return callback;
+ZoneUnaryCallback/*<R, T>*/ _registerZone/*<R, T>*/(Zone zone,
+    ZoneUnaryCallback/*<R, T>*/ callback) {
+  // For performance reasons avoid registering if we are in the root zone.
+  if (identical(zone, Zone.ROOT)) return callback;
   if (callback == null) return null;
-  // TODO(jacobr): we cast to _wrapZoneCallback/*<A, R>*/ to hack around missing
-  // generic method support in zones.
-  // ignore: STRONG_MODE_DOWN_CAST_COMPOSITE
-  _wrapZoneCallback/*<A, R>*/ wrapped =
-      Zone.current.bindUnaryCallback(callback, runGuarded: true);
-  return wrapped;
+  return zone.registerUnaryCallback(callback);
 }
 
-_wrapZoneBinaryCallback/*<A, B, R>*/ _wrapBinaryZone/*<A, B, R>*/(_wrapZoneBinaryCallback/*<A, B, R>*/ callback) {
-  if (Zone.current == Zone.ROOT) return callback;
+ZoneUnaryCallback/*<R, T>*/ _wrapZone/*<R, T>*/(ZoneUnaryCallback/*<R, T>*/ callback) {
+  // For performance reasons avoid wrapping if we are in the root zone.
+  if (identical(Zone.current, Zone.ROOT)) return callback;
   if (callback == null) return null;
-  // We cast to _wrapZoneBinaryCallback/*<A, B, R>*/ to hack around missing
-  // generic method support in zones.
-  // ignore: STRONG_MODE_DOWN_CAST_COMPOSITE
-  _wrapZoneBinaryCallback/*<A, B, R>*/ wrapped =
-      Zone.current.bindBinaryCallback(callback, runGuarded: true);
-  return wrapped;
+  return Zone.current.bindUnaryCallback(callback, runGuarded: true);
+}
+
+ZoneBinaryCallback/*<R, A, B>*/ _wrapBinaryZone/*<R, A, B>*/(
+    ZoneBinaryCallback/*<R, A, B>*/ callback) {
+  if (identical(Zone.current, Zone.ROOT)) return callback;
+  if (callback == null) return null;
+  return Zone.current.bindBinaryCallback(callback, runGuarded: true);
 }
 
 /**
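
The split matters: `_registerZone` only lets the given zone intercept or
replace the callback (the event machinery later runs it through
`zone.runTask`), whereas `_wrapZone` still returns a wrapper that re-enters
the current zone on every call. A sketch of the difference:

  import 'dart:async';

  main() {
    var zone = Zone.current.fork();
    printArg(int x) => print(x);
    // May be replaced by the zone, but invoking it does not enter `zone`.
    var registered = zone.registerUnaryCallback(printArg);
    // Invoking this runs printArg inside `zone`, guarded.
    var bound = zone.bindUnaryCallback(printArg, runGuarded: true);
    registered(1);
    bound(2);
  }
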
diff --git a/sdk/lib/io/process.dart b/sdk/lib/io/process.dart
index 93aa63e..6159113 100644
--- a/sdk/lib/io/process.dart
+++ b/sdk/lib/io/process.dart
@@ -445,7 +445,7 @@
   /**
    * Standard output from the process. The value used for the
    * `stdoutEncoding` argument to `Process.run` determines the type. If
-   * `null` was used this value is of type `List<int> otherwise it is
+   * `null` was used this value is of type `List<int>` otherwise it is
    * of type `String`.
    */
   final stdout;
@@ -453,7 +453,7 @@
   /**
    * Standard error from the process. The value used for the
    * `stderrEncoding` argument to `Process.run` determines the type. If
-   * `null` was used this value is of type `List<int>
+   * `null` was used this value is of type `List<int>`
    * otherwise it is of type `String`.
    */
   final stderr;
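
The runtime type of `stdout` and `stderr` follows the encoding passed to
`Process.run`. Sketch:

  import 'dart:io';

  main() async {
    // Default encoding: stdout is decoded to a String.
    var r1 = await Process.run('echo', ['hi']);
    print(r1.stdout is String);    // true
    // stdoutEncoding: null keeps the raw bytes.
    var r2 = await Process.run('echo', ['hi'], stdoutEncoding: null);
    print(r2.stdout is List<int>); // true
  }
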
diff --git a/sdk/lib/js/dart2js/js_dart2js.dart b/sdk/lib/js/dart2js/js_dart2js.dart
index f98b29d..4b3e4a6 100644
--- a/sdk/lib/js/dart2js/js_dart2js.dart
+++ b/sdk/lib/js/dart2js/js_dart2js.dart
@@ -707,7 +707,7 @@
   return Function.apply(callback, [self]..addAll(arguments));
 }
 
-Function allowInterop(Function f) {
+Function /*=F*/ allowInterop/*<F extends Function>*/(Function /*=F*/ f) {
   if (JS('bool', 'typeof(#) == "function"', f)) {
     // Already supports interop, just use the existing function.
     return f;
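
Making `allowInterop` generic preserves the static type of the wrapped
function (under strong mode), so the result can be used wherever the original
function type is expected, without a downcast. Sketch:

  import 'dart:js';

  typedef int Adder(int a, int b);

  main() {
    int add(int a, int b) => a + b;
    Adder jsAdd = allowInterop(add); // statically an Adder, not just Function
    context['add'] = jsAdd;
  }
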
diff --git a/sdk/lib/js/dartium/js_dartium.dart b/sdk/lib/js/dartium/js_dartium.dart
index 8d0b248..4d1d682 100644
--- a/sdk/lib/js/dartium/js_dartium.dart
+++ b/sdk/lib/js/dartium/js_dartium.dart
@@ -1528,7 +1528,7 @@
 /// JavaScript. We may remove the need to call this method completely in the
 /// future if Dart2Js is refactored so that its function calling conventions
 /// are more compatible with JavaScript.
-JSFunction allowInterop(Function f) {
+Function /*=F*/ allowInterop/*<F extends Function>*/(Function /*=F*/ f) {
   if (f is JSFunction) {
     // The function is already a JSFunction... no need to do anything.
     return f;
diff --git a/sdk/lib/vmservice/devfs.dart b/sdk/lib/vmservice/devfs.dart
new file mode 100644
index 0000000..35af748
--- /dev/null
+++ b/sdk/lib/vmservice/devfs.dart
@@ -0,0 +1,319 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+part of dart._vmservice;
+
+String _encodeDevFSDisabledError(Message message) {
+  return encodeRpcError(
+      message, kFeatureDisabled,
+      details: "DevFS is not supported by this Dart implementation");
+}
+
+String _encodeFileSystemAlreadyExistsError(Message message, String fsName) {
+  return encodeRpcError(
+      message, kFileSystemAlreadyExists,
+      details: "${message.method}: file system '${fsName}' already exists");
+}
+
+String _encodeFileSystemDoesNotExistError(Message message, String fsName) {
+  return encodeRpcError(
+      message, kFileSystemDoesNotExist,
+      details: "${message.method}: file system '${fsName}' does not exist");
+}
+
+class _FileSystem {
+  _FileSystem(this.name, this.uri);
+
+  final String name;
+  final Uri uri;
+
+  Uri resolvePath(String path) {
+    if (path.startsWith('/')) {
+      path = path.substring(1);
+    }
+    if (path.isEmpty) {
+      return null;
+    }
+    Uri pathUri;
+    try {
+      pathUri = Uri.parse(path);
+    } on FormatException catch (e) {
+      return null;
+    }
+    Uri resolvedUri = uri.resolveUri(pathUri);
+    if (!resolvedUri.toString().startsWith(uri.toString())) {
+      // Resolved URI must be within the file system's base URI.
+      return null;
+    }
+    return resolvedUri;
+  }
+
+  Map toMap() {
+    return {
+      'type': 'FileSystem',
+      'name': name,
+      'uri': uri.toString(),
+    };
+  }
+}
+
+class DevFS {
+  DevFS();
+
+  Map<String, _FileSystem> _fsMap = {};
+
+  final Set _rpcNames = new Set.from([
+      '_listDevFS',
+      '_createDevFS',
+      '_deleteDevFS',
+      '_readDevFSFile',
+      '_writeDevFSFile',
+      '_writeDevFSFiles',
+      '_listDevFSFiles',
+  ]);
+
+  void cleanup() {
+    var deleteDir = VMServiceEmbedderHooks.deleteDir;
+    if (deleteDir == null) {
+      return;
+    }
+    var deletions = [];
+    for (var fs in _fsMap.values) {
+      deletions.add(deleteDir(fs.uri));
+    }
+    Future.wait(deletions);
+    _fsMap.clear();
+  }
+
+  bool shouldHandleMessage(Message message) {
+    return _rpcNames.contains(message.method);
+  }
+
+  Future<String> handleMessage(Message message) async {
+    switch (message.method) {
+      case '_listDevFS':
+        return _listDevFS(message);
+      case '_createDevFS':
+        return _createDevFS(message);
+      case '_deleteDevFS':
+        return _deleteDevFS(message);
+      case '_readDevFSFile':
+        return _readDevFSFile(message);
+      case '_writeDevFSFile':
+        return _writeDevFSFile(message);
+      case '_writeDevFSFiles':
+        return _writeDevFSFiles(message);
+      case '_listDevFSFiles':
+        return _listDevFSFiles(message);
+      default:
+        return encodeRpcError(
+            message, kInternalError,
+            details: 'Unexpected rpc ${message.method}');
+    }
+  }
+
+  Future<String> _listDevFS(Message message) async {
+    var result = {};
+    result['type'] = 'FileSystemList';
+    result['fsNames'] = _fsMap.keys.toList();
+    return encodeResult(message, result);
+  }
+
+  Future<String> _createDevFS(Message message) async {
+    var createTempDir = VMServiceEmbedderHooks.createTempDir;
+    if (createTempDir == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap[fsName];
+    if (fs != null) {
+      return _encodeFileSystemAlreadyExistsError(message, fsName);
+    }
+    var tempDir = await createTempDir(fsName);
+    fs = new _FileSystem(fsName, tempDir);
+    _fsMap[fsName] = fs;
+    return encodeResult(message, fs.toMap());
+  }
+
+  Future<String> _deleteDevFS(Message message) async {
+    var deleteDir = VMServiceEmbedderHooks.deleteDir;
+    if (deleteDir == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap.remove(fsName);
+    if (fs == null) {
+      return _encodeFileSystemDoesNotExistError(message, fsName);
+    }
+    await deleteDir(fs.uri);
+    return encodeSuccess(message);
+  }
+
+  Future<String> _readDevFSFile(Message message) async {
+    var readFile = VMServiceEmbedderHooks.readFile;
+    if (readFile == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap[fsName];
+    if (fs == null) {
+      return _encodeFileSystemDoesNotExistError(message, fsName);
+    }
+    var path = message.params['path'];
+    if (path == null) {
+      return encodeMissingParamError(message, 'path');
+    }
+    if (path is! String) {
+      return encodeInvalidParamError(message, 'path');
+    }
+    Uri uri = fs.resolvePath(path);
+    if (uri == null) {
+      return encodeInvalidParamError(message, 'path');
+    }
+
+    try {
+      List<int> bytes = await readFile(uri);
+      var result = {
+        'type': 'FSFile',
+        'fileContents': BASE64.encode(bytes)
+      };
+      return encodeResult(message, result);
+    } catch (e) {
+      return encodeRpcError(
+          message, kFileDoesNotExist,
+          details: "_readDevFSFile: $e");
+    }
+  }
+
+  Future<String> _writeDevFSFile(Message message) async {
+    var writeFile = VMServiceEmbedderHooks.writeFile;
+    if (writeFile == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap[fsName];
+    if (fs == null) {
+      return _encodeFileSystemDoesNotExistError(message, fsName);
+    }
+    var path = message.params['path'];
+    if (path == null) {
+      return encodeMissingParamError(message, 'path');
+    }
+    if (path is! String) {
+      return encodeInvalidParamError(message, 'path');
+    }
+    Uri uri = fs.resolvePath(path);
+    if (uri == null) {
+      return encodeInvalidParamError(message, 'path');
+    }
+    var fileContents = message.params['fileContents'];
+    if (fileContents == null) {
+      return encodeMissingParamError(message, 'fileContents');
+    }
+    if (fileContents is! String) {
+      return encodeInvalidParamError(message, 'fileContents');
+    }
+    List<int> decodedFileContents = BASE64.decode(fileContents);
+
+    await writeFile(uri, decodedFileContents);
+    return encodeSuccess(message);
+  }
+
+  Future<String> _writeDevFSFiles(Message message) async {
+    var writeFile = VMServiceEmbedderHooks.writeFile;
+    if (writeFile == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap[fsName];
+    if (fs == null) {
+      return _encodeFileSystemDoesNotExistError(message, fsName);
+    }
+    var files = message.params['files'];
+    if (files == null) {
+      return encodeMissingParamError(message, 'files');
+    }
+    if (files is! List) {
+      return encodeInvalidParamError(message, 'files');
+    }
+    var uris = [];
+    for (int i = 0; i < files.length; i++) {
+      var fileInfo = files[i];
+      if (fileInfo is! List ||
+          fileInfo.length != 2 ||
+          fileInfo[0] is! String || fileInfo[1] is! String) {
+        return encodeRpcError(
+            message, kInvalidParams,
+            details: "${message.method}: invalid 'files' parameter "
+                     "at index ${i}: ${fileInfo}");
+      }
+      var uri = fs.resolvePath(fileInfo[0]);
+      if (uri == null) {
+        return encodeRpcError(
+            message, kInvalidParams,
+            details: "${message.method}: invalid 'files' parameter "
+                     "at index ${i}: ${fileInfo}");
+      }
+      uris.add(uri);
+    }
+    var pendingWrites = [];
+    for (int i = 0; i < uris.length; i++) {
+      List<int> decodedFileContents = BASE64.decode(files[i][1]);
+      pendingWrites.add(writeFile(uris[i], decodedFileContents));
+    }
+    await Future.wait(pendingWrites);
+    return encodeSuccess(message);
+  }
+
+  Future<String> _listDevFSFiles(Message message) async {
+    var listFiles = VMServiceEmbedderHooks.listFiles;
+    if (listFiles == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap[fsName];
+    if (fs == null) {
+      return _encodeFileSystemDoesNotExistError(message, fsName);
+    }
+    var fileList = await listFiles(fs.uri);
+    var result = { 'type': 'FSFileList', 'files': fileList };
+    return encodeResult(message, result);
+  }
+}
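
`_FileSystem.resolvePath` rejects anything that escapes the file system's
base URI, which blocks '../' traversal through the RPC paths. Sketch with a
hypothetical base URI (`_FileSystem` is private; shown for illustration):

  var fs = new _FileSystem('test', Uri.parse('file:///tmp/devfs/test/'));
  fs.resolvePath('lib/main.dart'); // file:///tmp/devfs/test/lib/main.dart
  fs.resolvePath('../escape');     // null: resolves outside the base URI
  fs.resolvePath('/');             // null: empty path after stripping '/'
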
diff --git a/sdk/lib/vmservice/vmservice.dart b/sdk/lib/vmservice/vmservice.dart
index f125352..4101023 100644
--- a/sdk/lib/vmservice/vmservice.dart
+++ b/sdk/lib/vmservice/vmservice.dart
@@ -12,6 +12,7 @@
 
 part 'asset.dart';
 part 'client.dart';
+part 'devfs.dart';
 part 'constants.dart';
 part 'running_isolate.dart';
 part 'running_isolates.dart';
@@ -32,16 +33,24 @@
     new Map<int, IsolateEmbedderData>();
 
 // These must be kept in sync with the declarations in vm/json_stream.h.
-const kInvalidParams = -32602;
-const kInternalError = -32603;
-const kStreamAlreadySubscribed = 103;
-const kStreamNotSubscribed = 104;
+const kInvalidParams             = -32602;
+const kInternalError             = -32603;
+const kFeatureDisabled           = 100;
+const kStreamAlreadySubscribed   = 103;
+const kStreamNotSubscribed       = 104;
+const kFileSystemAlreadyExists   = 1001;
+const kFileSystemDoesNotExist    = 1002;
+const kFileDoesNotExist          = 1003;
 
 var _errorMessages = {
   kInvalidParams: 'Invalid params',
   kInternalError: 'Internal error',
+  kFeatureDisabled: 'Feature is disabled',
   kStreamAlreadySubscribed: 'Stream already subscribed',
   kStreamNotSubscribed: 'Stream not subscribed',
+  kFileSystemAlreadyExists: 'File system already exists',
+  kFileSystemDoesNotExist: 'File system does not exist',
+  kFileDoesNotExist: 'File does not exist',
 };
 
 String encodeRpcError(Message message, int code, {String details}) {
@@ -61,6 +70,19 @@
   return JSON.encode(response);
 }
 
+String encodeMissingParamError(Message message, String param) {
+  return encodeRpcError(
+      message, kInvalidParams,
+      details: "${message.method} expects the '${param}' parameter");
+}
+
+String encodeInvalidParamError(Message message, String param) {
+  var value = message.params[param];
+  return encodeRpcError(
+      message, kInvalidParams,
+      details: "${message.method}: invalid '${param}' parameter: ${value}");
+}
+
 String encodeResult(Message message, Map result) {
   var response = {
     'jsonrpc': '2.0',
@@ -70,6 +92,10 @@
   return JSON.encode(response);
 }
 
+String encodeSuccess(Message message) {
+  return encodeResult(message, { 'type': 'Success' });
+}
+
 const shortDelay = const Duration(milliseconds: 10);
 
 /// Called when the server should be started.
@@ -81,11 +107,31 @@
 /// Called when the service is exiting.
 typedef Future CleanupCallback();
 
+/// Called to create a temporary directory.
+typedef Future<Uri> CreateTempDirCallback(String base);
+
+/// Called to delete a directory.
+typedef Future DeleteDirCallback(Uri path);
+
+/// Called to write a file.
+typedef Future WriteFileCallback(Uri path, List<int> bytes);
+
+/// Called to read a file.
+typedef Future<List<int>> ReadFileCallback(Uri path);
+
+/// Called to list all files under some path.
+typedef Future<List<Map<String, String>>> ListFilesCallback(Uri path);
+
 /// Hooks that are setup by the embedder.
 class VMServiceEmbedderHooks {
   static ServerStartCallback serverStart;
   static ServerStopCallback serverStop;
   static CleanupCallback cleanup;
+  static CreateTempDirCallback createTempDir;
+  static DeleteDirCallback deleteDir;
+  static WriteFileCallback writeFile;
+  static ReadFileCallback readFile;
+  static ListFilesCallback listFiles;
 }
 
 class VMService extends MessageRouter {
@@ -100,6 +146,8 @@
   /// A port used to receive events from the VM.
   final RawReceivePort eventPort;
 
+  final _devfs = new DevFS();
+
   void _addClient(Client client) {
     assert(client.streams.isEmpty);
     clients.add(client);
@@ -158,6 +206,7 @@
     for (var client in clientsList) {
       client.disconnect();
     }
+    _devfs.cleanup();
     if (VMServiceEmbedderHooks.cleanup != null) {
       await VMServiceEmbedderHooks.cleanup();
     }
@@ -228,8 +277,7 @@
     }
     client.streams.add(streamId);
 
-    var result = { 'type' : 'Success' };
-    return encodeResult(message, result);
+    return encodeSuccess(message);
   }
 
   Future<String> _streamCancel(Message message) async {
@@ -244,8 +292,7 @@
       _vmCancelStream(streamId);
     }
 
-    var result = { 'type' : 'Success' };
-    return encodeResult(message, result);
+    return encodeSuccess(message);
   }
 
   // TODO(johnmccutchan): Turn this into a command line tool that uses the
@@ -320,6 +367,9 @@
     if (message.method == 'streamCancel') {
       return _streamCancel(message);
     }
+    if (_devfs.shouldHandleMessage(message)) {
+      return _devfs.handleMessage(message);
+    }
     if (message.params['isolateId'] != null) {
       return runningIsolates.route(message);
     }
diff --git a/sdk/lib/vmservice/vmservice_sources.gypi b/sdk/lib/vmservice/vmservice_sources.gypi
index 869e304..16eb2c8 100644
--- a/sdk/lib/vmservice/vmservice_sources.gypi
+++ b/sdk/lib/vmservice/vmservice_sources.gypi
@@ -11,6 +11,7 @@
     'asset.dart',
     'client.dart',
     'constants.dart',
+    'devfs.dart',
     'running_isolate.dart',
     'running_isolates.dart',
     'message.dart',
diff --git a/tests/co19/co19-co19.status b/tests/co19/co19-co19.status
index d531a08..80a6317 100644
--- a/tests/co19/co19-co19.status
+++ b/tests/co19/co19-co19.status
@@ -75,3 +75,14 @@
 
 [ $runtime == dartium || $compiler == dart2js ]
 LibTest/async/Future/Future.delayed_A01_t02: Pass, Fail # Issue 15524
+
+[ ($compiler == none || $compiler == precompiler) && ($runtime == vm || $runtime == drt || $runtime == dartium || $runtime == dart_precompiled) ]
+# Optional trailing commas for argument and parameter lists added to language.
+# https://github.com/dart-lang/co19/issues/68
+Language/Expressions/Function_Invocation/Actual_Argument_List_Evaluation/syntax_t05: Fail, OK
+Language/Expressions/Method_Invocation/Ordinary_Invocation/syntax_t05: Fail, OK
+Language/Expressions/Method_Invocation/Ordinary_Invocation/syntax_t10: Fail, OK
+Language/Expressions/Method_Invocation/Super_Invocation/syntax_t05: Fail, OK
+Language/Functions/Formal_Parameters/syntax_t04: Fail, OK
+Language/Functions/Formal_Parameters/syntax_t05: Fail, OK
+Language/Functions/Formal_Parameters/syntax_t12: Fail, OK
diff --git a/tests/co19/co19-dartium.status b/tests/co19/co19-dartium.status
index 10bd582..927945b 100644
--- a/tests/co19/co19-dartium.status
+++ b/tests/co19/co19-dartium.status
@@ -184,8 +184,8 @@
 Language/Classes/deсlarations_t33: Skip # Times out. Please triage this failure.
 Language/Classes/deсlarations_t34: Skip # Times out. Please triage this failure.
 Language/Expressions/Assignment/super_assignment_failed_t05: RuntimeError # Issue 25671
-Language/Expressions/Function_Invocation/async_generator_invokation_t08: Timeout # Issue 25967
-Language/Expressions/Function_Invocation/async_generator_invokation_t10: Timeout # Issue 25967
+Language/Expressions/Function_Invocation/async_generator_invokation_t08: Skip # Times out. Issue 25967
+Language/Expressions/Function_Invocation/async_generator_invokation_t10: Skip # Times out. Issue 25967
 Language/Expressions/Identifier_Reference/built_in_identifier_t35: Fail # Issue 25732
 Language/Expressions/Identifier_Reference/built_in_identifier_t36: Fail # Issue 25732
 Language/Expressions/Identifier_Reference/built_in_identifier_t37: Fail # Issue 25732
@@ -602,7 +602,7 @@
 LayoutTests/fast/dynamic/insertAdjacentElement_t01: Skip # Timeout. co19-roll r786: Please triage this failure.
 LayoutTests/fast/dynamic/insertAdjacentHTML_t01: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/dynamic/recursive-layout_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/events/change-overflow-on-overflow-change_t01: Timeout # Dartium 45 roll. Issue 25754
+LayoutTests/fast/events/change-overflow-on-overflow-change_t01: Skip # Timeout. Please triage this failure.
 LayoutTests/fast/events/clipboard-clearData_t01: Skip # Timeout. co19-roll r786: Please triage this failure.
 LayoutTests/fast/events/clipboard-dataTransferItemList_t01: Skip # Timeout. co19-roll r786: Please triage this failure.
 LayoutTests/fast/events/div-focus_t01: Pass, RuntimeError # co19-roll r786: Please triage this failure.
@@ -707,7 +707,7 @@
 LayoutTests/fast/loader/hashchange-event-properties_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/loader/loadInProgress_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/loader/onhashchange-attribute-listeners_t01: Skip # Times out. co19-roll r801: Please triage this failure.
-LayoutTests/fast/loader/onload-policy-ignore-for-frame_t01: Timeout # Dartium 45 roll
+LayoutTests/fast/loader/onload-policy-ignore-for-frame_t01: Skip # Times out. Dartium 45 roll: Please triage this failure.
 LayoutTests/fast/loader/scroll-position-restored-on-back_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/loader/scroll-position-restored-on-reload-at-load-event_t01: Skip # Times out. co19-roll r801: Please triage this failure.
 LayoutTests/fast/loader/stateobjects/replacestate-in-onunload_t01: RuntimeError # co19-roll r801: Please triage this failure.
@@ -771,8 +771,8 @@
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-inside-table-cell-and-fixed-ancestor-vertical-lr_t01: RuntimeError, Pass # Spurious intermittent pass # co19 issue 11.
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-inside-table-cell-and-fixed-ancestor_t01: RuntimeError, Pass # Spurious intermittent pass # co19 issue 11.
 LayoutTests/fast/replaced/container-width-zero_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-anonymous-table-cell_t01: RuntimeError, Pass, Timeout # Spurious intermittent pass. # co19 issue 11.
-LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-table-cell-ignore-height_t01: RuntimeError, Pass, Timeout # co19-roll r801: Please triage this failure.
+LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-anonymous-table-cell_t01: Skip # Times out: Please triage this failure.
+LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-table-cell-ignore-height_t01: Skip # Times out: Please triage this failure.
 LayoutTests/fast/replaced/preferred-widths_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/replaced/table-percent-height_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/replaced/table-percent-width_t01: RuntimeError # co19-roll r801: Please triage this failure.
@@ -1039,7 +1039,7 @@
 LibTest/isolate/ReceivePort/asBroadcastStream_A04_t02: RuntimeError, OK  # Uses Isolate.spawn.
 LibTest/isolate/ReceivePort/asBroadcastStream_A04_t03: RuntimeError, OK  # Uses Isolate.spawn.
 LibTest/isolate/ReceivePort/close_A01_t01: Pass, RuntimeError # Issue 13921, co19 issue for false pass https://github.com/dart-lang/co19/issues/13
-LibTest/isolate/ReceivePort/close_A02_t01: Pass, RuntimeError, Timeout # Issue 13921, co19 issue for false pass https://github.com/dart-lang/co19/issues/13
+LibTest/isolate/ReceivePort/close_A02_t01: Skip # Times out
 LibTest/isolate/ReceivePort/contains_A01_t01: RuntimeError, OK  # Uses Isolate.spawn.
 LibTest/isolate/ReceivePort/distinct_A01_t01: RuntimeError, OK  # Uses Isolate.spawn.
 LibTest/isolate/ReceivePort/distinct_A01_t02: RuntimeError, OK  # Uses Isolate.spawn.
diff --git a/tests/co19/co19-runtime.status b/tests/co19/co19-runtime.status
index b5bdadc..941a1d8 100644
--- a/tests/co19/co19-runtime.status
+++ b/tests/co19/co19-runtime.status
@@ -190,3 +190,23 @@
 
 [ $compiler == precompiler && $runtime == dart_precompiled && $arch == simarm ]
 LibTest/typed_data/Float32x4/operator_division_A01_t02: RuntimeError # Issue #26675
+
+[ $hot_reload ]
+LibTest/collection/DoubleLinkedQueue/DoubleLinkedQueue_class_A01_t01: Pass, Crash
+LibTest/collection/HashSet/HashSet_class_A01_t01: Crash
+LibTest/collection/IterableBase/IterableBase_class_A01_t02: Crash
+LibTest/collection/LinkedHashSet/LinkedHashSet_class_A01_t01: Crash
+LibTest/collection/LinkedList/iterator_current_A01_t01: Crash
+LibTest/collection/ListBase/ListBase_class_A01_t01: Pass, Timeout, Crash
+LibTest/collection/ListBase/ListBase_class_A01_t02: Pass, Timeout
+LibTest/collection/ListMixin/ListMixin_class_A01_t01: Pass, Timeout, Crash
+LibTest/collection/ListMixin/ListMixin_class_A01_t02: Pass, Timeout
+LibTest/collection/ListQueue/ListQueue_class_A01_t01: Pass, Crash
+LibTest/collection/Queue/Queue_class_A01_t01: Pass, Crash
+LibTest/core/List/List.from_A01_t01: Crash
+LibTest/core/List/List_class_A01_t01: Pass, Crash
+LibTest/core/List/List_class_A01_t02: Pass, Timeout, Crash
+LibTest/core/Map/Map_class_A01_t04: Pass, Timeout
+LibTest/core/Set/IterableBase_A01_t01: Pass, Crash
+LibTest/core/Uri/Uri_A06_t03: Pass, Timeout
+LibTest/core/Uri/encodeQueryComponent_A01_t02: Pass, Timeout
diff --git a/tests/compiler/dart2js/command_line_test.dart b/tests/compiler/dart2js/command_line_test.dart
new file mode 100644
index 0000000..fe2842e
--- /dev/null
+++ b/tests/compiler/dart2js/command_line_test.dart
@@ -0,0 +1,80 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Test the command line options of dart2js.
+
+import 'dart:async';
+
+import 'package:async_helper/async_helper.dart';
+import 'package:expect/expect.dart';
+
+import 'package:compiler/compiler_new.dart' as api;
+import 'package:compiler/src/commandline_options.dart';
+import 'package:compiler/src/dart2js.dart' as entry;
+import 'package:compiler/src/options.dart' show CompilerOptions;
+
+main() {
+  asyncTest(() async {
+    await test([], exitCode: 1);
+    await test(['foo.dart']);
+    await test([Flags.resolveOnly, 'foo.dart'],
+        resolveOnly: true,
+        resolutionOutput: Uri.base.resolve('out.data'));
+    await test(['--resolution-input=bar.dart', 'foo.dart'],
+        resolutionInputs: [Uri.base.resolve('bar.dart')]);
+    await test([Flags.resolveOnly, '--resolution-input=bar.dart', 'foo.dart'],
+        resolveOnly: true,
+        resolutionOutput: Uri.base.resolve('out.data'),
+        resolutionInputs: [Uri.base.resolve('bar.dart')]);
+    await test([Flags.resolveOnly, '--resolution-input=out.data', 'foo.dart'],
+        exitCode: 1);
+  });
+}
+
+Future test(List<String> arguments,
+    {int exitCode,
+     bool resolveOnly: false,
+     Uri resolutionOutput,
+     List<Uri> resolutionInputs}) async {
+  print('--------------------------------------------------------------------');
+  print('dart2js ${arguments.join(' ')}');
+  print('--------------------------------------------------------------------');
+  entry.CompileFunc oldCompileFunc = entry.compileFunc;
+  entry.ExitFunc oldExitFunc = entry.exitFunc;
+
+  CompilerOptions options;
+  int actualExitCode;
+  entry.compileFunc = (_options, input, diagnostics, output) {
+    options = _options;
+    return new Future<api.CompilationResult>.value(
+        new api.CompilationResult(null));
+  };
+  entry.exitFunc = (_exitCode) {
+    actualExitCode = _exitCode;
+    throw 'exited';
+  };
+  try {
+    await entry.compilerMain(arguments);
+  } catch (e, s) {
+    Expect.equals('exited', e, "Unexpected exception: $e\n$s");
+  }
+  Expect.equals(exitCode, actualExitCode, "Unexpected exit code");
+  if (actualExitCode == null) {
+    Expect.isNotNull(options, "Missing options object");
+    Expect.equals(resolveOnly, options.resolveOnly,
+        "Unexpected resolveOnly value");
+    Expect.equals(resolutionOutput, options.resolutionOutput,
+        "Unexpected resolutionOutput value");
+    if (resolutionInputs == null) {
+      Expect.isNull(options.resolutionInputs,
+          "Unexpected resolutionInputs value");
+    } else {
+      Expect.listEquals(resolutionInputs, options.resolutionInputs,
+          "Unexpected resolutionInputs value");
+    }
+  }
+
+  entry.compileFunc = oldCompileFunc;
+  entry.exitFunc = oldExitFunc;
+}
\ No newline at end of file
diff --git a/tests/compiler/dart2js/compiler_helper.dart b/tests/compiler/dart2js/compiler_helper.dart
index e365f3b..2c1684e 100644
--- a/tests/compiler/dart2js/compiler_helper.dart
+++ b/tests/compiler/dart2js/compiler_helper.dart
@@ -179,7 +179,7 @@
 
 Future compileSources(Map<String, String> sources,
                check(MockCompiler compiler)) {
-  Uri base = new Uri(scheme: 'source');
+  Uri base = new Uri(scheme: 'source', path: '/');
   Uri mainUri = base.resolve('main.dart');
   String mainCode = sources['main.dart'];
   Expect.isNotNull(mainCode, 'No source code found for "main.dart"');
diff --git a/tests/compiler/dart2js/constant_expression_evaluate_test.dart b/tests/compiler/dart2js/constant_expression_evaluate_test.dart
index fde84c3..49f3b51 100644
--- a/tests/compiler/dart2js/constant_expression_evaluate_test.dart
+++ b/tests/compiler/dart2js/constant_expression_evaluate_test.dart
@@ -108,6 +108,10 @@
   const C({field1: 42, this.field2: false}) : super(field1);
   const C.named([field = false]) : this(field1: field, field2: field);
 }
+class D extends C {
+  final field3 = 99;
+  const D(a, b) : super(field2: a, field1: b);
+}
 ''', const [
     const ConstantData('const Object()',
         const { const {} : 'ConstructedConstant(Object())' }),
@@ -140,6 +144,10 @@
         const {'foo': 'false', 'bar': '87'} :
           'ConstructedConstant(C(field1=BoolConstant(false),'
                                 'field2=IntConstant(87)))', }),
+    const ConstantData('const D(42, 87)', const { const {} :
+       'ConstructedConstant(D(field1=IntConstant(87),'
+                             'field2=IntConstant(42),'
+                             'field3=IntConstant(99)))' }),
   ]),
   const TestData('''
 class A<T> implements B {
diff --git a/tests/compiler/dart2js/constant_expression_test.dart b/tests/compiler/dart2js/constant_expression_test.dart
index f2b8f3b..16ec71e 100644
--- a/tests/compiler/dart2js/constant_expression_test.dart
+++ b/tests/compiler/dart2js/constant_expression_test.dart
@@ -53,6 +53,7 @@
     const ConstantData('1 + 2', ConstantExpressionKind.BINARY),
     const ConstantData('1 == 2', ConstantExpressionKind.BINARY),
     const ConstantData('1 != 2', ConstantExpressionKind.BINARY),
+    const ConstantData('1 ?? 2', ConstantExpressionKind.BINARY),
     const ConstantData('-(1)', ConstantExpressionKind.UNARY, text: '-1'),
     const ConstantData('"foo".length', ConstantExpressionKind.STRING_LENGTH),
     const ConstantData('identical(0, 1)', ConstantExpressionKind.IDENTICAL),
diff --git a/tests/compiler/dart2js/constant_value_test.dart b/tests/compiler/dart2js/constant_value_test.dart
new file mode 100644
index 0000000..5c5bec6
--- /dev/null
+++ b/tests/compiler/dart2js/constant_value_test.dart
@@ -0,0 +1,40 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.constants.values.test;
+
+import 'package:async_helper/async_helper.dart';
+import 'package:expect/expect.dart';
+import 'package:compiler/src/helpers/helpers.dart';
+import 'package:compiler/src/elements/elements.dart';
+import 'package:compiler/src/constants/values.dart';
+import 'type_test_helper.dart';
+
+void main() {
+  enableDebugMode();
+
+  asyncTest(() async {
+    TypeEnvironment env = await TypeEnvironment.create('''
+    class C {
+      final field1;
+      final field2;
+
+      C(this.field1, this.field2);
+    }
+    ''');
+    ClassElement C = env.getElement('C');
+    FieldElement field1 = C.lookupLocalMember('field1');
+    FieldElement field2 = C.lookupLocalMember('field2');
+    ConstantValue value1 = new ConstructedConstantValue(C.rawType, {
+      field1: new IntConstantValue(0),
+      field2: new IntConstantValue(1),
+    });
+    ConstantValue value2 = new ConstructedConstantValue(C.rawType, {
+      field2: new IntConstantValue(1),
+      field1: new IntConstantValue(0),
+    });
+    Expect.equals(value1.hashCode, value2.hashCode, "Hashcode mismatch.");
+    Expect.equals(value1, value2, "Value mismatch.");
+  });
+}
diff --git a/tests/compiler/dart2js/js_spec_string_test.dart b/tests/compiler/dart2js/js_spec_string_test.dart
index 01c9f64..fdd59da 100644
--- a/tests/compiler/dart2js/js_spec_string_test.dart
+++ b/tests/compiler/dart2js/js_spec_string_test.dart
@@ -20,15 +20,16 @@
     errorMessage = message;
     throw "error";
   }
-  reportError(message, [infos]) {
 
+  reportError(message, [infos = const <DiagnosticMessage>[]]) {
     errorMessage =
         '${message.message.arguments}'; // E.g.  "{text: Duplicate tag 'new'.}"
     throw "error";
   }
 
   @override
-  DiagnosticMessage createMessage(spannable, messageKind, [arguments]) {
+  DiagnosticMessage createMessage(spannable, messageKind,
+      [arguments = const {}]) {
     return new DiagnosticMessage(null, spannable,
         MessageTemplate.TEMPLATES[messageKind].message(arguments));
   }
diff --git a/tests/compiler/dart2js/mirrors_used_test.dart b/tests/compiler/dart2js/mirrors_used_test.dart
index fc08b6b..4b9c31a 100644
--- a/tests/compiler/dart2js/mirrors_used_test.dart
+++ b/tests/compiler/dart2js/mirrors_used_test.dart
@@ -69,7 +69,7 @@
     // 2. Some code was refactored, and there are more methods.
     // Either situation could be problematic, but in situation 2, it is often
     // acceptable to increase [expectedMethodCount] a little.
-    int expectedMethodCount = 432;
+    int expectedMethodCount = 466;
     Expect.isTrue(
         generatedCode.length <= expectedMethodCount,
         'Too many compiled methods: '
diff --git a/tests/compiler/dart2js/parser_helper.dart b/tests/compiler/dart2js/parser_helper.dart
index a86aebe..48ab2c4 100644
--- a/tests/compiler/dart2js/parser_helper.dart
+++ b/tests/compiler/dart2js/parser_helper.dart
@@ -70,7 +70,8 @@
     infos.forEach(log);
   }
 
-  void reportInfo(Spannable node, MessageKind errorCode, [Map arguments]) {
+  void reportInfo(Spannable node, MessageKind errorCode,
+      [Map arguments = const {}]) {
     log(new Message(MessageTemplate.TEMPLATES[errorCode], arguments, false));
   }
 
diff --git a/tests/compiler/dart2js/serialization/compilation0_test.dart b/tests/compiler/dart2js/serialization/compilation0_test.dart
new file mode 100644
index 0000000..9df7f53
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/compilation0_test.dart
@@ -0,0 +1,12 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.compilation0_test;
+
+import 'compilation_test_helper.dart' as test;
+import 'test_data.dart';
+
+main() {
+  test.main(['0', '0']);
+}
diff --git a/tests/compiler/dart2js/serialization/compilation1_test.dart b/tests/compiler/dart2js/serialization/compilation1_test.dart
index 3244d79..e3f6176 100644
--- a/tests/compiler/dart2js/serialization/compilation1_test.dart
+++ b/tests/compiler/dart2js/serialization/compilation1_test.dart
@@ -8,5 +8,5 @@
 import 'test_data.dart';
 
 main() {
-  test.main(['0', '${TESTS.length ~/ 4}']);
+  test.main(['1', '${TESTS.length ~/ 4}']);
 }
diff --git a/tests/compiler/dart2js/serialization/equivalence_test.dart b/tests/compiler/dart2js/serialization/equivalence_test.dart
index e6ae2d4..4e3d6e2 100644
--- a/tests/compiler/dart2js/serialization/equivalence_test.dart
+++ b/tests/compiler/dart2js/serialization/equivalence_test.dart
@@ -15,6 +15,7 @@
 import 'package:compiler/src/diagnostics/invariant.dart';
 import 'package:compiler/src/elements/elements.dart';
 import 'package:compiler/src/elements/visitor.dart';
+import 'package:compiler/src/filenames.dart';
 import 'package:compiler/src/library_loader.dart';
 import 'package:compiler/src/ordered_typeset.dart';
 import 'package:compiler/src/serialization/element_serialization.dart';
@@ -24,6 +25,14 @@
 import 'package:expect/expect.dart';
 import 'test_helper.dart';
 
+const TEST_SOURCES = const <String, String>{
+  'main.dart': '''
+import 'a.dart' deferred as a;
+''',
+  'a.dart': '''
+''',
+};
+
 main(List<String> arguments) {
   // Ensure that we can print out constant expressions.
   DEBUG_MODE = true;
@@ -44,15 +53,19 @@
       if (entryPoint != null) {
         print("Multiple entrypoints is not supported.");
       }
-      entryPoint = Uri.parse(arg);
+      entryPoint = Uri.base.resolve(nativeToUriPath(arg));
     }
   }
+  Map<String, String> sourceFiles = const <String, String>{};
   if (entryPoint == null) {
-    entryPoint = Uri.parse('dart:core');
+    entryPoint = Uri.parse('memory:main.dart');
+    sourceFiles = TEST_SOURCES;
   }
   asyncTest(() async {
     CompilationResult result = await runCompiler(
-        entryPoint: entryPoint, options: [Flags.analyzeAll]);
+        memorySourceFiles: sourceFiles,
+        entryPoint: entryPoint,
+        options: [Flags.analyzeAll]);
     Compiler compiler = result.compiler;
     testSerialization(
         compiler.libraryLoader.libraries,
@@ -798,8 +811,12 @@
         element1, element2, 'isDeferred',
         element1.isDeferred, element2.isDeferred);
     checkElementIdentities(
-        element1, element2, 'importedLibrary',
+        element1, element2, 'deferredImport',
         element1.deferredImport, element2.deferredImport);
+    if (element1.isDeferred) {
+      checkElementProperties(element1, element2,
+          'loadLibrary', element1.loadLibrary, element2.loadLibrary);
+    }
     // TODO(johnniwinther): Check members.
   }
 }
diff --git a/tests/compiler/dart2js/serialization/test_data.dart b/tests/compiler/dart2js/serialization/test_data.dart
index 6dfa095..83cf2da 100644
--- a/tests/compiler/dart2js/serialization/test_data.dart
+++ b/tests/compiler/dart2js/serialization/test_data.dart
@@ -5,6 +5,18 @@
 library dart2js.serialization_test_data;
 
 const List<Test> TESTS = const <Test>[
+  // This test is very long-running and put here first to compile it on its own
+  // in compilation0_test.dart
+  const Test('Disable tree shaking through reflection', const {
+    'main.dart': '''
+import 'dart:mirrors';
+
+main() {
+  reflect(null).invoke(#toString, []).reflectee;
+}
+''',
+  }, expectedWarningCount: 1),
+
   const Test('Empty program', const {
     'main.dart': 'main() {}'
   }),
@@ -376,16 +388,6 @@
 ''',
   }),
 
-  const Test('Disable tree shaking through reflection', const {
-    'main.dart': '''
-import 'dart:mirrors';
-
-main() {
-  reflect(null).invoke(#toString, []).reflectee;
-}
-''',
-  }, expectedWarningCount: 1),
-
   const Test('Unused noSuchMethod', const {
     'main.dart': '''
 import 'a.dart';
@@ -454,6 +456,96 @@
 }
 ''',
   }),
+
+  const Test('If-null expression in constant constructor', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() {
+  const A(1.0);
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+class A {
+  final field1;
+  const A(a) : this.field1 = a ?? 1.0;
+}
+''',
+  }),
+
+  const Test('Forwarding constructor defined by forwarding constructor', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() => new C();
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+class A {}
+class B {}
+class C {}
+class D = A with B, C;
+''',
+    'b.dart': '''
+''',
+}),
+
+  const Test('Deferred prefix loadLibrary', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() {
+  test();
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+import 'b.dart' deferred as pre;
+test() {
+  pre.loadLibrary();
+}
+''',
+    'b.dart': '''
+''',
+  }),
+
+  const Test('Deferred without prefix', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() {
+  test();
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+import 'b.dart' deferred;
+test() {}
+''',
+    'b.dart': '''
+''',
+  }, expectedErrorCount: 1),
+
+  const Test('Deferred with duplicate prefix', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() {
+  test();
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+import 'b.dart' deferred as pre;
+import 'c.dart' deferred as pre;
+test() {}
+''',
+    'b.dart': '''
+''',
+    'c.dart': '''
+''',
+  }, expectedErrorCount: 1),
 ];
 
 class Test {
diff --git a/tests/compiler/dart2js_extra/dart2js_extra.status b/tests/compiler/dart2js_extra/dart2js_extra.status
index df4de07..6fa9843 100644
--- a/tests/compiler/dart2js_extra/dart2js_extra.status
+++ b/tests/compiler/dart2js_extra/dart2js_extra.status
@@ -10,9 +10,6 @@
 constant_javascript_semantics4_test: Fail, OK
 mirrors_used_closure_test: Fail # Issue 17939
 
-[ $compiler == dart2js && $runtime == jsshell ]
-mirror_printer_test: Pass, Slow # Issue 16473
-
 [ $compiler == dart2js && $checked ]
 variable_type_test/03: Fail, OK
 variable_type_test/01: Fail, OK
@@ -85,5 +82,5 @@
 [ $compiler == dart2js && $runtime == ff && $system == windows ]
 consistent_index_error_string_test: Pass, Slow # Issue 25940
 
-[ $compiler == dart2js && $runtime == ff && $system == linux ]
-mirror_printer_test: Pass, Slow # Issue 25940
+[ $compiler == dart2js ]
+mirror_printer_test: Pass, Slow # Issue 25940, 16473
diff --git a/tests/corelib/corelib.status b/tests/corelib/corelib.status
index 60be0ba..53ec300 100644
--- a/tests/corelib/corelib.status
+++ b/tests/corelib/corelib.status
@@ -209,3 +209,15 @@
 
 [ $arch == simdbc || $arch == simdbc64 ]
 regexp/stack-overflow_test: RuntimeError, OK # Smaller limit with irregex interpreter
+
+[ $hot_reload ]
+big_integer_huge_mul_vm_test: Pass, Timeout
+big_integer_parsed_mul_div_vm_test: Pass, Timeout
+collection_length_test: Fail, Crash
+hash_map2_test: Pass, Crash
+queue_test: Pass, Crash
+regexp/regexp_test: Pass, Fail, Crash
+uri_parse_test: Pass, Timeout
+uri_test: Pass, RuntimeError
+data_uri_test: Pass, RuntimeError
+int_parse_radix_test: Pass, Timeout
diff --git a/tests/corelib/data_uri_test.dart b/tests/corelib/data_uri_test.dart
index 1a21be1..41f2290 100644
--- a/tests/corelib/data_uri_test.dart
+++ b/tests/corelib/data_uri_test.dart
@@ -18,6 +18,13 @@
   testRoundTrip("blåbærgrød", UTF8);
   testRoundTrip("blåbærgrød", LATIN1);
 
+  testUriEquals("data:,abc?d#e");
+  testUriEquals("DATA:,ABC?D#E");
+  testUriEquals("data:,a%20bc?d#e");
+  testUriEquals("DATA:,A%20BC?D#E");
+  testUriEquals("data:,a%62c?d#e");
+  testUriEquals("DATA:,A%42C?D#E");
+
   testUtf8Encoding("\u1000\uffff");
   testBytes();
   testInvalidCharacters();
@@ -250,3 +257,11 @@
   Expect.equals(expect.hasFragment, actual.hasFragment, "hasFragment");
   Expect.equals(expect.fragment, actual.fragment, "fragment");
 }
+
+void testUriEquals(String uriText) {
+  var data = UriData.parse(uriText);
+  var uri = Uri.parse(uriText);
+  Expect.equals(data.uri, uri);
+  Expect.equals(data.toString(), uri.data.toString());
+  Expect.equals(data.toString(), uri.toString());
+}
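
The testUriEquals helper added above asserts that a data: URI means the same
thing whether it is parsed with UriData.parse or with Uri.parse. A minimal
standalone sketch of that round-trip, using only dart:core:

  main() {
    var text = "data:,Hello%20world";
    var data = UriData.parse(text); // data-URI specific view
    var uri = Uri.parse(text);      // generic URI view
    print(data.contentAsString());  // Hello world
    print(data.uri == uri);         // true: both parses agree
  }
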
diff --git a/tests/corelib/uri_test.dart b/tests/corelib/uri_test.dart
index 30b6242..a9166be 100644
--- a/tests/corelib/uri_test.dart
+++ b/tests/corelib/uri_test.dart
@@ -10,6 +10,12 @@
 testUri(String uriText, bool isAbsolute) {
   var uri = Uri.parse(uriText);
 
+  // Test that parsing a substring works the same as parsing the string.
+  String wrapper = "://@[]:/%?#";
+  var embeddedUri = Uri.parse(
+       "$wrapper$uri$wrapper", wrapper.length, uriText.length + wrapper.length);
+
+  Expect.equals(uri, embeddedUri);
   Expect.equals(isAbsolute, uri.isAbsolute);
   Expect.stringEquals(uriText, uri.toString());
 
@@ -77,7 +83,8 @@
   final urisSample = "http://a/b/c/d;p?q";
   Uri base = Uri.parse(urisSample);
   testResolve(expect, relative) {
-    Expect.stringEquals(expect, base.resolve(relative).toString());
+    String name = "$base << $relative";
+    Expect.stringEquals(expect, base.resolve(relative).toString(), name);
   }
 
   // From RFC 3986.
@@ -127,19 +134,232 @@
   // Additional tests (not from RFC 3986).
   testResolve("http://a/b/g;p/h;s",    "../g;p/h;s");
 
+  base = Uri.parse("s:a/b");
+  testResolve("s:a/c", "c");
+  testResolve("s:/c", "../c");
+
+  base = Uri.parse("S:a/b");
+  testResolve("s:a/c", "c");
+  testResolve("s:/c", "../c");
+
+  base = Uri.parse("s:foo");
+  testResolve("s:bar", "bar");
+  testResolve("s:bar", "../bar");
+
+  base = Uri.parse("S:foo");
+  testResolve("s:bar", "bar");
+  testResolve("s:bar", "../bar");
+
+  // Special-case (deliberate non-RFC behavior).
+  base = Uri.parse("foo/bar");
+  testResolve("foo/baz", "baz");
+  testResolve("baz", "../baz");
+
+  base = Uri.parse("s:/foo");
+  testResolve("s:/bar", "bar");
+  testResolve("s:/bar", "../bar");
+
+  base = Uri.parse("S:/foo");
+  testResolve("s:/bar", "bar");
+  testResolve("s:/bar", "../bar");
+
   // Test non-URI base (no scheme, no authority, relative path).
   base = Uri.parse("a/b/c?_#_");
   testResolve("a/b/g?q#f", "g?q#f");
   testResolve("./", "../..");
   testResolve("../", "../../..");
   testResolve("a/b/", ".");
-  testResolve("c", "../../c");
+  testResolve("c", "../../c");  // Deliberate non-RFC behavior.
   base = Uri.parse("../../a/b/c?_#_");  // Initial ".." in base url.
   testResolve("../../a/d", "../d");
   testResolve("../../../d", "../../../d");
 
-  base = Uri.parse("s:a/b");
-  testResolve("s:/c", "../c");
+  base = Uri.parse("s://h/p?q#f");  // A simple base.
+  // Simple references:
+  testResolve("s2://h2/P?Q#F", "s2://h2/P?Q#F");
+  testResolve("s://h2/P?Q#F", "//h2/P?Q#F");
+  testResolve("s://h/P?Q#F", "/P?Q#F");
+  testResolve("s://h/p?Q#F", "?Q#F");
+  testResolve("s://h/p?q#F", "#F");
+  testResolve("s://h/p?q", "");
+  // Non-simple references:
+  testResolve("s2://I@h2/P?Q#F%20", "s2://I@h2/P?Q#F%20");
+  testResolve("s://I@h2/P?Q#F%20", "//I@h2/P?Q#F%20");
+  testResolve("s://h2/P?Q#F%20", "//h2/P?Q#F%20");
+  testResolve("s://h/P?Q#F%20", "/P?Q#F%20");
+  testResolve("s://h/p?Q#F%20", "?Q#F%20");
+  testResolve("s://h/p?q#F%20", "#F%20");
+
+  base = Uri.parse("s://h/p1/p2/p3");  // A simple base with a path.
+  testResolve("s://h/p1/p2/", ".");
+  testResolve("s://h/p1/p2/", "./");
+  testResolve("s://h/p1/", "..");
+  testResolve("s://h/p1/", "../");
+  testResolve("s://h/", "../..");
+  testResolve("s://h/", "../../");
+  testResolve("s://h/p1/%20", "../%20");
+  testResolve("s://h/", "../../../..");
+  testResolve("s://h/", "../../../../");
+
+  base = Uri.parse("s://h/p?q#f%20");  // A non-simpe base.
+  // Simple references:
+  testResolve("s2://h2/P?Q#F", "s2://h2/P?Q#F");
+  testResolve("s://h2/P?Q#F", "//h2/P?Q#F");
+  testResolve("s://h/P?Q#F", "/P?Q#F");
+  testResolve("s://h/p?Q#F", "?Q#F");
+  testResolve("s://h/p?q#F", "#F");
+  testResolve("s://h/p?q", "");
+  // Non-simple references:
+  testResolve("s2://I@h2/P?Q#F%20", "s2://I@h2/P?Q#F%20");
+  testResolve("s://I@h2/P?Q#F%20", "//I@h2/P?Q#F%20");
+  testResolve("s://h2/P?Q#F%20", "//h2/P?Q#F%20");
+  testResolve("s://h/P?Q#F%20", "/P?Q#F%20");
+  testResolve("s://h/p?Q#F%20", "?Q#F%20");
+  testResolve("s://h/p?q#F%20", "#F%20");
+
+  base = Uri.parse("S://h/p1/p2/p3");  // A non-simple base with a path.
+  testResolve("s://h/p1/p2/", ".");
+  testResolve("s://h/p1/p2/", "./");
+  testResolve("s://h/p1/", "..");
+  testResolve("s://h/p1/", "../");
+  testResolve("s://h/", "../..");
+  testResolve("s://h/", "../../");
+  testResolve("s://h/p1/%20", "../%20");
+  testResolve("s://h/", "../../../..");
+  testResolve("s://h/", "../../../../");
+
+  base = Uri.parse("../../../");  // A simple relative path.
+  testResolve("../../../a", "a");
+  testResolve("../../../../a", "../a");
+  testResolve("../../../a%20", "a%20");
+  testResolve("../../../../a%20", "../a%20");
+
+  // Tests covering the branches of the merge algorithm in RFC 3986
+  // with both simple and complex base URIs.
+  for (var b in ["s://a/pa/pb?q#f", "s://a/pa/pb?q#f%20"]) {
+    var origBase = Uri.parse(b);
+    base = origBase;
+
+    // if defined(R.scheme) then ...
+    testResolve("s2://a2/p2?q2#f2", "s2://a2/p2?q2#f2");
+    // else, if defined(R.authority) then ...
+    testResolve("s://a2/p2?q2#f2", "//a2/p2?q2#f2");
+    testResolve("s://a2/?q2#f2", "//a2/../?q2#f2");
+    testResolve("s://a2?q2#f2", "//a2?q2#f2");
+    testResolve("s://a2#f2", "//a2#f2");
+    testResolve("s://a2", "//a2");
+    // else, if (R.path == "") then ...
+    //   if defined(R.query) then
+    testResolve("s://a/pa/pb?q2#f2", "?q2#f2");
+    testResolve("s://a/pa/pb?q2", "?q2");
+    //   else
+    testResolve("s://a/pa/pb?q#f2", "#f2");
+    testResolve("s://a/pa/pb?q", "");
+    // else, if (R.path starts-with "/") then ...
+    testResolve("s://a/p2?q2#f2", "/p2?q2#f2");
+    testResolve("s://a/?q2#f2", "/?q2#f2");
+    testResolve("s://a/#f2", "/#f2");
+    testResolve("s://a/", "/");
+    testResolve("s://a/", "/../");
+    // else ... T.path = merge(Base.path, R.path)
+    // ... remove_dot_segments(T.path) ...
+    // (Cover the merge function and the remove_dot_segments function too).
+
+    // If base has authority and empty path ...
+    var emptyPathBase = Uri.parse(b.replaceFirst("/pa/pb", ""));
+    base = emptyPathBase;
+    testResolve("s://a/p2?q2#f2", "p2?q2#f2");
+    testResolve("s://a/p2#f2", "p2#f2");
+    testResolve("s://a/p2", "p2");
+
+    base = origBase;
+    // otherwise
+    // (Cover bases with no authority, with non-empty paths, and with both).
+    var noAuthEmptyPathBase = Uri.parse(b.replaceFirst("//a/pa/pb", ""));
+    var noAuthAbsPathBase = Uri.parse(b.replaceFirst("//a", ""));
+    var noAuthRelPathBase = Uri.parse(b.replaceFirst("//a/", ""));
+    var noAuthRelSinglePathBase = Uri.parse(b.replaceFirst("//a/pa/", ""));
+
+    testResolve("s://a/pa/p2?q2#f2", "p2?q2#f2");
+    testResolve("s://a/pa/p2#f2", "p2#f2");
+    testResolve("s://a/pa/p2", "p2");
+
+    base = noAuthEmptyPathBase;
+    testResolve("s:p2?q2#f2", "p2?q2#f2");
+    testResolve("s:p2#f2", "p2#f2");
+    testResolve("s:p2", "p2");
+
+    base = noAuthAbsPathBase;
+    testResolve("s:/pa/p2?q2#f2", "p2?q2#f2");
+    testResolve("s:/pa/p2#f2", "p2#f2");
+    testResolve("s:/pa/p2", "p2");
+
+    base = noAuthRelPathBase;
+    testResolve("s:pa/p2?q2#f2", "p2?q2#f2");
+    testResolve("s:pa/p2#f2", "p2#f2");
+    testResolve("s:pa/p2", "p2");
+
+    base = noAuthRelSinglePathBase;
+    testResolve("s:p2?q2#f2", "p2?q2#f2");
+    testResolve("s:p2#f2", "p2#f2");
+    testResolve("s:p2", "p2");
+
+    // Then remove dot segments.
+
+    // A. if input buffer starts with "../" or "./".
+    // This only happens if base has only a single (possibly empty) segment and
+    // no slash.
+    base = emptyPathBase;
+    testResolve("s://a/p2", "../p2");
+    testResolve("s://a/", "../");
+    testResolve("s://a/", "..");
+    testResolve("s://a/p2", "./p2");
+    testResolve("s://a/", "./");
+    testResolve("s://a/", ".");
+    testResolve("s://a/p2", "../../p2");
+    testResolve("s://a/p2", "../../././p2");
+
+    base = noAuthRelSinglePathBase;
+    testResolve("s:p2", "../p2");
+    testResolve("s:", "../");
+    testResolve("s:", "..");
+    testResolve("s:p2", "./p2");
+    testResolve("s:", "./");
+    testResolve("s:", ".");
+    testResolve("s:p2", "../../p2");
+    testResolve("s:p2", "../../././p2");
+
+    // B. if input buffer starts with "/./" or is "/.", replace with "/".
+    // (The URI implementation removes the "." path segments when parsing,
+    // so this case isn't handled by merge).
+    base = origBase;
+    testResolve("s://a/pa/p2", "./p2");
+
+    // C. if input buffer starts with "/../" or is "/..", replace with "/"
+    // and remove preceding segment.
+    testResolve("s://a/p2", "../p2");
+    var longPathBase = Uri.parse(b.replaceFirst("/pb", "/pb/pc/pd"));
+    base = longPathBase;
+    testResolve("s://a/pa/pb/p2", "../p2");
+    testResolve("s://a/pa/p2", "../../p2");
+    testResolve("s://a/p2", "../../../p2");
+    testResolve("s://a/p2", "../../../../p2");
+    var noAuthRelLongPathBase =
+        Uri.parse(b.replaceFirst("//a/pa/pb", "pa/pb/pc/pd"));
+    base = noAuthRelLongPathBase;
+    testResolve("s:pa/pb/p2", "../p2");
+    testResolve("s:pa/p2", "../../p2");
+    testResolve("s:/p2", "../../../p2");
+    testResolve("s:/p2", "../../../../p2");
+
+    // D. if the input buffer contains only ".." or ".", remove it.
+    base = noAuthEmptyPathBase;
+    testResolve("s:", "..");
+    testResolve("s:", ".");
+    base = noAuthRelSinglePathBase;
+    testResolve("s:", "..");
+    testResolve("s:", ".");
+  }
 }
 
 void testResolvePath(String expected, String path) {
@@ -493,6 +713,11 @@
                           query: null,
                           fragment: null).toString());
   Expect.stringEquals("file:///", Uri.parse("file:").toString());
+  Expect.stringEquals("file:///", Uri.parse("file:/").toString());
+  Expect.stringEquals("file:///", Uri.parse("file:").toString());
+  Expect.stringEquals("file:///foo", Uri.parse("file:foo").toString());
+  Expect.stringEquals("file:///foo", Uri.parse("file:/foo").toString());
+  Expect.stringEquals("file://foo/", Uri.parse("file://foo").toString());
 
   testResolvePath("/a/g", "/a/b/c/./../../g");
   testResolvePath("/a/g", "/a/b/c/./../../g");
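
The resolve tests above walk the branches of RFC 3986's merge and
remove_dot_segments algorithms. As a rough illustration only (not the SDK's
implementation, and with the RFC's trailing-slash bookkeeping omitted), a
segment-based sketch of dot-segment removal:

  // Simplified remove_dot_segments: "." segments are dropped and ".."
  // pops the preceding segment; popping never goes past the root.
  String removeDotSegments(String path) {
    var output = <String>[];
    for (var segment in path.split('/')) {
      if (segment == '.') continue;
      if (segment == '..') {
        if (output.isNotEmpty && output.last != '') output.removeLast();
        continue;
      }
      output.add(segment);
    }
    return output.join('/');
  }

  main() {
    print(removeDotSegments("/a/b/c/./../../g")); // /a/g
  }
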
diff --git a/tests/html/event_subscription_specification_test.dart b/tests/html/event_subscription_specification_test.dart
new file mode 100644
index 0000000..406ed37
--- /dev/null
+++ b/tests/html/event_subscription_specification_test.dart
@@ -0,0 +1,121 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library EventTaskZoneTest;
+
+import 'package:unittest/unittest.dart';
+import 'package:unittest/html_config.dart';
+import 'dart:async';
+import 'dart:html';
+
+// Tests event-subscription specifications.
+
+main() {
+  useHtmlConfiguration();
+
+  var defaultTarget = new Element.div();
+  var defaultOnData = (x) => null;
+
+  EventSubscriptionSpecification createSpec({useCapture, isOneShot}) {
+    return new EventSubscriptionSpecification(
+        name: "name",
+        target: defaultTarget,
+        useCapture: useCapture,
+        isOneShot: isOneShot,
+        onData: defaultOnData,
+        eventType: "eventType");
+  }
+
+  for (var useCapture in [true, false]) {
+    for (var isOneShot in [true, false]) {
+      var spec = createSpec(useCapture: useCapture, isOneShot: isOneShot);
+
+      test(
+          "EventSubscriptionSpecification - constructor "
+          "useCapture: $useCapture isOneShot: $isOneShot", () {
+        var replaced = spec.replace(eventType: 'replace-eventType');
+        expect(replaced.name, "name");
+        expect(replaced.target, defaultTarget);
+        expect(replaced.useCapture, useCapture);
+        expect(replaced.isOneShot, isOneShot);
+        expect(replaced.onData, equals(defaultOnData));
+        expect(replaced.eventType, "replace-eventType");
+      });
+
+      test(
+          "replace name "
+          "useCapture: $useCapture isOneShot: $isOneShot", () {
+        var replaced = spec.replace(name: 'replace-name');
+        expect(replaced.name, "replace-name");
+        expect(replaced.target, defaultTarget);
+        expect(replaced.useCapture, useCapture);
+        expect(replaced.isOneShot, isOneShot);
+        expect(replaced.onData, equals(defaultOnData));
+        expect(replaced.eventType, "eventType");
+      });
+
+      test(
+          "replace target "
+          "useCapture: $useCapture isOneShot: $isOneShot", () {
+        var replacementTarget = new Element.a();
+        var replaced = spec.replace(target: replacementTarget);
+        expect(replaced.name, "name");
+        expect(replaced.target, replacementTarget);
+        expect(replaced.useCapture, useCapture);
+        expect(replaced.isOneShot, isOneShot);
+        expect(replaced.onData, equals(defaultOnData));
+        expect(replaced.eventType, "eventType");
+      });
+
+      test(
+          "replace useCapture "
+              "useCapture: $useCapture isOneShot: $isOneShot", () {
+        var replaced = spec.replace(useCapture: !useCapture);
+        expect(replaced.name, "name");
+        expect(replaced.target, defaultTarget);
+        expect(replaced.useCapture, !useCapture);
+        expect(replaced.isOneShot, isOneShot);
+        expect(replaced.onData, equals(defaultOnData));
+        expect(replaced.eventType, "eventType");
+      });
+
+      test(
+          "replace isOneShot "
+              "useCapture: $useCapture isOneShot: $isOneShot", () {
+        var replaced = spec.replace(isOneShot: !isOneShot);
+        expect(replaced.name, "name");
+        expect(replaced.target, defaultTarget);
+        expect(replaced.useCapture, useCapture);
+        expect(replaced.isOneShot, !isOneShot);
+        expect(replaced.onData, equals(defaultOnData));
+        expect(replaced.eventType, "eventType");
+      });
+
+      test(
+          "replace onData "
+              "useCapture: $useCapture isOneShot: $isOneShot", () {
+        var replacementOnData = (x) {};
+        var replaced = spec.replace(onData: replacementOnData);
+        expect(replaced.name, "name");
+        expect(replaced.target, defaultTarget);
+        expect(replaced.useCapture, useCapture);
+        expect(replaced.isOneShot, isOneShot);
+        expect(replaced.onData, equals(replacementOnData));
+        expect(replaced.eventType, "eventType");
+      });
+
+      test(
+          "replace eventType "
+          "useCapture: $useCapture isOneShot: $isOneShot", () {
+        var replaced = spec.replace(eventType: 'replace-eventType');
+        expect(replaced.name, "name");
+        expect(replaced.target, defaultTarget);
+        expect(replaced.useCapture, useCapture);
+        expect(replaced.isOneShot, isOneShot);
+        expect(replaced.onData, equals(defaultOnData));
+        expect(replaced.eventType, "replace-eventType");
+      });
+    }
+  }
+}
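
The replace method exercised above follows the usual copy-with pattern for
immutable value objects: every field is an optional named parameter, and
omitted fields fall back to the receiver's values. A self-contained sketch of
the pattern (Spec is a made-up class, not the dart:html type):

  class Spec {
    final String name;
    final String eventType;
    const Spec(this.name, this.eventType);

    // Omitted arguments default to the current values, so callers can
    // override one field while keeping the rest.
    Spec replace({String name, String eventType}) =>
        new Spec(name ?? this.name, eventType ?? this.eventType);
  }

  main() {
    var spec = const Spec("name", "eventType");
    var replaced = spec.replace(eventType: "replace-eventType");
    print(replaced.name);      // name (unchanged)
    print(replaced.eventType); // replace-eventType
  }
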
diff --git a/tests/html/event_zone_task_test.dart b/tests/html/event_zone_task_test.dart
new file mode 100644
index 0000000..a0e85c8
--- /dev/null
+++ b/tests/html/event_zone_task_test.dart
@@ -0,0 +1,239 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library EventTaskZoneTest;
+
+import 'package:unittest/unittest.dart';
+import 'package:unittest/html_config.dart';
+import 'dart:async';
+import 'dart:html';
+
+// Tests zone tasks with DOM events.
+
+class AbortedEventStreamSubscription implements StreamSubscription<Event> {
+  final Zone zone;
+
+  AbortedEventStreamSubscription(this.zone);
+
+  @override
+  Future asFuture([futureValue]) {
+    throw new UnsupportedError("asFuture");
+  }
+
+  @override
+  Future cancel() {
+    return null;
+  }
+
+  @override
+  bool get isPaused => throw new UnsupportedError("isPaused");
+
+  @override
+  void onData(void handleData(Event data)) {
+    throw new UnsupportedError("cancel");
+  }
+
+  @override
+  void onDone(void handleDone()) {
+    throw new UnsupportedError("onDone");
+  }
+
+  @override
+  void onError(Function handleError) {
+    throw new UnsupportedError("onError");
+  }
+
+  @override
+  void pause([Future resumeSignal]) {
+    throw new UnsupportedError("pause");
+  }
+
+  @override
+  void resume() {
+    throw new UnsupportedError("resume");
+  }
+
+  static AbortedEventStreamSubscription _create(
+      EventSubscriptionSpecification spec, Zone zone) {
+    return new AbortedEventStreamSubscription(zone);
+  }
+}
+
+eventTest(String name, Event eventFn(), void validate(Event event),
+    void validateSpec(EventSubscriptionSpecification spec),
+    {String type: 'foo',
+    bool abortCreation: false,
+    EventSubscriptionSpecification modifySpec(
+        EventSubscriptionSpecification spec),
+    bool abortEvent: false,
+    Event modifyEvent(Event event)}) {
+  test(name, () {
+    var lastSpec;
+    var lastTask;
+    var lastEvent;
+
+    Object createTaskHandler(Zone self, ZoneDelegate parent, Zone zone,
+        TaskCreate create, TaskSpecification specification) {
+      if (specification is EventSubscriptionSpecification) {
+        if (abortCreation) {
+          create = AbortedEventStreamSubscription._create;
+        }
+        if (modifySpec != null) {
+          specification = modifySpec(specification);
+        }
+        lastSpec = specification;
+        return lastTask = parent.createTask(zone, create, specification);
+      }
+      return parent.createTask(zone, create, specification);
+    }
+
+    void runTaskHandler(Zone self, ZoneDelegate parent, Zone zone, TaskRun run,
+        Object task, Object arg) {
+      if (identical(task, lastTask)) {
+        if (abortEvent) return;
+        if (modifyEvent != null) {
+          arg = modifyEvent(arg);
+        }
+        parent.runTask(zone, run, task, arg);
+        return;
+      }
+      parent.runTask(zone, run, task, arg);
+    }
+
+    runZoned(() {
+      final el = new Element.tag('div');
+      var fired = false;
+      var sub = el.on[type].listen((ev) {
+        lastEvent = ev;
+        fired = true;
+      });
+      el.dispatchEvent(eventFn());
+
+      validateSpec(lastSpec);
+      validate(lastEvent);
+
+      if (abortEvent || abortCreation) {
+        expect(fired, isFalse, reason: 'Expected event to be intercepted.');
+      } else {
+        expect(fired, isTrue, reason: 'Expected event to be dispatched.');
+      }
+
+      sub.cancel();
+    },
+        zoneSpecification: new ZoneSpecification(
+            createTask: createTaskHandler,
+            runTask: runTaskHandler));
+  });
+}
+
+Function checkSpec(
+    [String expectedType = 'foo', bool expectedUseCapture = false]) {
+  return (EventSubscriptionSpecification spec) {
+    expect(spec.eventType, expectedType);
+    expect(spec.useCapture, expectedUseCapture);
+  };
+}
+
+main() {
+  useHtmlConfiguration();
+
+  eventTest('Event', () => new Event('foo'), (ev) {
+    expect(ev.type, equals('foo'));
+  }, checkSpec('foo'));
+
+  eventTest(
+      'WheelEvent',
+      () => new WheelEvent("mousewheel",
+          deltaX: 1,
+          deltaY: 0,
+          detail: 4,
+          screenX: 3,
+          screenY: 4,
+          clientX: 5,
+          clientY: 6,
+          ctrlKey: true,
+          altKey: true,
+          shiftKey: true,
+          metaKey: true), (ev) {
+    expect(ev.deltaX, 1);
+    expect(ev.deltaY, 0);
+    expect(ev.screen.x, 3);
+    expect(ev.screen.y, 4);
+    expect(ev.client.x, 5);
+    expect(ev.client.y, 6);
+    expect(ev.ctrlKey, isTrue);
+    expect(ev.altKey, isTrue);
+    expect(ev.shiftKey, isTrue);
+    expect(ev.metaKey, isTrue);
+  }, checkSpec('mousewheel'), type: 'mousewheel');
+
+  eventTest('Event - no-create', () => new Event('foo'), (ev) {
+    expect(ev, isNull);
+  }, checkSpec('foo'), abortCreation: true);
+
+  eventTest(
+      'WheelEvent - no-create',
+      () => new WheelEvent("mousewheel",
+          deltaX: 1,
+          deltaY: 0,
+          detail: 4,
+          screenX: 3,
+          screenY: 4,
+          clientX: 5,
+          clientY: 6,
+          ctrlKey: true,
+          altKey: true,
+          shiftKey: true,
+          metaKey: true), (ev) {
+    expect(ev, isNull);
+  }, checkSpec('mousewheel'), type: 'mousewheel', abortCreation: true);
+
+  eventTest('Event - no-run', () => new Event('foo'), (ev) {
+    expect(ev, isNull);
+  }, checkSpec('foo'), abortEvent: true);
+
+  eventTest(
+      'WheelEvent - no-run',
+      () => new WheelEvent("mousewheel",
+          deltaX: 1,
+          deltaY: 0,
+          detail: 4,
+          screenX: 3,
+          screenY: 4,
+          clientX: 5,
+          clientY: 6,
+          ctrlKey: true,
+          altKey: true,
+          shiftKey: true,
+          metaKey: true), (ev) {
+    expect(ev, isNull);
+  }, checkSpec('mousewheel'), type: 'mousewheel', abortEvent: true);
+
+  // Register for 'foo', but receive a 'bar' event, because the specification
+  // is rewritten.
+  eventTest(
+      'Event - replace eventType',
+      () => new Event('bar'),
+      (ev) {
+        expect(ev.type, equals('bar'));
+      },
+      checkSpec('bar'),
+      type: 'foo',
+      modifySpec: (EventSubscriptionSpecification spec) {
+        return spec.replace(eventType: 'bar');
+      });
+
+  // Intercept the 'foo' event and replace it with a 'bar' event.
+  eventTest(
+      'Event - intercept result',
+      () => new Event('foo'),
+      (ev) {
+        expect(ev.type, equals('bar'));
+      },
+      checkSpec('foo'),
+      type: 'foo',
+      modifyEvent: (Event event) {
+        return new Event('bar');
+      });
+}
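
Both zone handlers in the test above share one skeleton: inspect the
specification or task, optionally rewrite or swallow it, and otherwise forward
to the parent delegate. A stripped-down sketch of that skeleton, assuming the
experimental task API (the createTask/runTask parameters of ZoneSpecification)
that these tests exercise:

  import 'dart:async';

  main() {
    runZoned(() {
      // ... code whose tasks (event listeners, frames, XHRs) are observed.
    }, zoneSpecification: new ZoneSpecification(
        createTask: (self, parent, zone, create, spec) {
          // Inspect or replace `spec` and `create` here, then forward.
          print("creating task: ${spec.name}");
          return parent.createTask(zone, create, spec);
        },
        runTask: (self, parent, zone, run, task, arg) {
          // Skipping this call would swallow the event; `arg` can also
          // be rewritten before forwarding.
          parent.runTask(zone, run, task, arg);
        }));
  }
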
diff --git a/tests/html/html.status b/tests/html/html.status
index 921c623..65ff46f 100644
--- a/tests/html/html.status
+++ b/tests/html/html.status
@@ -31,7 +31,7 @@
 [ $compiler == dart2js && $checked ]
 js_function_getter_trust_types_test: Skip # --trust-type-annotations incompatible with --checked
 
-[ $compiler == dart2js && $checked && $browser ]
+[ $compiler == dart2js && $checked && $browser && $runtime != drt ]
 js_typed_interop_test/method: Fail # Issue 24822
 
 [ $compiler == dart2js && $csp && $browser ]
@@ -142,6 +142,7 @@
 event_test: RuntimeError # Issue 23437. Only three failures, but hard to break them out.
 wheelevent_test: RuntimeError # Issue 23437
 text_event_test: RuntimeError # Issue 23437
+event_zone_task_test: RuntimeError # Issue 23437
 transition_event_test/functional: Skip # Times out. Issue 22167
 request_animation_frame_test: Skip # Times out. Issue 22167
 
@@ -185,6 +186,7 @@
 webgl_1_test/supported: Fail
 websql_test/supported: Fail
 xhr_test/json: Fail # IE10 returns string, not JSON object
+xhr_task_test/json: Fail # IE10 returns string, not JSON object
 xhr_test/supported_overrideMimeType: Fail
 xsltprocessor_test/supported: Fail
 worker_test/functional: Fail # IE uses incorrect security context for Blob URIs.
@@ -242,7 +244,8 @@
 touchevent_test/supported: Fail # IE does not support TouchEvents
 webgl_1_test/functional: Fail
 websql_test/supported: Fail
-xhr_test/json: Fail # IE10 returns string, not JSON object
+xhr_test/json: Fail # IE11 returns string, not JSON object
+xhr_task_test/json: Fail # IE11 returns string, not JSON object
 xsltprocessor_test/supported: Fail
 
 [ $runtime == ie10 ]
@@ -361,6 +364,7 @@
 
 [ (($runtime == dartium || $runtime == drt) && $system == macos) || $system == windows ]
 xhr_test/xhr: Skip # Times out.  Issue 21527
+xhr_task_test/xhr: Skip # Times out.  Issue 21527
 
 [ $compiler == dart2analyzer ]
 custom/document_register_basic_test: StaticWarning
diff --git a/tests/html/js_test.dart b/tests/html/js_test.dart
index ae9d9b3..c554081 100644
--- a/tests/html/js_test.dart
+++ b/tests/html/js_test.dart
@@ -194,6 +194,8 @@
   document.body.append(script);
 }
 
+typedef bool StringToBool(String s);
+
 // Some tests are either causing other tests to fail in IE9, or they are failing
 // for unknown reasons
 // useHtmlConfiguration+ImageData bug: dartbug.com/14355
@@ -922,6 +924,17 @@
 
     });
 
+    group('JavaScriptFunction', () {
+      test('is check', () {
+        var fn = (String s) => true;
+        var jsFn = allowInterop(fn);
+        expect(fn is StringToBool, isTrue);
+        expect(jsFn is StringToBool, isTrue);
+        expect(jsFn is Function, isTrue);
+        expect(jsFn is List, isFalse);
+      });
+    });
+
     group('Dart->JS', () {
 
       test('Date', () {
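
The new 'is check' group verifies that wrapping a Dart function with
allowInterop preserves its Dart function type. A minimal sketch of the
behavior being tested (assuming allowInterop is imported from package:js, as
interop code typically does):

  import 'package:js/js.dart'; // assumed source of allowInterop

  typedef bool StringToBool(String s);

  main() {
    bool isLong(String s) => s.length > 3;
    var jsFn = allowInterop(isLong);
    // The wrapper can be handed to JavaScript, yet it is still a Dart
    // function of the same static type:
    print(jsFn is StringToBool); // true
    print(jsFn is Function);     // true
  }
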
diff --git a/tests/html/request_animation_task_test.dart b/tests/html/request_animation_task_test.dart
new file mode 100644
index 0000000..3d388dd
--- /dev/null
+++ b/tests/html/request_animation_task_test.dart
@@ -0,0 +1,170 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library EventTaskZoneTest;
+
+import 'package:unittest/unittest.dart';
+import 'package:unittest/html_config.dart';
+import 'dart:async';
+import 'dart:html';
+
+// Tests zone tasks with window.requestAnimationFrame.
+
+class MockAnimationFrameTask implements AnimationFrameTask {
+  static int taskId = 499;
+
+  final int id;
+  final Zone zone;
+  bool _isCanceled = false;
+  Function _callback;
+
+  MockAnimationFrameTask(
+      this.id, this.zone, this._callback);
+
+  void cancel(Window window) {
+    _isCanceled = true;
+  }
+
+  trigger(num stamp) {
+    zone.runTask(run, this, stamp);
+  }
+
+  static create(AnimationFrameRequestSpecification spec, Zone zone) {
+    var callback = zone.registerUnaryCallback(spec.callback);
+    return new MockAnimationFrameTask(
+        taskId++, zone, callback);
+  }
+
+  static run(MockAnimationFrameTask task, num arg) {
+    AnimationFrameTask.removeMapping(task.id);
+    task._callback(arg);
+  }
+}
+
+animationFrameTest() {
+  test("animationFrameTest - no intercept", () async {
+    AnimationFrameTask lastTask;
+    bool sawRequest = false;
+    int id;
+    num providedArg;
+
+    Object createTaskHandler(Zone self, ZoneDelegate parent, Zone zone,
+        TaskCreate create, TaskSpecification specification) {
+      if (specification is AnimationFrameRequestSpecification) {
+        sawRequest = true;
+        lastTask = parent.createTask(zone, create, specification);
+        id = lastTask.id;
+        return lastTask;
+      }
+      return parent.createTask(zone, create, specification);
+    }
+
+    void runTaskHandler(Zone self, ZoneDelegate parent, Zone zone, TaskRun run,
+        Object task, Object arg) {
+      if (identical(task, lastTask)) {
+        providedArg = arg;
+      }
+      parent.runTask(zone, run, task, arg);
+    }
+
+    var completer = new Completer();
+    var publicId;
+    runZoned(() {
+      publicId = window.requestAnimationFrame((num stamp) {
+        completer.complete(stamp);
+      });
+    },
+        zoneSpecification: new ZoneSpecification(
+            createTask: createTaskHandler, runTask: runTaskHandler));
+
+    var referenceCompleter = new Completer();
+    window.requestAnimationFrame((num stamp) {
+      referenceCompleter.complete(stamp);
+    });
+
+    var callbackStamp = await completer.future;
+    var referenceStamp = await referenceCompleter.future;
+
+    expect(callbackStamp, equals(referenceStamp));
+    expect(providedArg, equals(callbackStamp));
+    expect(sawRequest, isTrue);
+    expect(publicId, isNotNull);
+    expect(publicId, equals(id));
+  });
+}
+
+interceptedAnimationFrameTest() {
+  test("animationFrameTest - intercepted", () {
+    List<MockAnimationFrameTask> tasks = [];
+    List<num> loggedRuns = [];
+    int executedTaskId;
+    num executedStamp;
+
+    Object createTaskHandler(Zone self, ZoneDelegate parent, Zone zone,
+        TaskCreate create, TaskSpecification specification) {
+      if (specification is AnimationFrameRequestSpecification) {
+        var task = parent.createTask(
+            zone, MockAnimationFrameTask.create, specification);
+        tasks.add(task);
+        return task;
+      }
+      return parent.createTask(zone, create, specification);
+    }
+
+    void runTaskHandler(Zone self, ZoneDelegate parent, Zone zone, TaskRun run,
+        Object task, Object arg) {
+      if (tasks.contains(task)) {
+        loggedRuns.add(arg);
+      }
+      parent.runTask(zone, run, task, arg);
+    }
+
+    var id0, id1, id2;
+
+    runZoned(() {
+      id0 = window.requestAnimationFrame((num stamp) {
+        executedTaskId = id0;
+        executedStamp = stamp;
+      });
+      id1 = window.requestAnimationFrame((num stamp) {
+        executedTaskId = id1;
+        executedStamp = stamp;
+      });
+      id2 = window.requestAnimationFrame((num stamp) {
+        executedTaskId = id2;
+        executedStamp = stamp;
+      });
+    },
+        zoneSpecification: new ZoneSpecification(
+            createTask: createTaskHandler, runTask: runTaskHandler));
+
+    expect(tasks.length, 3);
+    expect(executedTaskId, isNull);
+    expect(executedStamp, isNull);
+    expect(loggedRuns.isEmpty, isTrue);
+
+    tasks[0].trigger(123.1);
+    expect(executedTaskId, id0);
+    expect(executedStamp, 123.1);
+
+    tasks[1].trigger(123.2);
+    expect(executedTaskId, id1);
+    expect(executedStamp, 123.2);
+
+    expect(loggedRuns, equals([123.1, 123.2]));
+
+    window.cancelAnimationFrame(id2);
+    expect(tasks[2]._isCanceled, isTrue);
+    // Cancel it a second time. Should not crash.
+    window.cancelAnimationFrame(id2);
+    expect(tasks[2]._isCanceled, isTrue);
+  });
+}
+
+main() {
+  useHtmlConfiguration();
+
+  animationFrameTest();
+  interceptedAnimationFrameTest();
+}
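
MockAnimationFrameTask above never touches the real browser scheduler; the
test fires a frame itself by routing the callback back through the task's
zone, so runTask interceptors observe the mocked timestamp too. The essential
trigger step, again assuming the experimental task API:

  import 'dart:async';

  // Mirrors MockAnimationFrameTask.trigger above: deliver a mocked
  // timestamp through the zone rather than calling the callback directly.
  void triggerFrame(Zone zone, Function run, Object task, num stamp) {
    zone.runTask(run, task, stamp);
  }
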
diff --git a/tests/html/xhr_task2_test.dart b/tests/html/xhr_task2_test.dart
new file mode 100644
index 0000000..6dee69e
--- /dev/null
+++ b/tests/html/xhr_task2_test.dart
@@ -0,0 +1,280 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library XHRTask2Test;
+
+import 'dart:async';
+import 'dart:convert';
+import 'dart:html';
+import 'dart:typed_data';
+import 'package:unittest/html_individual_config.dart';
+import 'package:unittest/unittest.dart';
+
+class MockProgressEvent implements ProgressEvent {
+  final target;
+  MockProgressEvent(this.target);
+
+  noSuchMethod(Invocation invocation) {
+    throw "missing function in MockProgressEvent";
+  }
+}
+
+class MockHttpRequestTask implements Future<HttpRequest> {
+  final Completer completer = new Completer<HttpRequest>();
+  final HttpRequestTaskSpecification spec;
+  final Zone zone;
+
+  MockHttpRequestTask(this.spec, this.zone);
+
+  void trigger(response) {
+    var xhr = new MockHttpRequest(spec, response);
+    var arg;
+    if (spec.url == "NonExistingFile") {
+      arg = new MockProgressEvent(xhr);
+    } else {
+      arg = xhr;
+    }
+    zone.runTask(run, this, arg);
+  }
+
+  then(onData, {onError}) => completer.future.then(onData, onError: onError);
+  catchError(f, {test}) => completer.future.catchError(f, test: test);
+  whenComplete(f) => completer.future.whenComplete(f);
+  asStream() => completer.future.asStream();
+  timeout(timeLimit, {onTimeout}) =>
+      completer.future.timeout(timeLimit, onTimeout: onTimeout);
+
+  static create(HttpRequestTaskSpecification spec, Zone zone) {
+    return new MockHttpRequestTask(spec, zone);
+  }
+
+  static run(MockHttpRequestTask task, value) {
+    if (value is HttpRequest) {
+      task.completer.complete(value);
+    } else {
+      task.completer.completeError(value);
+    }
+  }
+}
+
+class MockHttpRequest implements HttpRequest {
+  final HttpRequestTaskSpecification spec;
+  final response;
+
+  MockHttpRequest(this.spec, this.response);
+
+  noSuchMethod(Invocation invocation) {
+    print("isGetter: ${invocation.isGetter}");
+    print("isMethod: ${invocation.isMethod}");
+    print("memberName: ${invocation.memberName}");
+  }
+
+  int get status => spec.url == "NonExistingFile" ? 404 : 200;
+
+  get readyState => HttpRequest.DONE;
+  get responseText => "$response";
+
+  Map get responseHeaders => {'content-type': 'text/plain; charset=utf-8'};
+}
+
+main() {
+  useHtmlIndividualConfiguration();
+  unittestConfiguration.timeout = const Duration(milliseconds: 800);
+
+  var urlExpando = new Expando();
+
+  var url = 'some/url.html';
+
+  Function buildCreateTaskHandler(List log, List tasks) {
+    Object createTaskHandler(Zone self, ZoneDelegate parent, Zone zone,
+        TaskCreate create, TaskSpecification spec) {
+      if (spec is HttpRequestTaskSpecification) {
+        var url = spec.url;
+        var method = spec.method;
+        var withCredentials = spec.withCredentials;
+        var responseType = spec.responseType;
+        var mimeType = spec.mimeType;
+        var data = spec.sendData;
+
+        log.add("request $url");
+        var dataLog = data is List<int> ? "binary ${data.length}" : "$data";
+        log.add("  method: $method withCredentials: $withCredentials "
+            "responseType: $responseType mimeType: $mimeType data: $dataLog");
+        var task = parent.createTask(zone, MockHttpRequestTask.create, spec);
+        urlExpando[task] = url;
+        tasks.add(task);
+        return task;
+      }
+      if (spec is EventSubscriptionSpecification) {
+        EventSubscriptionSpecification eventSpec = spec;
+        if (eventSpec.target is HttpRequest) {
+          HttpRequest target = eventSpec.target;
+          log.add("event listener on http-request ${eventSpec.eventType}");
+          if (eventSpec.eventType == "readystatechange") {
+            var oldOnData = eventSpec.onData;
+            spec = eventSpec.replace(onData: (event) {
+              oldOnData(event);
+              if (target.readyState == HttpRequest.DONE) {
+                log.add("unknown request done");
+              }
+            });
+          }
+        }
+      }
+      return parent.createTask(zone, create, spec);
+    }
+
+    return createTaskHandler;
+  }
+
+  Function buildRunTaskHandler(List log, List tasks) {
+    void runTaskHandler(Zone self, ZoneDelegate parent, Zone zone, TaskRun run,
+        Object task, Object arg) {
+      if (tasks.contains(task)) {
+        var url = urlExpando[task];
+        if (arg is Error || arg is Exception) {
+          log.add("failed $url");
+        } else {
+          if (arg is ProgressEvent) {
+            log.add("success $url with progress-event");
+          } else if (arg is HttpRequest) {
+            log.add("success $url with http-request");
+          } else {
+            log.add("success $url (unknown arg)");
+          }
+        }
+      }
+      parent.runTask(zone, run, task, arg);
+    }
+
+    return runTaskHandler;
+  }
+
+  Future<List> runMocked(response, fun) async {
+    var log = [];
+    var tasks = [];
+    var future = runZoned(fun,
+        zoneSpecification: new ZoneSpecification(
+            createTask: buildCreateTaskHandler(log, tasks),
+            runTask: buildRunTaskHandler(log, tasks)));
+    // Wait a full cycle to make sure things settle.
+    await new Future(() {});
+    var beforeTriggerLog = log.toList();
+    log.clear();
+    expect(tasks.length, 1);
+    tasks.single.trigger(response);
+    await future;
+    return [beforeTriggerLog, log];
+  }
+
+  void validate200Response(xhr) {
+    expect(xhr.status, equals(200));
+    var data = JSON.decode(xhr.responseText);
+    expect(data, contains('feed'));
+    expect(data['feed'], contains('entry'));
+    expect(data, isMap);
+  }
+
+  void validate404(xhr) {
+    expect(xhr.status, equals(404));
+    // We cannot say much about xhr.responseText; most HTTP servers will
+    // include an HTML page explaining the error to a human.
+    String responseText = xhr.responseText;
+    expect(responseText, isNotNull);
+  }
+
+  group('xhr', () {
+    test('XHR.request No file', () async {
+      var log = await runMocked("404", () {
+        var completer = new Completer();
+        HttpRequest.request('NonExistingFile').then((_) {
+          fail('Request should not have succeeded.');
+        }, onError: expectAsync((error) {
+          var xhr = error.target;
+          expect(xhr.readyState, equals(HttpRequest.DONE));
+          validate404(xhr);
+          completer.complete('done');
+        }));
+        return completer.future;
+      });
+      expect(
+          log,
+          equals([
+            [
+              'request NonExistingFile',
+              '  method: null withCredentials: null responseType: null '
+                  'mimeType: null data: null',
+            ],
+            ['success NonExistingFile with progress-event']
+          ]));
+    });
+
+    test('XHR.request file', () async {
+      var log = await runMocked('{"feed": {"entry": 499}}', () {
+        var completer = new Completer();
+        HttpRequest.request(url).then(expectAsync((xhr) {
+          expect(xhr.readyState, equals(HttpRequest.DONE));
+          validate200Response(xhr);
+          completer.complete('done');
+        }));
+        return completer.future;
+      });
+      expect(
+          log,
+          equals([
+            [
+              'request $url',
+              '  method: null withCredentials: null responseType: null '
+                  'mimeType: null data: null'
+            ],
+            ['success $url with http-request']
+          ]));
+    });
+
+    test('XHR.getString file', () async {
+      var log = await runMocked("foo", () {
+        return HttpRequest.getString(url).then(expectAsync((str) {}));
+      });
+      expect(
+          log,
+          equals([
+            [
+              'request $url',
+              '  method: null withCredentials: null responseType: null '
+                  'mimeType: null data: null'
+            ],
+            ['success $url with http-request']
+          ]));
+    });
+
+    test('XHR.request responseType arraybuffer', () async {
+      if (Platform.supportsTypedData) {
+        var data = new Uint8List(128);
+        var log = await runMocked(data.buffer, () {
+          return HttpRequest.request(url,
+              responseType: 'arraybuffer',
+              requestHeaders: {
+                'Content-Type': 'text/xml'
+              }).then(expectAsync((xhr) {
+            expect(xhr.status, equals(200));
+            var byteBuffer = xhr.response;
+            expect(byteBuffer, new isInstanceOf<ByteBuffer>());
+            expect(byteBuffer, isNotNull);
+          }));
+        });
+        expect(
+            log,
+            equals([
+              [
+                'request $url',
+                '  method: null withCredentials: null responseType: arraybuffer'
+                    ' mimeType: null data: null'
+              ],
+              ['success $url with http-request']
+            ]));
+      }
+    });
+  });
+}
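
MockProgressEvent and MockHttpRequest above use a standard Dart mocking
trick: implement the interface, define only the members the test touches, and
let noSuchMethod absorb everything else. A self-contained sketch with a
made-up Request interface:

  abstract class Request {
    int get status;
    String get responseText;
  }

  class MockRequest implements Request {
    int get status => 200;
    // Members not defined here (responseText, ...) are routed through
    // noSuchMethod instead of producing a static warning.
    noSuchMethod(Invocation invocation) {
      throw "missing member in MockRequest: ${invocation.memberName}";
    }
  }

  main() {
    Request r = new MockRequest();
    print(r.status);  // 200
    r.responseText;   // throws: routed through noSuchMethod
  }
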
diff --git a/tests/html/xhr_task_test.dart b/tests/html/xhr_task_test.dart
new file mode 100644
index 0000000..c315ae6
--- /dev/null
+++ b/tests/html/xhr_task_test.dart
@@ -0,0 +1,508 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library XHRTaskTest;
+
+import 'dart:async';
+import 'dart:convert';
+import 'dart:html';
+import 'dart:typed_data';
+import 'package:unittest/html_individual_config.dart';
+import 'package:unittest/unittest.dart';
+
+main() {
+  useHtmlIndividualConfiguration();
+
+  // Cache blocker is a workaround for:
+  // https://code.google.com/p/dart/issues/detail?id=11834
+  var cacheBlocker = new DateTime.now().millisecondsSinceEpoch;
+  var url = '/root_dart/tests/html/xhr_cross_origin_data.txt?'
+      'cacheBlock=$cacheBlocker';
+
+  var urlExpando = new Expando();
+
+  Function buildCreateTaskHandler(List log, List tasks) {
+    Object createTaskHandler(Zone self, ZoneDelegate parent, Zone zone,
+        TaskCreate create, TaskSpecification spec) {
+      if (spec is HttpRequestTaskSpecification) {
+        var url = spec.url;
+        var method = spec.method;
+        var withCredentials = spec.withCredentials;
+        var responseType = spec.responseType;
+        var mimeType = spec.mimeType;
+        var data = spec.sendData;
+
+        log.add("request $url");
+        var dataLog = data is List<int> ? "binary ${data.length}" : "$data";
+        log.add("  method: $method withCredentials: $withCredentials "
+            "responseType: $responseType mimeType: $mimeType data: $dataLog");
+        var task = parent.createTask(zone, create, spec);
+        urlExpando[task] = url;
+        tasks.add(task);
+        return task;
+      }
+      if (spec is HttpRequestSendTaskSpecification) {
+        var data = spec.sendData;
+        var dataLog = data is List<int> ? "binary ${data.length}" : "$data";
+        log.add("http-request (no info), data: $dataLog");
+        var task = parent.createTask(zone, create, spec);
+        tasks.add(task);
+        urlExpando[task] = "unknown";
+        return task;
+      }
+      if (spec is EventSubscriptionSpecification) {
+        EventSubscriptionSpecification eventSpec = spec;
+        if (eventSpec.target is HttpRequest) {
+          HttpRequest target = eventSpec.target;
+          log.add("event listener on http-request ${eventSpec.eventType}");
+          if (eventSpec.eventType == "readystatechange") {
+            var oldOnData = eventSpec.onData;
+            spec = eventSpec.replace(onData: (event) {
+              oldOnData(event);
+              if (target.readyState == HttpRequest.DONE) {
+                log.add("unknown request done");
+              }
+            });
+          }
+        }
+      }
+      return parent.createTask(zone, create, spec);
+    }
+
+    return createTaskHandler;
+  }
+
+  Function buildRunTaskHandler(List log, List tasks) {
+    void runTaskHandler(Zone self, ZoneDelegate parent, Zone zone,
+        TaskRun run, Object task, Object arg) {
+      if (tasks.contains(task)) {
+        var url = urlExpando[task];
+        if (arg is Error || arg is Exception) {
+          log.add("failed $url");
+        } else {
+          if (arg is ProgressEvent) {
+            log.add("success $url with progress-event");
+          } else if (arg is HttpRequest) {
+            log.add("success $url with http-request");
+          } else {
+            log.add("success $url (unknown arg)");
+          }
+        }
+      }
+      parent.runTask(zone, run, task, arg);
+    }
+
+    return runTaskHandler;
+  }
+
+  Future<List> runWithLogging(fun) async {
+    var log = [];
+    var tasks = [];
+    await runZoned(fun, zoneSpecification: new ZoneSpecification(
+        createTask: buildCreateTaskHandler(log, tasks),
+        runTask: buildRunTaskHandler(log, tasks)));
+    return log;
+  }
+
+  void validate200Response(xhr) {
+    expect(xhr.status, equals(200));
+    var data = JSON.decode(xhr.responseText);
+    expect(data, contains('feed'));
+    expect(data['feed'], contains('entry'));
+    expect(data, isMap);
+  }
+
+  void validate404(xhr) {
+    expect(xhr.status, equals(404));
+    // We cannot say much about xhr.responseText; most HTTP servers will
+    // include an HTML page explaining the error to a human.
+    String responseText = xhr.responseText;
+    expect(responseText, isNotNull);
+  }
+
+  group('xhr', () {
+    test('XHR No file', () async {
+      var log = await runWithLogging(() {
+        var completer = new Completer();
+        HttpRequest xhr = new HttpRequest();
+        xhr.open("GET", "NonExistingFile", async: true);
+        xhr.onReadyStateChange.listen(expectAsyncUntil((event) {
+          if (xhr.readyState == HttpRequest.DONE) {
+            validate404(xhr);
+            completer.complete("done");
+          }
+        }, () => xhr.readyState == HttpRequest.DONE));
+        xhr.send();
+        return completer.future;
+      });
+      expect(log, equals([
+        'event listener on http-request readystatechange',
+        'http-request (no info), data: null',
+        'unknown request done'
+      ]));
+    });
+
+    test('XHR_file', () async {
+      var log = await runWithLogging(() {
+        var completer = new Completer();
+        var loadEndCalled = false;
+
+        var xhr = new HttpRequest();
+        xhr.open('GET', url, async: true);
+        xhr.onReadyStateChange.listen(expectAsyncUntil((e) {
+          if (xhr.readyState == HttpRequest.DONE) {
+            validate200Response(xhr);
+
+            Timer.run(expectAsync(() {
+              expect(loadEndCalled, HttpRequest.supportsLoadEndEvent);
+              completer.complete("done");
+            }));
+          }
+        }, () => xhr.readyState == HttpRequest.DONE));
+
+        xhr.onLoadEnd.listen((ProgressEvent e) {
+          loadEndCalled = true;
+        });
+        xhr.send();
+        return completer.future;
+      });
+      expect(log, equals([
+        'event listener on http-request readystatechange',
+        'event listener on http-request loadend',
+        'http-request (no info), data: null',
+        'unknown request done'
+      ]));
+    });
+
+    test('XHR.request No file', () async {
+      var log = await runWithLogging(() {
+        var completer = new Completer();
+        HttpRequest.request('NonExistingFile').then(
+            (_) { fail('Request should not have succeeded.'); },
+            onError: expectAsync((error) {
+              var xhr = error.target;
+              expect(xhr.readyState, equals(HttpRequest.DONE));
+              validate404(xhr);
+              completer.complete('done');
+            }));
+        return completer.future;
+      });
+      expect(log, equals([
+        'request NonExistingFile',
+        '  method: null withCredentials: null responseType: null '
+            'mimeType: null data: null',
+        'event listener on http-request load',
+        'event listener on http-request error',
+        'success NonExistingFile with progress-event'
+      ]));
+    });
+
+    test('XHR.request file', () async {
+      var log = await runWithLogging(() {
+        var completer = new Completer();
+        HttpRequest.request(url).then(expectAsync((xhr) {
+          expect(xhr.readyState, equals(HttpRequest.DONE));
+          validate200Response(xhr);
+          completer.complete('done');
+        }));
+        return completer.future;
+      });
+      expect(log, equals([
+        'request $url',
+        '  method: null withCredentials: null responseType: null '
+            'mimeType: null data: null',
+        'event listener on http-request load',
+        'event listener on http-request error',
+        'success $url with http-request'
+      ]));
+    });
+
+    test('XHR.request onProgress', () async {
+      var log = await runWithLogging(() {
+        var completer = new Completer();
+        var progressCalled = false;
+        HttpRequest.request(url,
+            onProgress: (_) {
+              progressCalled = true;
+            }).then(expectAsync(
+            (xhr) {
+              expect(xhr.readyState, equals(HttpRequest.DONE));
+              expect(progressCalled, HttpRequest.supportsProgressEvent);
+              validate200Response(xhr);
+              completer.complete("done");
+        }));
+        return completer.future;
+      });
+      expect(log, equals([
+        'request $url',
+        '  method: null withCredentials: null responseType: null '
+            'mimeType: null data: null',
+        'event listener on http-request progress',
+        'event listener on http-request load',
+        'event listener on http-request error',
+        'success $url with http-request'
+      ]));
+    });
+
+    test('XHR.request withCredentials No file', () async {
+      var log = await runWithLogging(() {
+        var completer = new Completer();
+        HttpRequest.request('NonExistingFile', withCredentials: true).then(
+            (_) { fail('Request should not have succeeded.'); },
+            onError: expectAsync((error) {
+              var xhr = error.target;
+              expect(xhr.readyState, equals(HttpRequest.DONE));
+              validate404(xhr);
+              completer.complete("done");
+            }));
+        return completer.future;
+      });
+      expect(log, equals([
+        'request NonExistingFile',
+        '  method: null withCredentials: true responseType: null '
+            'mimeType: null data: null',
+        'event listener on http-request load',
+        'event listener on http-request error',
+        'success NonExistingFile with progress-event'
+      ]));
+    });
+
+
+    test('XHR.request withCredentials file', () async {
+      var log = await runWithLogging(() {
+        var completer = new Completer();
+        HttpRequest.request(url, withCredentials: true).then(expectAsync((xhr) {
+          expect(xhr.readyState, equals(HttpRequest.DONE));
+          validate200Response(xhr);
+          completer.complete("done");
+        }));
+        return completer.future;
+      });
+      expect(log, equals([
+        'request $url',
+        '  method: null withCredentials: true responseType: null '
+            'mimeType: null data: null',
+        'event listener on http-request load',
+        'event listener on http-request error',
+        'success $url with http-request'
+      ]));
+    });
+
+    test('XHR.getString file', () async {
+      var log = await runWithLogging(() {
+        return HttpRequest.getString(url).then(expectAsync((str) {}));
+      });
+      expect(log, equals([
+        'request $url',
+        '  method: null withCredentials: null responseType: null '
+            'mimeType: null data: null',
+        'event listener on http-request load',
+        'event listener on http-request error',
+        'success $url with http-request'
+      ]));
+    });
+
+    test('XHR.getString No file', () async {
+      var log = await runWithLogging(() {
+        return HttpRequest.getString('NonExistingFile').then(
+            (_) { fail('Succeeded for non-existing file.'); },
+            onError: expectAsync((error) {
+              validate404(error.target);
+            }));
+      });
+      expect(log, equals([
+        'request NonExistingFile',
+        '  method: null withCredentials: null responseType: null '
+            'mimeType: null data: null',
+        'event listener on http-request load',
+        'event listener on http-request error',
+        'success NonExistingFile with progress-event'
+      ]));
+    });
+
+    test('XHR.request responseType arraybuffer', () async {
+      if (Platform.supportsTypedData) {
+        var log = await runWithLogging(() {
+          return HttpRequest.request(url, responseType: 'arraybuffer',
+              requestHeaders: {'Content-Type': 'text/xml'}).then(
+              expectAsync((xhr) {
+                expect(xhr.status, equals(200));
+                var byteBuffer = xhr.response;
+                expect(byteBuffer, new isInstanceOf<ByteBuffer>());
+                expect(byteBuffer, isNotNull);
+              }));
+        });
+        expect(log, equals([
+          'request $url',
+          '  method: null withCredentials: null responseType: arraybuffer '
+              'mimeType: null data: null',
+          'event listener on http-request load',
+          'event listener on http-request error',
+          'success $url with http-request'
+        ]));
+      }
+    });
+
+    test('overrideMimeType', () async {
+      var expectation =
+          HttpRequest.supportsOverrideMimeType ? returnsNormally : throws;
+
+      var log = await runWithLogging(() {
+        var completer = new Completer();
+        expect(() {
+          HttpRequest.request(url, mimeType: 'application/binary')
+              .whenComplete(completer.complete);
+        }, expectation);
+        return completer.future;
+      });
+      expect(log, equals([
+        'request $url',
+        '  method: null withCredentials: null responseType: null '
+            'mimeType: application/binary data: null',
+        'event listener on http-request load',
+        'event listener on http-request error',
+        'success $url with http-request'
+      ]));
+    });
+
+    if (Platform.supportsTypedData) {
+      test('xhr upload', () async {
+        var log = await runWithLogging(() {
+          var xhr = new HttpRequest();
+          var progressCalled = false;
+          xhr.upload.onProgress.listen((e) {
+            progressCalled = true;
+          });
+
+          xhr.open('POST',
+              '${window.location.protocol}//${window.location.host}/echo');
+
+          // 1MB of payload data, with varying byte values to make sure
+          // it doesn't get compressed to nil.
+          var data = new Uint8List(1 * 1024 * 1024);
+          for (var i = 0; i < data.length; ++i) {
+            data[i] = i & 0xFF;
+          }
+          xhr.send(new Uint8List.view(data.buffer));
+
+          return xhr.onLoad.first.then((_) {
+            expect(
+                progressCalled, isTrue, reason: 'onProgress should be fired');
+          });
+        });
+        expect(log, equals([
+          'http-request (no info), data: binary 1048576',
+          'event listener on http-request load',
+        ]));
+      });
+    }
+
+    test('xhr postFormData', () async {
+      var url = '${window.location.protocol}//${window.location.host}/echo';
+      var log = await runWithLogging(() {
+        var data = { 'name': 'John', 'time': '2 pm'};
+
+        var parts = [];
+        for (var key in data.keys) {
+          parts.add('${Uri.encodeQueryComponent(key)}='
+              '${Uri.encodeQueryComponent(data[key])}');
+        }
+        var encodedData = parts.join('&');
+
+        return HttpRequest.postFormData(url, data).then((xhr) {
+          expect(xhr.responseText, encodedData);
+        });
+      });
+      expect(log, equals([
+        'request $url',
+        '  method: POST withCredentials: null responseType: null '
+            'mimeType: null data: name=John&time=2+pm',
+        'event listener on http-request load',
+        'event listener on http-request error',
+        'success $url with http-request'
+      ]));
+    });
+  });
+
+  group('xhr_requestBlob', () {
+    test('XHR.request responseType blob', () async {
+      if (Platform.supportsTypedData) {
+        var log = await runWithLogging(() {
+          return HttpRequest.request(url, responseType: 'blob').then(
+              (xhr) {
+            expect(xhr.status, equals(200));
+            var blob = xhr.response;
+            expect(blob is Blob, isTrue);
+            expect(blob, isNotNull);
+          });
+        });
+        expect(log, equals([
+          'request $url',
+          '  method: null withCredentials: null responseType: blob '
+              'mimeType: null data: null',
+          'event listener on http-request load',
+          'event listener on http-request error',
+          'success $url with http-request'
+        ]));
+      }
+    });
+  });
+
+  group('json', () {
+    test('xhr responseType json', () async {
+      var url = '${window.location.protocol}//${window.location.host}/echo';
+      var log = await runWithLogging(() {
+        var completer = new Completer();
+        var data = {
+          'key': 'value',
+          'a': 'b',
+          'one': 2,
+        };
+
+        HttpRequest.request(url,
+            method: 'POST',
+            sendData: JSON.encode(data),
+            responseType: 'json').then(
+            expectAsync((xhr) {
+              expect(xhr.status, equals(200));
+              var json = xhr.response;
+              expect(json, equals(data));
+              completer.complete("done");
+            }));
+        return completer.future;
+      });
+      expect(log, equals([
+        'request $url',
+        '  method: POST withCredentials: null responseType: json mimeType: null'
+            ' data: {"key":"value","a":"b","one":2}',
+        'event listener on http-request load',
+        'event listener on http-request error',
+        'success $url with http-request'
+      ]));
+    });
+  });
+
+  group('headers', () {
+    test('xhr responseHeaders', () async {
+      var log = await runWithLogging(() {
+        return HttpRequest.request(url).then(
+            (xhr) {
+          var contentTypeHeader = xhr.responseHeaders['content-type'];
+          expect(contentTypeHeader, isNotNull);
+          // Should be like: 'text/plain; charset=utf-8'
+          expect(contentTypeHeader.contains('text/plain'), isTrue);
+          expect(contentTypeHeader.contains('charset=utf-8'), isTrue);
+        });
+      });
+      expect(log, equals([
+        'request $url',
+        '  method: null withCredentials: null responseType: null'
+            ' mimeType: null data: null',
+        'event listener on http-request load',
+        'event listener on http-request error',
+        'success $url with http-request'
+      ]));
+    });
+  });
+}
diff --git a/tests/language/arg_param_trailing_comma_test.dart b/tests/language/arg_param_trailing_comma_test.dart
index 5c2c42a..4b87b1a 100644
--- a/tests/language/arg_param_trailing_comma_test.dart
+++ b/tests/language/arg_param_trailing_comma_test.dart
@@ -75,7 +75,7 @@
   operator []=(x, y, ) {}                             /// none: continued
 }
 
-main(args, ) {
+main() {
   testCalls();                                        /// none: continued
   // Make sure the Bad class is checked.
   new Bad().method();
diff --git a/tests/language/language.status b/tests/language/language.status
index e2bfc83..03fd476 100644
--- a/tests/language/language.status
+++ b/tests/language/language.status
@@ -5,6 +5,10 @@
 # This directory contains tests that are intended to show the
 # current state of the language.
 
+# Trailing commas are so far supported by:
+# - The VM (vm, dartium, drt, precompiler+dart_precompiled)
+# Remaining targets still fail on arg_param_trailing_comma_test/none.
+[($compiler != none && $compiler != precompiler) || ($runtime != vm && $runtime != dartium && $runtime != drt && $runtime != dart_precompiled)]
 arg_param_trailing_comma_test/none: Fail # Issue 26644
 
 [ ($compiler == none || $compiler == precompiler || $compiler == dart2app || $compiler == dart2appjit) ]
@@ -246,3 +250,8 @@
 [ $compiler == precompiler && $runtime == dart_precompiled && $system == android ]
 vm/optimized_guarded_field_isolates_test: Skip # Issue #26373
 issue23244_test: Skip # Issue #26373
+
+[ $hot_reload ]
+deferred_load_inval_code_test: RuntimeError
+regress_26453_test: Pass, Fail, Crash
+vm/regress_16873_test: Pass, Crash
diff --git a/tests/lib/async/zone_task_test.dart b/tests/lib/async/zone_task_test.dart
new file mode 100644
index 0000000..aa9b008
--- /dev/null
+++ b/tests/lib/async/zone_task_test.dart
@@ -0,0 +1,310 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Tests basic functionality of tasks in zones.
+
+import 'package:expect/expect.dart';
+import 'package:async_helper/async_helper.dart';
+import 'dart:async';
+
+List log = [];
+
+class MySpecification extends TaskSpecification {
+  final Function callback;
+  final bool isOneShot;
+  final int value;
+
+  MySpecification(void this.callback(), this.isOneShot, this.value);
+
+  String get name => "test.specification-name";
+}
+
+class MyTask {
+  final Zone zone;
+  final Function callback;
+  final int id;
+  int invocationCount = 0;
+  bool shouldStop = false;
+
+  MyTask(this.zone, void this.callback(), this.id);
+}
+
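+// The [TaskRun] function for [MyTask]: it is passed to [Zone.runTask], logs
+// the invocation, and then calls the task's callback.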
+void runMyTask(MyTask task, int value) {
+  log.add("running "
+      "zone: ${Zone.current['name']} "
+      "task-id: ${task.id} "
+      "invocation-count: ${task.invocationCount} "
+      "value: $value");
+  task.callback();
+  task.invocationCount++;
+}
+
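+// The [TaskCreate] function for [MySpecification]: it is passed to
+// [Zone.createTask], builds the [MyTask], and schedules timers that run it
+// through the zone.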
+MyTask createMyTask(MySpecification spec, Zone zone) {
+  var task = new MyTask(zone, spec.callback, spec.value);
+  log.add("creating task: ${spec.value} oneshot?: ${spec.isOneShot}");
+  if (spec.isOneShot) {
+    Timer.run(() {
+      zone.runTask(runMyTask, task, task.id);
+    });
+  } else {
+    new Timer.periodic(const Duration(milliseconds: 10), (Timer timer) {
+      zone.runTask(runMyTask, task, task.id);
+      if (task.shouldStop) {
+        timer.cancel();
+      }
+    });
+  }
+  return task;
+}
+
+MyTask startTask(f, bool oneShot, int value) {
+  var spec = new MySpecification(f, oneShot, value);
+  return Zone.current.createTask(createMyTask, spec);
+}
+
+/// Makes sure things are working in a simple setting,
+/// with no interceptions and no modifications.
+Future testCustomTask() {
+  var testCompleter = new Completer();
+  asyncStart();
+
+  Object createTaskHandler(Zone self, ZoneDelegate parent, Zone zone,
+      TaskCreate create, TaskSpecification specification) {
+    if (specification is MySpecification) {
+      log.add("create enter "
+          "zone: ${self['name']} "
+          "spec-value: ${specification.value} "
+          "spec-oneshot?: ${specification.isOneShot}");
+      MyTask result = parent.createTask(zone, create, specification);
+      log.add("create leave");
+      return result;
+    }
+    return parent.createTask(zone, create, specification);
+  }
+
+  void runTaskHandler(Zone self, ZoneDelegate parent, Zone zone, TaskRun run,
+      Object task, Object arg) {
+    if (task is MyTask) {
+      log.add("run enter "
+          "zone: ${self['name']} "
+          "task-id: ${task.id} "
+          "invocation-count: ${task.invocationCount} "
+          "arg: $arg");
+      parent.runTask(zone, run, task, arg);
+      log.add("run leave invocation-count: ${task.invocationCount}");
+      return;
+    }
+    parent.runTask(zone, run, task, arg);
+  }
+
+  runZoned(() async {
+    var completer0 = new Completer();
+    startTask(() {
+      completer0.complete("done");
+    }, true, 0);
+    await completer0.future;
+
+    Expect.listEquals([
+      'create enter zone: custom zone spec-value: 0 spec-oneshot?: true',
+      'creating task: 0 oneshot?: true',
+      'create leave',
+      'run enter zone: custom zone task-id: 0 invocation-count: 0 arg: 0',
+      'running zone: custom zone task-id: 0 invocation-count: 0 value: 0',
+      'run leave invocation-count: 1'
+    ], log);
+    log.clear();
+
+    var completer1 = new Completer();
+    MyTask task1;
+    task1 = startTask(() {
+      if (task1.invocationCount == 1) {
+        task1.shouldStop = true;
+        completer1.complete("done");
+      }
+    }, false, 1);
+    await completer1.future;
+
+    Expect.listEquals([
+      'create enter zone: custom zone spec-value: 1 spec-oneshot?: false',
+      'creating task: 1 oneshot?: false',
+      'create leave',
+      'run enter zone: custom zone task-id: 1 invocation-count: 0 arg: 1',
+      'running zone: custom zone task-id: 1 invocation-count: 0 value: 1',
+      'run leave invocation-count: 1',
+      'run enter zone: custom zone task-id: 1 invocation-count: 1 arg: 1',
+      'running zone: custom zone task-id: 1 invocation-count: 1 value: 1',
+      'run leave invocation-count: 2',
+    ], log);
+    log.clear();
+
+    testCompleter.complete("done");
+    asyncEnd();
+  },
+      zoneValues: {'name': 'custom zone'},
+      zoneSpecification: new ZoneSpecification(
+          createTask: createTaskHandler,
+          runTask: runTaskHandler));
+
+  return testCompleter.future;
+}
+
+/// A more complicated zone that intercepts task creation and rewrites the
+/// specification value and the run argument.
+Future testCustomTask2() {
+  var testCompleter = new Completer();
+  asyncStart();
+
+  Object createTaskHandler(Zone self, ZoneDelegate parent, Zone zone,
+      TaskCreate create, TaskSpecification specification) {
+    if (specification is MySpecification) {
+      log.add("create enter "
+          "zone: ${self['name']} "
+          "spec-value: ${specification.value} "
+          "spec-oneshot?: ${specification.isOneShot}");
+      var replacement = new MySpecification(specification.callback,
+          specification.isOneShot, specification.value + 1);
+      MyTask result = parent.createTask(zone, create, replacement);
+      log.add("create leave");
+      return result;
+    }
+    return parent.createTask(zone, create, specification);
+  }
+
+  void runTaskHandler(Zone self, ZoneDelegate parent, Zone zone, TaskRun run,
+      Object task, Object arg) {
+    if (task is MyTask) {
+      log.add("run enter "
+          "zone: ${self['name']} "
+          "task-id: ${task.id} "
+          "invocation-count: ${task.invocationCount} "
+          "arg: $arg");
+      int value = arg;
+      parent.runTask(zone, run, task, value + 101);
+      log.add("run leave invocation-count: ${task.invocationCount}");
+      return;
+    }
+    parent.runTask(zone, run, task, arg);
+  }
+
+  runZoned(() async {
+    var completer0 = new Completer();
+    startTask(() {
+      completer0.complete("done");
+    }, true, 0);
+    await completer0.future;
+
+    Expect.listEquals([
+      'create enter zone: outer-zone spec-value: 0 spec-oneshot?: true',
+      'creating task: 1 oneshot?: true',
+      'create leave',
+      'run enter zone: outer-zone task-id: 1 invocation-count: 0 arg: 1',
+      'running zone: outer-zone task-id: 1 invocation-count: 0 value: 102',
+      'run leave invocation-count: 1'
+    ], log);
+    log.clear();
+
+    var completer1 = new Completer();
+    MyTask task1;
+    task1 = startTask(() {
+      if (task1.invocationCount == 1) {
+        task1.shouldStop = true;
+        completer1.complete("done");
+      }
+    }, false, 1);
+    await completer1.future;
+
+    Expect.listEquals([
+      'create enter zone: outer-zone spec-value: 1 spec-oneshot?: false',
+      'creating task: 2 oneshot?: false',
+      'create leave',
+      'run enter zone: outer-zone task-id: 2 invocation-count: 0 arg: 2',
+      'running zone: outer-zone task-id: 2 invocation-count: 0 value: 103',
+      'run leave invocation-count: 1',
+      'run enter zone: outer-zone task-id: 2 invocation-count: 1 arg: 2',
+      'running zone: outer-zone task-id: 2 invocation-count: 1 value: 103',
+      'run leave invocation-count: 2',
+    ], log);
+    log.clear();
+
+    var nestedCompleter = new Completer();
+
+    runZoned(() async {
+      var completer0 = new Completer();
+      startTask(() {
+        completer0.complete("done");
+      }, true, 0);
+      await completer0.future;
+
+      Expect.listEquals([
+        'create enter zone: inner-zone spec-value: 0 spec-oneshot?: true',
+        'create enter zone: outer-zone spec-value: 1 spec-oneshot?: true',
+        'creating task: 2 oneshot?: true',
+        'create leave',
+        'create leave',
+        'run enter zone: inner-zone task-id: 2 invocation-count: 0 arg: 2',
+        'run enter zone: outer-zone task-id: 2 invocation-count: 0 arg: 103',
+        'running zone: inner-zone task-id: 2 invocation-count: 0 value: 204',
+        'run leave invocation-count: 1',
+        'run leave invocation-count: 1'
+      ], log);
+      log.clear();
+
+      var completer1 = new Completer();
+      MyTask task1;
+      task1 = startTask(() {
+        if (task1.invocationCount == 1) {
+          task1.shouldStop = true;
+          completer1.complete("done");
+        }
+      }, false, 1);
+      await completer1.future;
+
+      Expect.listEquals([
+        'create enter zone: inner-zone spec-value: 1 spec-oneshot?: false',
+        'create enter zone: outer-zone spec-value: 2 spec-oneshot?: false',
+        'creating task: 3 oneshot?: false',
+        'create leave',
+        'create leave',
+        'run enter zone: inner-zone task-id: 3 invocation-count: 0 arg: 3',
+        'run enter zone: outer-zone task-id: 3 invocation-count: 0 arg: 104',
+        'running zone: inner-zone task-id: 3 invocation-count: 0 value: 205',
+        'run leave invocation-count: 1',
+        'run leave invocation-count: 1',
+        'run enter zone: inner-zone task-id: 3 invocation-count: 1 arg: 3',
+        'run enter zone: outer-zone task-id: 3 invocation-count: 1 arg: 104',
+        'running zone: inner-zone task-id: 3 invocation-count: 1 value: 205',
+        'run leave invocation-count: 2',
+        'run leave invocation-count: 2',
+      ], log);
+      log.clear();
+
+      nestedCompleter.complete("done");
+    },
+        zoneValues: {'name': 'inner-zone'},
+        zoneSpecification: new ZoneSpecification(
+            createTask: createTaskHandler,
+            runTask: runTaskHandler));
+
+    await nestedCompleter.future;
+    testCompleter.complete("done");
+    asyncEnd();
+  },
+      zoneValues: {'name': 'outer-zone'},
+      zoneSpecification: new ZoneSpecification(
+          createTask: createTaskHandler,
+          runTask: runTaskHandler));
+
+  return testCompleter.future;
+}
+
+runTests() async {
+  await testCustomTask();
+  await testCustomTask2();
+}
+
+main() {
+  asyncStart();
+  runTests().then((_) {
+    asyncEnd();
+  });
+}
diff --git a/tests/lib/async/zone_timer_task_test.dart b/tests/lib/async/zone_timer_task_test.dart
new file mode 100644
index 0000000..310f7ca
--- /dev/null
+++ b/tests/lib/async/zone_timer_task_test.dart
@@ -0,0 +1,515 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Tests timer tasks.
+
+import 'package:expect/expect.dart';
+import 'package:async_helper/async_helper.dart';
+import 'dart:async';
+import 'dart:collection';
+
+class MyTimerSpecification implements SingleShotTimerTaskSpecification {
+  final Function callback;
+  final Duration duration;
+
+  MyTimerSpecification(this.callback, this.duration);
+
+  bool get isOneShot => true;
+  String get name => "test.timer-override";
+}
+
+class MyPeriodicTimerSpecification implements PeriodicTimerTaskSpecification {
+  final Function callback;
+  final Duration duration;
+
+  MyPeriodicTimerSpecification(this.callback, this.duration);
+
+  bool get isOneShot => false;
+  String get name => "test.periodic-timer-override";
+}
+
+/// Makes sure things are working in a simple setting,
+/// with no interceptions and no modifications.
+Future testTimerTask() {
+  List log = [];
+
+  var testCompleter = new Completer();
+  asyncStart();
+
+  int taskIdCounter = 0;
+
+  Object createTaskHandler(Zone self, ZoneDelegate parent, Zone zone,
+      TaskCreate create, TaskSpecification specification) {
+    var taskMap = self['taskMap'];
+    var taskIdMap = self['taskIdMap'];
+    if (specification is SingleShotTimerTaskSpecification) {
+      log.add("create enter "
+          "zone: ${self['name']} "
+          "spec-duration: ${specification.duration} "
+          "spec-oneshot?: ${specification.isOneShot}");
+      var result = parent.createTask(zone, create, specification);
+      taskMap[result] = specification;
+      taskIdMap[specification] = taskIdCounter++;
+      log.add("create leave");
+      return result;
+    } else if (specification is PeriodicTimerTaskSpecification) {
+      log.add("create enter "
+          "zone: ${self['name']} "
+          "spec-duration: ${specification.duration} "
+          "spec-oneshot?: ${specification.isOneShot}");
+      var result = parent.createTask(zone, create, specification);
+      taskMap[result] = specification;
+      taskIdMap[specification] = taskIdCounter++;
+      log.add("create leave");
+      return result;
+    }
+    return parent.createTask(zone, create, specification);
+  }
+
+  void runTaskHandler(Zone self, ZoneDelegate parent, Zone zone, TaskRun run,
+      Object task, Object arg) {
+    var taskMap = self['taskMap'];
+    var taskIdMap = self['taskIdMap'];
+    if (taskMap.containsKey(task)) {
+      var spec = taskMap[task];
+      log.add("run enter "
+          "zone: ${self['name']} "
+          "task-id: ${taskIdMap[spec]} "
+          "arg: $arg");
+      parent.runTask(zone, run, task, arg);
+      log.add("run leave");
+      return;
+    }
+    parent.runTask(zone, run, task, arg);
+  }
+
+  runZoned(() async {
+    var completer0 = new Completer();
+    Timer.run(() {
+      completer0.complete("done");
+    });
+    await completer0.future;
+
+    Expect.listEquals([
+      'create enter zone: custom zone spec-duration: 0:00:00.000000 '
+          'spec-oneshot?: true',
+      'create leave',
+      'run enter zone: custom zone task-id: 0 arg: null',
+      'run leave'
+    ], log);
+    log.clear();
+
+    var completer1 = new Completer();
+    var counter1 = 0;
+    new Timer.periodic(const Duration(milliseconds: 5), (Timer timer) {
+      if (counter1++ > 1) {
+        timer.cancel();
+        completer1.complete("done");
+      }
+    });
+    await completer1.future;
+
+    Expect.listEquals([
+      'create enter zone: custom zone spec-duration: 0:00:00.005000 '
+          'spec-oneshot?: false',
+      'create leave',
+      'run enter zone: custom zone task-id: 1 arg: null',
+      'run leave',
+      'run enter zone: custom zone task-id: 1 arg: null',
+      'run leave',
+      'run enter zone: custom zone task-id: 1 arg: null',
+      'run leave'
+    ], log);
+    log.clear();
+
+    testCompleter.complete("done");
+    asyncEnd();
+  },
+      zoneValues: {'name': 'custom zone', 'taskMap': {}, 'taskIdMap': {}},
+      zoneSpecification: new ZoneSpecification(
+          createTask: createTaskHandler,
+          runTask: runTaskHandler));
+
+  return testCompleter.future;
+}
+
+/// A more complicated zone that intercepts timer-task creation and lengthens
+/// one-shot timer durations.
+Future testTimerTask2() {
+  List log = [];
+
+  var testCompleter = new Completer();
+  asyncStart();
+
+  int taskIdCounter = 0;
+
+  Object createTaskHandler(Zone self, ZoneDelegate parent, Zone zone,
+      TaskCreate create, TaskSpecification specification) {
+    var taskMap = self['taskMap'];
+    var taskIdMap = self['taskIdMap'];
+    if (specification is SingleShotTimerTaskSpecification) {
+      log.add("create enter "
+          "zone: ${self['name']} "
+          "spec-duration: ${specification.duration} "
+          "spec-oneshot?: ${specification.isOneShot}");
+      var mySpec = new MyTimerSpecification(specification.callback,
+          specification.duration + const Duration(milliseconds: 2));
+      var result = parent.createTask(zone, create, mySpec);
+      taskMap[result] = specification;
+      taskIdMap[specification] = taskIdCounter++;
+      log.add("create leave");
+      return result;
+    } else if (specification is PeriodicTimerTaskSpecification) {
+      log.add("create enter "
+          "zone: ${self['name']} "
+          "spec-duration: ${specification.duration} "
+          "spec-oneshot?: ${specification.isOneShot}");
+      // Unlike the one-shot branch, the specification is forwarded unchanged;
+      // the log expectations below rely on the original duration.
+      var result = parent.createTask(zone, create, specification);
+      taskMap[result] = specification;
+      taskIdMap[specification] = taskIdCounter++;
+      log.add("create leave");
+      return result;
+    }
+    return parent.createTask(zone, create, specification);
+  }
+
+  void runTaskHandler(Zone self, ZoneDelegate parent, Zone zone, TaskRun run,
+      Object task, Object arg) {
+    var taskMap = self['taskMap'];
+    var taskIdMap = self['taskIdMap'];
+    if (taskMap.containsKey(task)) {
+      var spec = taskMap[task];
+      log.add("run enter "
+          "zone: ${self['name']} "
+          "task-id: ${taskIdMap[spec]} "
+          "arg: $arg");
+      parent.runTask(zone, run, task, arg);
+      log.add("run leave");
+      return;
+    }
+    parent.runTask(zone, run, task, arg);
+  }
+
+  runZoned(() async {
+    var completer0 = new Completer();
+    Timer.run(() {
+      completer0.complete("done");
+    });
+    await completer0.future;
+
+    // No visible change (except for the zone name) in the log, compared to the
+    // simple invocations.
+    Expect.listEquals([
+      'create enter zone: outer-zone spec-duration: 0:00:00.000000 '
+          'spec-oneshot?: true',
+      'create leave',
+      'run enter zone: outer-zone task-id: 0 arg: null',
+      'run leave'
+    ], log);
+    log.clear();
+
+    var completer1 = new Completer();
+    var counter1 = 0;
+    new Timer.periodic(const Duration(milliseconds: 5), (Timer timer) {
+      if (counter1++ > 1) {
+        timer.cancel();
+        completer1.complete("done");
+      }
+    });
+    await completer1.future;
+
+    // No visible change (except for the zone name) in the log, compared to the
+    // simple invocations.
+    Expect.listEquals([
+      'create enter zone: outer-zone spec-duration: 0:00:00.005000 '
+          'spec-oneshot?: false',
+      'create leave',
+      'run enter zone: outer-zone task-id: 1 arg: null',
+      'run leave',
+      'run enter zone: outer-zone task-id: 1 arg: null',
+      'run leave',
+      'run enter zone: outer-zone task-id: 1 arg: null',
+      'run leave'
+    ], log);
+    log.clear();
+
+    var nestedCompleter = new Completer();
+
+    runZoned(() async {
+      var completer0 = new Completer();
+      Timer.run(() {
+        completer0.complete("done");
+      });
+      await completer0.future;
+
+      // The outer zone sees the duration change of the inner zone.
+      Expect.listEquals([
+        'create enter zone: inner-zone spec-duration: 0:00:00.000000 '
+            'spec-oneshot?: true',
+        'create enter zone: outer-zone spec-duration: 0:00:00.002000 '
+            'spec-oneshot?: true',
+        'create leave',
+        'create leave',
+        'run enter zone: inner-zone task-id: 3 arg: null',
+        'run enter zone: outer-zone task-id: 2 arg: null',
+        'run leave',
+        'run leave'
+      ], log);
+      log.clear();
+
+      var completer1 = new Completer();
+      var counter1 = 0;
+      new Timer.periodic(const Duration(milliseconds: 5), (Timer timer) {
+        if (counter1++ > 1) {
+          timer.cancel();
+          completer1.complete("done");
+        }
+      });
+      await completer1.future;
+
+      // The periodic specification was forwarded unchanged, so both zones
+      // log the same duration.
+      Expect.listEquals([
+        'create enter zone: inner-zone spec-duration: 0:00:00.005000 '
+            'spec-oneshot?: false',
+        'create enter zone: outer-zone spec-duration: 0:00:00.005000 '
+            'spec-oneshot?: false',
+        'create leave',
+        'create leave',
+        'run enter zone: inner-zone task-id: 5 arg: null',
+        'run enter zone: outer-zone task-id: 4 arg: null',
+        'run leave',
+        'run leave',
+        'run enter zone: inner-zone task-id: 5 arg: null',
+        'run enter zone: outer-zone task-id: 4 arg: null',
+        'run leave',
+        'run leave',
+        'run enter zone: inner-zone task-id: 5 arg: null',
+        'run enter zone: outer-zone task-id: 4 arg: null',
+        'run leave',
+        'run leave'
+      ], log);
+      log.clear();
+
+      nestedCompleter.complete("done");
+    },
+        zoneValues: {'name': 'inner-zone', 'taskMap': {}, 'taskIdMap': {}},
+        zoneSpecification: new ZoneSpecification(
+            createTask: createTaskHandler,
+            runTask: runTaskHandler));
+
+    await nestedCompleter.future;
+    testCompleter.complete("done");
+    asyncEnd();
+  },
+      zoneValues: {'name': 'outer-zone', 'taskMap': {}, 'taskIdMap': {}},
+      zoneSpecification: new ZoneSpecification(
+          createTask: createTaskHandler,
+          runTask: runTaskHandler));
+
+  return testCompleter.future;
+}
+
+class TimerEntry {
+  final int time;
+  final SimulatedTimer timer;
+
+  TimerEntry(this.time, this.timer);
+}
+
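+/// A [Timer] substitute driven by the test's simulated clock; when it fires,
+/// it dispatches through [Zone.runTask] like a real timer task.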
+class SimulatedTimer implements Timer {
+  static int _idCounter = 0;
+
+  Zone _zone;
+  final int _id = _idCounter++;
+  final Duration _duration;
+  final Function _callback;
+  final bool _isPeriodic;
+  bool _isActive = true;
+
+  SimulatedTimer(this._zone, this._duration, this._callback, this._isPeriodic);
+
+  bool get isActive => _isActive;
+
+  void cancel() {
+    _isActive = false;
+  }
+
+  void _run() {
+    if (!isActive) return;
+    _zone.runTask(_runTimer, this, null);
+  }
+
+  static void _runTimer(SimulatedTimer timer, _) {
+    if (timer._isPeriodic) {
+      timer._callback(timer);
+    } else {
+      timer._callback();
+    }
+  }
+}
+
+testSimulatedTimer() {
+  List log = [];
+
+  var currentTime = 0;
+  // Using a linked queue kept in insertion-sorted order. Not very efficient,
+  // but the test has only very few timers running at the same time.
+  var queue = new DoubleLinkedQueue<TimerEntry>();
+
+  // Schedules the given timer to run at [scheduledTime].
+  void schedule(int scheduledTime, SimulatedTimer timer) {
+    log.add("scheduling timer ${timer._id} for $scheduledTime");
+    if (queue.isEmpty) {
+      queue.add(new TimerEntry(scheduledTime, timer));
+    } else {
+      DoubleLinkedQueueEntry current = queue.firstEntry();
+      while (current != null) {
+        if (current.element.time <= scheduledTime) {
+          current = current.nextEntry();
+        } else {
+          current.prepend(new TimerEntry(scheduledTime, timer));
+          break;
+        }
+      }
+      if (current == null) {
+        queue.add(new TimerEntry(scheduledTime, timer));
+      }
+    }
+  }
+
+  void runQueue() {
+    while (queue.isNotEmpty) {
+      var item = queue.removeFirst();
+      // If multiple callbacks were scheduled at the same time, increment the
+      // current time instead of staying at the same time.
+      currentTime = item.time > currentTime ? item.time : currentTime + 1;
+      SimulatedTimer timer = item.timer;
+      log.add("running timer ${timer._id} at $currentTime "
+          "(active?: ${timer.isActive})");
+      if (!timer.isActive) continue;
+      if (timer._isPeriodic) {
+        schedule(currentTime + timer._duration.inMilliseconds, timer);
+      }
+      item.timer._run();
+    }
+  }
+
+  SimulatedTimer createSimulatedOneShotTimer(
+      SingleShotTimerTaskSpecification spec, Zone zone) {
+    var timer = new SimulatedTimer(zone, spec.duration, spec.callback, false);
+    schedule(currentTime + spec.duration.inMilliseconds, timer);
+    return timer;
+  }
+
+  SimulatedTimer createSimulatedPeriodicTimer(
+      PeriodicTimerTaskSpecification spec, Zone zone) {
+    var timer = new SimulatedTimer(zone, spec.duration, spec.callback, true);
+    schedule(currentTime + spec.duration.inMilliseconds, timer);
+    return timer;
+  }
+
+  Object createSimulatedTaskHandler(Zone self, ZoneDelegate parent, Zone zone,
+      TaskCreate create, TaskSpecification specification) {
+    var taskMap = self['taskMap'];
+    var taskIdMap = self['taskIdMap'];
+    if (specification is SingleShotTimerTaskSpecification) {
+      log.add("create enter "
+          "zone: ${self['name']} "
+          "spec-duration: ${specification.duration} "
+          "spec-oneshot?: ${specification.isOneShot}");
+      var result =
+          parent.createTask(zone, createSimulatedOneShotTimer, specification);
+      log.add("create leave");
+      return result;
+    }
+    if (specification is PeriodicTimerTaskSpecification) {
+      log.add("create enter "
+          "zone: ${self['name']} "
+          "spec-duration: ${specification.duration} "
+          "spec-oneshot?: ${specification.isOneShot}");
+      var result =
+          parent.createTask(zone, createSimulatedPeriodicTimer, specification);
+      log.add("create leave");
+      return result;
+    }
+    return parent.createTask(zone, create, specification);
+  }
+
+  runZoned(() {
+    Timer.run(() {
+      log.add("running Timer.run");
+    });
+
+    var timer0;
+
+    new Timer(const Duration(milliseconds: 10), () {
+      log.add("running Timer(10)");
+      timer0.cancel();
+      log.add("canceled timer0");
+    });
+
+    timer0 = new Timer(const Duration(milliseconds: 15), () {
+      log.add("running Timer(15)");
+    });
+
+    var counter1 = 0;
+    new Timer.periodic(const Duration(milliseconds: 5), (Timer timer) {
+      log.add("running periodic timer $counter1");
+      if (counter1++ > 1) {
+        timer.cancel();
+      }
+    });
+  },
+      zoneSpecification:
+          new ZoneSpecification(createTask: createSimulatedTaskHandler));
+
+  runQueue();
+
+  Expect.listEquals([
+    'create enter zone: null spec-duration: 0:00:00.000000 spec-oneshot?: true',
+    'scheduling timer 0 for 0',
+    'create leave',
+    'create enter zone: null spec-duration: 0:00:00.010000 spec-oneshot?: true',
+    'scheduling timer 1 for 10',
+    'create leave',
+    'create enter zone: null spec-duration: 0:00:00.015000 spec-oneshot?: true',
+    'scheduling timer 2 for 15',
+    'create leave',
+    'create enter zone: null spec-duration: 0:00:00.005000 '
+        'spec-oneshot?: false',
+    'scheduling timer 3 for 5',
+    'create leave',
+    'running timer 0 at 1 (active?: true)',
+    'running Timer.run',
+    'running timer 3 at 5 (active?: true)',
+    'scheduling timer 3 for 10',
+    'running periodic timer 0',
+    'running timer 1 at 10 (active?: true)',
+    'running Timer(10)',
+    'canceled timer0',
+    'running timer 3 at 11 (active?: true)',
+    'scheduling timer 3 for 16',
+    'running periodic timer 1',
+    'running timer 2 at 15 (active?: false)',
+    'running timer 3 at 16 (active?: true)',
+    'scheduling timer 3 for 21',
+    'running periodic timer 2',
+    'running timer 3 at 21 (active?: false)'
+  ], log);
+  log.clear();
+}
+
+runTests() async {
+  await testTimerTask();
+  await testTimerTask2();
+  testSimulatedTimer();
+}
+
+main() {
+  asyncStart();
+  runTests().then((_) {
+    asyncEnd();
+  });
+}
diff --git a/tests/lib/lib.status b/tests/lib/lib.status
index 7a60549..ecfc10d 100644
--- a/tests/lib/lib.status
+++ b/tests/lib/lib.status
@@ -167,6 +167,8 @@
 async/stream_transformation_broadcast_test: RuntimeError # Timer interface not supported: Issue 7728.
 async/stream_controller_test: Fail # Timer interface not supported: Issue 7728.
 async/future_constructor2_test: Fail # Timer interface not supported: Issue 7728.
+async/zone_timer_task_test: Fail # Timer interface not supported: Issue 7728.
+async/zone_task_test: Fail # Timer interface not supported: Issue 7728.
 mirrors/mirrors_reader_test: Skip # Running in v8 suffices. Issue 16589 - RuntimeError.  Issue 22130 - Crash (out of memory).
 
 [ $compiler == dart2js && $checked ]
@@ -373,3 +375,8 @@
 # TODO(vegorov) LoadField bytecode supports only up to 256 fields. Need a long
 # version.
 mirrors/accessor_cache_overflow_test: Skip
+
+[ $hot_reload ]
+convert/chunked_conversion_utf88_test: Pass, Timeout
+convert/streamed_conversion_json_utf8_decode_test: Fail, Crash
+convert/utf85_test: Fail, Crash
diff --git a/tests/standalone/standalone.status b/tests/standalone/standalone.status
index 5c03bea..bcad73f 100644
--- a/tests/standalone/standalone.status
+++ b/tests/standalone/standalone.status
@@ -17,7 +17,6 @@
 package/scenarios/invalid/invalid_package_name_test: RuntimeError, CompileTimeError # Errors intentionally
 package/scenarios/invalid/same_package_twice_test.dart: RuntimeError, CompileTimeError # Errors intentionally
 full_coverage_test: Pass, Slow, Timeout
-verified_mem_test: Pass, Slow, Timeout # Does verify before and after GC.
 
 issue14236_test: Pass # Do not remove this line. It serves as a marker for Issue 14516 comment #4.
 
@@ -142,6 +141,9 @@
 out_of_memory_test: Skip # passes on Mac, crashes on Linux
 oom_error_stacktrace_test: Skip # Fails on Linux
 
+[ $arch == simarm && $mode == debug && $checked ]
+io/web_socket_test: Pass, Fail # Issue 26814
+
 [ $arch == mips ]
 io/file_stat_test: Fail # Issue 17440
 io/process_sync_test: Skip # Starts 10 dart subprocesses, uses too much memory.
@@ -192,9 +194,6 @@
 io/process_sync_test: Pass, Timeout # Issue 24596
 io/sleep_test: Pass, Fail # Issue 25757
 
-[ $arch != ia32 && $arch != x64 && $arch != simarm && $arch != simarmv5te && $mode == debug ]
-verified_mem_test: Skip  # Not yet implemented.
-
 [ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) && $mode == debug && $builder_tag == asan ]
 full_coverage_test: Skip  # Timeout.
 io/file_lock_test: Skip  # Timeout.
@@ -322,3 +321,11 @@
 
 [ $runtime == dart_precompiled ]
 deferred_transitive_import_error_test: Skip # Contains intentional errors.
+
+[ $hot_reload ]
+io/bytes_builder_test: RuntimeError
+io/file_input_stream_test: Crash
+io/file_test: Pass, Crash
+io/web_socket_protocol_processor_test: Pass, Crash
+map_insert_remove_oom_test: Crash
+priority_queue_stress_test: Crash
diff --git a/tests/standalone/verified_mem_test.dart b/tests/standalone/verified_mem_test.dart
deleted file mode 100644
index 53da2ca..0000000
--- a/tests/standalone/verified_mem_test.dart
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-//
-// Test write barrier verification mode.
-// VMOptions=--verify_before_gc --verify_after_gc --old_gen_growth_rate=1
-
-var a = [];
-
-void main() {
-  for (int i = 0; i < 12; ++i) {
-    a.add(new List(12345));
-  }
-  for (int i = 0; i < 1234; ++i) {
-    a[0] = new List(100000);
-  }
-}
diff --git a/tools/VERSION b/tools/VERSION
index 954f2ea..6fb4f1b 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
 MAJOR 1
 MINOR 18
 PATCH 0
-PRERELEASE 3
+PRERELEASE 4
 PRERELEASE_PATCH 0
diff --git a/tools/build.py b/tools/build.py
index 518a015..1f9eb8b 100755
--- a/tools/build.py
+++ b/tools/build.py
@@ -60,7 +60,7 @@
       default=utils.GuessArchitecture())
   result.add_option("--os",
     help='Target OSs (comma-separated).',
-    metavar='[all,host,android]',
+    metavar='[all,host,android,fuchsia]',
     default='host')
   result.add_option("-t", "--toolchain",
     help='Cross-compiler toolchain path',
@@ -109,11 +109,11 @@
       return False
   options.os = [ProcessOsOption(os_name) for os_name in options.os]
   for os_name in options.os:
-    if not os_name in ['android', 'freebsd', 'linux', 'macos', 'win32']:
+    if not os_name in ['android', 'freebsd', 'fuchsia', 'linux', 'macos', 'win32']:
       print "Unknown os %s" % os_name
       return False
     if os_name != HOST_OS:
-      if os_name != 'android':
+      if os_name != 'android' and os_name != 'fuchsia':
         print "Unsupported target os %s" % os_name
         return False
       if not HOST_OS in ['linux']:
@@ -148,6 +148,13 @@
     if arch == 'x64':
       return os.path.join(android_toolchain, 'x86_64-linux-android')
 
+  if target_os == 'fuchsia':
+    fuchsia_toolchain = GetFuchsiaToolchainDir(HOST_OS, arch)
+    if arch == 'arm64':
+      return os.path.join(fuchsia_toolchain, 'aarch64-elf')
+    if arch == 'x64':
+      return os.path.join(fuchsia_toolchain, 'x86_64-elf')
+
   # If no cross compiler is specified, only try to figure one out on Linux.
   if not HOST_OS in ['linux']:
     raise Exception('Unless --toolchain is used cross-building is only '
@@ -175,6 +182,8 @@
   linker = ""
   if target_os == 'android':
     linker = os.path.join(DART_ROOT, 'tools', 'android_link.py')
+  elif target_os == 'fuchsia':
+    linker = os.path.join(DART_ROOT, 'tools', 'fuchsia_link.py')
   elif toolchainprefix:
     linker = toolchainprefix + "-g++"
 
@@ -226,6 +235,28 @@
   return android_toolchain
 
 
+def GetFuchsiaToolchainDir(host_os, target_arch):
+  global THIRD_PARTY_ROOT
+  if host_os not in ['linux']:
+    raise Exception('Unsupported host os %s' % host_os)
+  if target_arch not in ['x64', 'arm64']:
+    raise Exception('Unsupported target architecture %s' % target_arch)
+
+  # Set up path to the Fuchsia toolchains.
+  CheckDirExists(THIRD_PARTY_ROOT, 'third party tools')
+  fuchsia_tools = os.path.join(THIRD_PARTY_ROOT, 'fuchsia_tools')
+  CheckDirExists(fuchsia_tools, 'Fuchsia tools')
+
+  toolchain_arch = 'x86_64-elf-5.3.0-Linux-x86_64'
+  if target_arch == 'arm64':
+    toolchain_arch = 'aarch64-elf-5.3.0-Linux-x86_64'
+  fuchsia_toolchain = os.path.join(
+      fuchsia_tools, 'toolchains', toolchain_arch, 'bin')
+  CheckDirExists(fuchsia_toolchain, 'Fuchsia toolchain')
+
+  return fuchsia_toolchain
+
+
 def Execute(args):
   process = subprocess.Popen(args)
   process.wait()
diff --git a/tools/deps/dartium.deps/DEPS b/tools/deps/dartium.deps/DEPS
index 220e6fc..c526711 100644
--- a/tools/deps/dartium.deps/DEPS
+++ b/tools/deps/dartium.deps/DEPS
@@ -9,7 +9,7 @@
 
 vars.update({
   "dartium_chromium_commit": "ef7d4ae18c646aea34c07a7ef62de7342c3b8c12",
-  "dartium_webkit_commit": "e08c4cb1d7f1e16166f940d5dea59326c531d7b9",
+  "dartium_webkit_commit": "e77699b350f3e76be2d6f4e06af37615d3d6e778",
   "chromium_base_revision": "338390",
 
   # We use mirrors of all github repos to guarantee reproducibility and
@@ -45,7 +45,7 @@
   "mime_rev": "@75890811d4af5af080351ba8a2853ad4c8df98dd",
   "metatest_rev": "@e5aa8e4e19fc4188ac2f6d38368a47d8f07c3df1",
   "oauth2_rev": "@1bff41f4d54505c36f2d1a001b83b8b745c452f5",
-  "observatory_pub_packages_rev": "@cf90eb9077177d3d6b3fd5e8289477c2385c026a",
+  "observatory_pub_packages_rev": "@e5e1e543bea10d4bed95b22ad3e7aa2b20a23584",
   "package_config_rev": "@0.1.3",
   "path_rev": "@b657c0854d1cf41c014986fa9d2321f1173df805",
   "plugin_tag": "@0.1.0",
diff --git a/tools/dom/docs/docs.json b/tools/dom/docs/docs.json
index 123447d..98455fd 100644
--- a/tools/dom/docs/docs.json
+++ b/tools/dom/docs/docs.json
@@ -4201,7 +4201,7 @@
           "   *",
           "   * Note: Most simple HTTP requests can be accomplished using the [getString],",
           "   * [request], [requestCrossOrigin], or [postFormData] methods. Use of this",
-          "   * `open` method is intended only for more complext HTTP requests where",
+          "   * `open` method is intended only for more complex HTTP requests where",
           "   * finer-grained control is needed.",
           "   */"
         ],
@@ -4298,7 +4298,7 @@
           "   *",
           "   * Note: Most simple HTTP requests can be accomplished using the [getString],",
           "   * [request], [requestCrossOrigin], or [postFormData] methods. Use of this",
-          "   * `send` method is intended only for more complext HTTP requests where",
+          "   * `send` method is intended only for more complex HTTP requests where",
           "   * finer-grained control is needed.",
           "   *",
           "   * ## Other resources",
diff --git a/tools/dom/scripts/htmlrenamer.py b/tools/dom/scripts/htmlrenamer.py
index 856da50..3d3c410 100644
--- a/tools/dom/scripts/htmlrenamer.py
+++ b/tools/dom/scripts/htmlrenamer.py
@@ -402,8 +402,11 @@
   'WindowTimers.setTimeout',
   'Window.moveTo',
   'Window.requestAnimationFrame',
+  'Window.cancelAnimationFrame',
   'Window.setInterval',
   'Window.setTimeout',
+
+  'XMLHttpRequest.send',
 ])
 
 # Members from the standard dom that exist in the dart:html library with
diff --git a/tools/dom/src/EventStreamProvider.dart b/tools/dom/src/EventStreamProvider.dart
index 474a143..b6c32fc 100644
--- a/tools/dom/src/EventStreamProvider.dart
+++ b/tools/dom/src/EventStreamProvider.dart
@@ -118,6 +118,41 @@
   StreamSubscription<T> capture(void onData(T event));
 }
 
+/// Task specification for DOM Events.
+///
+/// *Experimental*. May disappear without notice.
+class EventSubscriptionSpecification<T extends Event>
+    implements TaskSpecification {
+  @override
+  final String name;
+  @override
+  final bool isOneShot;
+
+  final EventTarget target;
+  /// The event-type of the event. For example 'click' for click events.
+  final String eventType;
+  // TODO(floitsch): the first generic argument should be 'void'.
+  final ZoneUnaryCallback<dynamic, T> onData;
+  final bool useCapture;
+
+  EventSubscriptionSpecification({this.name, this.isOneShot, this.target,
+      this.eventType, void this.onData(T event), this.useCapture});
+
+  /// Returns a copy of this instance, with every non-null argument replaced
+  /// by the given value.
+  EventSubscriptionSpecification<T> replace(
+      {String name, bool isOneShot, EventTarget target,
+       String eventType, void onData(T event), bool useCapture}) {
+    return new EventSubscriptionSpecification<T>(
+        name: name ?? this.name,
+        isOneShot: isOneShot ?? this.isOneShot,
+        target: target ?? this.target,
+        eventType: eventType ?? this.eventType,
+        onData: onData ?? this.onData,
+        useCapture: useCapture ?? this.useCapture);
+  }
+}
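+
+// A minimal sketch (hypothetical, not part of this change) of a zone handler
+// that intercepts DOM event subscriptions. It relies only on the API above:
+// recognize an [EventSubscriptionSpecification], swap in a wrapped callback
+// via [replace], and delegate to the parent zone.
+Object _exampleEventCreateTaskHandler(Zone self, ZoneDelegate parent,
+    Zone zone, TaskCreate create, TaskSpecification specification) {
+  if (specification is EventSubscriptionSpecification) {
+    var original = specification.onData;
+    var wrapped = specification.replace(onData: (event) {
+      // Observe every delivered event before forwarding it.
+      original(event);
+    });
+    return parent.createTask(zone, create, wrapped);
+  }
+  return parent.createTask(zone, create, specification);
+}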
+
 /**
  * Adapter for exposing DOM events as Dart streams.
  */
@@ -125,8 +160,16 @@
   final EventTarget _target;
   final String _eventType;
   final bool _useCapture;
+  /// The name that is used in the task specification.
+  final String _name;
+  /// Whether the stream can trigger multiple times.
+  final bool _isOneShot;
 
-  _EventStream(this._target, this._eventType, this._useCapture);
+  _EventStream(this._target, String eventType, this._useCapture,
+      {String name, bool isOneShot: false})
+      : _eventType = eventType,
+        _isOneShot = isOneShot,
+        _name = name ?? "dart.html.event.$eventType";
 
   // DOM events are inherently multi-subscribers.
   Stream<T> asBroadcastStream({void onListen(StreamSubscription<T> subscription),
@@ -134,13 +177,31 @@
       => this;
   bool get isBroadcast => true;
 
+  StreamSubscription<T> _listen(
+      void onData(T event), {bool useCapture}) {
+
+    if (identical(Zone.current, Zone.ROOT)) {
+      return new _EventStreamSubscription<T>(
+          this._target, this._eventType, onData, this._useCapture,
+          Zone.current);
+    }
+
+    var specification = new EventSubscriptionSpecification<T>(
+        name: this._name, isOneShot: this._isOneShot,
+        target: this._target, eventType: this._eventType,
+        onData: onData, useCapture: useCapture);
+    // We need to wrap the _createStreamSubscription call, since a tear-off
+    // would not bind the generic type 'T'.
+    return Zone.current.createTask((spec, Zone zone) {
+      return _createStreamSubscription/*<T>*/(spec, zone);
+    }, specification);
+  }
+
   StreamSubscription<T> listen(void onData(T event),
       { Function onError,
         void onDone(),
         bool cancelOnError}) {
-
-    return new _EventStreamSubscription<T>(
-        this._target, this._eventType, onData, this._useCapture);
+    return _listen(onData, useCapture: this._useCapture);
   }
 }
 
@@ -155,8 +216,9 @@
  */
 class _ElementEventStreamImpl<T extends Event> extends _EventStream<T>
     implements ElementStream<T> {
-  _ElementEventStreamImpl(target, eventType, useCapture) :
-      super(target, eventType, useCapture);
+  _ElementEventStreamImpl(target, eventType, useCapture,
+      {String name, bool isOneShot: false}) :
+      super(target, eventType, useCapture, name: name, isOneShot: isOneShot);
 
   Stream<T> matches(String selector) => this.where(
       (event) => _matchesWithAncestors(event, selector)).map((e) {
@@ -164,9 +226,9 @@
         return e;
       });
 
-  StreamSubscription<T> capture(void onData(T event)) =>
-    new _EventStreamSubscription<T>(
-        this._target, this._eventType, onData, true);
+  StreamSubscription<T> capture(void onData(T event)) {
+    return _listen(onData, useCapture: true);
+  }
 }
 
 /**
@@ -215,7 +277,13 @@
   bool get isBroadcast => true;
 }
 
-// We would like this to just be EventListener<T> but that typdef cannot
+StreamSubscription/*<T>*/ _createStreamSubscription/*<T>*/(
+    EventSubscriptionSpecification/*<T>*/ spec, Zone zone) {
+  return new _EventStreamSubscription/*<T>*/(spec.target, spec.eventType,
+      spec.onData, spec.useCapture, zone);
+}
+
+// We would like this to just be EventListener<T> but that typedef cannot
 // use generics until dartbug/26276 is fixed.
 typedef _EventListener<T extends Event>(T event);
 
@@ -224,15 +292,19 @@
   EventTarget _target;
   final String _eventType;
   EventListener _onData;
+  EventListener _domCallback;
   final bool _useCapture;
+  final Zone _zone;
 
   // TODO(jacobr): for full strong mode correctness we should write
-  // _onData = onData == null ? null : _wrapZone/*<Event, dynamic>*/((e) => onData(e as T))
+  // _onData = onData == null ? null : _wrapZone/*<dynamic, Event>*/((e) => onData(e as T))
   // but that breaks 114 co19 tests as well as multiple html tests as it is reasonable
   // to pass the wrong type of event object to an event listener as part of a
   // test.
   _EventStreamSubscription(this._target, this._eventType, void onData(T event),
-      this._useCapture) : _onData = _wrapZone/*<Event, dynamic>*/(onData) {
+      this._useCapture, Zone zone)
+      : _zone = zone,
+        _onData = _registerZone/*<dynamic, Event>*/(zone, onData) {
     _tryResume();
   }
 
@@ -254,7 +326,7 @@
     }
     // Remove current event listener.
     _unlisten();
-    _onData = _wrapZone/*<Event, dynamic>*/(handleData);
+    _onData = _registerZone/*<dynamic, Event>*/(_zone, handleData);
     _tryResume();
   }
 
@@ -283,14 +355,25 @@
   }
 
   void _tryResume() {
-    if (_onData != null && !isPaused) {
-      _target.addEventListener(_eventType, _onData, _useCapture);
+    if (_onData == null || isPaused) return;
+    if (identical(_zone, Zone.ROOT)) {
+      _domCallback = _onData;
+    } else {
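+      // Outside the root zone: dispatch each DOM event through the zone's
+      // task hooks so that intercepting zones observe it.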
+      _domCallback = (event) {
+        _zone.runTask(_runEventNotification, this, event);
+      };
     }
+    _target.addEventListener(_eventType, _domCallback, _useCapture);
+  }
+
+  static void _runEventNotification/*<T>*/(
+      _EventStreamSubscription/*<T>*/ subscription, /*=T*/ event) {
+    subscription._onData(event);
   }
 
   void _unlisten() {
     if (_onData != null) {
-      _target.removeEventListener(_eventType, _onData, _useCapture);
+      _target.removeEventListener(_eventType, _domCallback, _useCapture);
     }
   }
 
diff --git a/tools/dom/src/shared_html.dart b/tools/dom/src/shared_html.dart
index 7342cdf..f2c32f3 100644
--- a/tools/dom/src/shared_html.dart
+++ b/tools/dom/src/shared_html.dart
@@ -4,31 +4,26 @@
 
 part of dart.dom.html;
 
-// TODO(jacobr): remove these typedefs when dart:async supports generic types.
-typedef R _wrapZoneCallback<A, R>(A a);
-typedef R _wrapZoneBinaryCallback<A, B, R>(A a, B b);
-
-_wrapZoneCallback/*<A, R>*/ _wrapZone/*<A, R>*/(_wrapZoneCallback/*<A, R>*/ callback) {
-  // For performance reasons avoid wrapping if we are in the root zone.
-  if (Zone.current == Zone.ROOT) return callback;
+ZoneUnaryCallback/*<R, T>*/ _registerZone/*<R, T>*/(Zone zone,
+    ZoneUnaryCallback/*<R, T>*/ callback) {
+  // For performance reasons avoid registering if we are in the root zone.
+  if (identical(zone, Zone.ROOT)) return callback;
   if (callback == null) return null;
-  // TODO(jacobr): we cast to _wrapZoneCallback/*<A, R>*/ to hack around missing
-  // generic method support in zones.
-  // ignore: STRONG_MODE_DOWN_CAST_COMPOSITE
-  _wrapZoneCallback/*<A, R>*/ wrapped =
-      Zone.current.bindUnaryCallback(callback, runGuarded: true);
-  return wrapped;
+  return zone.registerUnaryCallback(callback);
 }
 
-_wrapZoneBinaryCallback/*<A, B, R>*/ _wrapBinaryZone/*<A, B, R>*/(_wrapZoneBinaryCallback/*<A, B, R>*/ callback) {
-  if (Zone.current == Zone.ROOT) return callback;
+ZoneUnaryCallback/*<R, T>*/ _wrapZone/*<R, T>*/(ZoneUnaryCallback/*<R, T>*/ callback) {
+  // For performance reasons avoid wrapping if we are in the root zone.
+  if (identical(Zone.current, Zone.ROOT)) return callback;
   if (callback == null) return null;
-  // We cast to _wrapZoneBinaryCallback/*<A, B, R>*/ to hack around missing
-  // generic method support in zones.
-  // ignore: STRONG_MODE_DOWN_CAST_COMPOSITE
-  _wrapZoneBinaryCallback/*<A, B, R>*/ wrapped =
-      Zone.current.bindBinaryCallback(callback, runGuarded: true);
-  return wrapped;
+  return Zone.current.bindUnaryCallback(callback, runGuarded: true);
+}
+
+ZoneBinaryCallback/*<R, A, B>*/ _wrapBinaryZone/*<R, A, B>*/(
+    ZoneBinaryCallback/*<R, A, B>*/ callback) {
+  if (identical(Zone.current, Zone.ROOT)) return callback;
+  if (callback == null) return null;
+  return Zone.current.bindBinaryCallback(callback, runGuarded: true);
 }
 
 /**
diff --git a/tools/dom/templates/html/impl/impl_Window.darttemplate b/tools/dom/templates/html/impl/impl_Window.darttemplate
index 8abac37..e2cdaee 100644
--- a/tools/dom/templates/html/impl/impl_Window.darttemplate
+++ b/tools/dom/templates/html/impl/impl_Window.darttemplate
@@ -4,6 +4,99 @@
 
 part of $LIBRARYNAME;
 
+typedef void RemoveFrameRequestMapping(int id);
+
+/**
+ * The task object representing animation-frame requests.
+ *
+ * For historical reasons, [Window.requestAnimationFrame] returns an integer
+ * to users. However, zone tasks must be unique objects, and an integer can
+ * therefore not be used as task object. The [Window] class thus keeps a mapping
+ * from the integer ID to the corresponding task object. All zone related
+ * operations work on this task object, whereas users of
+ * [Window.requestAnimationFrame] only see the integer ID.
+ *
+ * Since this mapping takes up space, it must be removed when the
+ * animation-frame task has triggered. The default implementation does this
+ * automatically, but intercepting implementations of `requestAnimationFrame`
+ * must make sure to call the [AnimationFrameTask.removeMapping]
+ * function that is provided in the task specification.
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+abstract class AnimationFrameTask {
+  /** The ID that is returned to users. */
+  int get id;
+
+  /** The zone in which the task will run. */
+  Zone get zone;
+
+  /**
+   * Cancels the animation-frame request.
+   *
+   * A call to [Window.cancelAnimationFrame] with an `id` argument equal to [id]
+   * forwards the request to this function.
+   *
+   * Zones that intercept animation-frame requests implement this method so
+   * that they can react to cancellation requests.
+   */
+  void cancel(Window window);
+
+  /**
+   * Maps animation-frame request IDs to their task objects.
+   */
+  static final Map<int, _AnimationFrameTask> _tasks = {};
+
+  /**
+   * Removes the mapping from [id] to [AnimationFrameTask].
+   *
+   * This function must be invoked by user-implemented animation-frame
+   * tasks before running the callback.
+   *
+   * See [AnimationFrameTask].
+   */
+  static void removeMapping(int id) {
+    _tasks.remove(id);
+  }
+}
+
+class _AnimationFrameTask implements AnimationFrameTask {
+  final int id;
+  final Zone zone;
+  final FrameRequestCallback _callback;
+
+  _AnimationFrameTask(this.id, this.zone, this._callback);
+
+  void cancel(Window window) {
+    window._cancelAnimationFrame(this.id);
+  }
+}
+
+/**
+ * The task specification for an animation-frame request.
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+class AnimationFrameRequestSpecification implements TaskSpecification {
+  /**
+   * The window on which [Window.requestAnimationFrame] was invoked.
+   */
+  final Window window;
+
+  /**
+   * The callback that is executed when the animation-frame is ready.
+   *
+   * Note that the callback hasn't been registered in any zone when the `create`
+   * function (passed to [Zone.createTask]) is invoked.
+   */
+  final FrameRequestCallback callback;
+
+  AnimationFrameRequestSpecification(this.window, this.callback);
+
+  String get name => "dart.html.request-animation-frame";
+  bool get isOneShot => true;
+}
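+
+// A minimal sketch (hypothetical, not part of this template) showing how a
+// zone observes animation-frame requests via the specification above. Zones
+// that install their own `create` function must honor the
+// [AnimationFrameTask.removeMapping] contract documented earlier.
+void _exampleObserveAnimationFrames(Window window) {
+  runZoned(() {
+    window.requestAnimationFrame((num time) {
+      // Runs inside the custom zone as an animation-frame task.
+    });
+  }, zoneSpecification: new ZoneSpecification(
+      createTask: (self, parent, zone, create, specification) {
+        if (specification is AnimationFrameRequestSpecification) {
+          // The request is visible here before the browser call is made.
+        }
+        return parent.createTask(zone, create, specification);
+      }));
+}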
+
 @DocsEditable()
 $if DART2JS
 $(ANNOTATIONS)@Native("Window,DOMWindow")
@@ -29,9 +122,7 @@
    */
   Future<num> get animationFrame {
     var completer = new Completer<num>.sync();
-    requestAnimationFrame((time) {
-      completer.complete(time);
-    });
+    requestAnimationFrame(completer.complete);
     return completer.future;
   }
 
@@ -96,6 +187,8 @@
     JS('void', '#.location = #', this, value);
   }
 
+$endif
+
   /**
    * Called to draw an animation frame and then request the window to repaint
    * after [callback] has finished (creating the animation).
@@ -114,8 +207,33 @@
    */
   @DomName('Window.requestAnimationFrame')
   int requestAnimationFrame(FrameRequestCallback callback) {
+$if DART2JS
     _ensureRequestAnimationFrame();
-    return _requestAnimationFrame(_wrapZone/*<num, dynamic>*/(callback));
+$endif
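+    // Fast path: in the root zone there is nothing to intercept, so skip the
+    // task machinery and the id-to-task mapping entirely.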
+    if (identical(Zone.current, Zone.ROOT)) {
+      return _requestAnimationFrame(callback);
+    }
+    var spec = new AnimationFrameRequestSpecification(this, callback);
+    var task = Zone.current.createTask/*<AnimationFrameTask>*/(
+        _createAnimationFrameTask, spec);
+    AnimationFrameTask._tasks[task.id] = task;
+    return task.id;
+  }
+
+  static _AnimationFrameTask _createAnimationFrameTask(
+      AnimationFrameRequestSpecification spec, Zone zone) {
+    var task;
+    var id = spec.window._requestAnimationFrame((num time) {
+      AnimationFrameTask.removeMapping(task.id);
+      zone.runTask(_runAnimationFrame, task, time);
+    });
+    var callback = zone.registerUnaryCallback(spec.callback);
+    task = new _AnimationFrameTask(id, zone, callback);
+    return task;
+  }
+
+  static void _runAnimationFrame(_AnimationFrameTask task, num time) {
+    task._callback(time);
   }
 
   /**
@@ -126,11 +244,21 @@
    * * [Window.cancelAnimationFrame](https://developer.mozilla.org/en-US/docs/Web/API/Window.cancelAnimationFrame)
    *   from MDN.
    */
+  @DomName('Window.cancelAnimationFrame')
   void cancelAnimationFrame(int id) {
+$if DART2JS
     _ensureRequestAnimationFrame();
-    _cancelAnimationFrame(id);
+$endif
+    var task = AnimationFrameTask._tasks.remove(id);
+    if (task == null) {
+      // Assume that the animation frame request wasn't intercepted by a zone.
+      _cancelAnimationFrame(id);
+      return;
+    }
+    task.cancel(this);
   }
 
+$if DART2JS
   @JSName('requestAnimationFrame')
   int _requestAnimationFrame(FrameRequestCallback callback) native;
 
@@ -181,28 +309,6 @@
   /// The debugging console for this window.
   @DomName('Window.console')
   Console get console => Console._safeConsole;
-
-$else
-  /**
-   * Called to draw an animation frame and then request the window to repaint
-   * after [callback] has finished (creating the animation).
-   *
-   * Use this method only if you need to later call [cancelAnimationFrame]. If
-   * not, the preferred Dart idiom is to set animation frames by calling
-   * [animationFrame], which returns a Future.
-   *
-   * Returns a non-zero valued integer to represent the request id for this
-   * request. This value only needs to be saved if you intend to call
-   * [cancelAnimationFrame] so you can specify the particular animation to
-   * cancel.
-   *
-   * Note: The supplied [callback] needs to call [requestAnimationFrame] again
-   * for the animation to continue.
-   */
-  @DomName('Window.requestAnimationFrame')
-  int requestAnimationFrame(FrameRequestCallback callback) {
-    return _requestAnimationFrame(_wrapZone(callback));
-  }
 $endif
 
   /**
diff --git a/tools/dom/templates/html/impl/impl_XMLHttpRequest.darttemplate b/tools/dom/templates/html/impl/impl_XMLHttpRequest.darttemplate
index 9ad00f6..35683fc 100644
--- a/tools/dom/templates/html/impl/impl_XMLHttpRequest.darttemplate
+++ b/tools/dom/templates/html/impl/impl_XMLHttpRequest.darttemplate
@@ -4,6 +4,109 @@
 
 part of $LIBRARYNAME;
 
+/**
+ * A task specification for HTTP requests.
+ *
+ * This specification is not available when an HTTP request is sent through
+ * direct use of [HttpRequest.send]. See [HttpRequestSendTaskSpecification].
+ *
+ * A task created from this specification is a `Future<HttpRequest>`.
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+class HttpRequestTaskSpecification extends TaskSpecification {
+  /// The URL of the request.
+  final String url;
+
+  /// The HTTP request method.
+  ///
+  /// By default (when `null`) this is a `"GET"` request. Alternatively, the
+  /// method can be `"POST"`, `"PUT"`, `"DELETE"`, etc.
+  final String method;
+
+  /// Whether the request should send credentials. Credentials are only useful
+  /// for cross-origin requests.
+  ///
+  /// See [HttpRequest.request] for more information.
+  final bool withCredentials;
+
+  /// The desired response format.
+  ///
+  /// Supported types are:
+  /// - `""`: (same as `"text"`),
+  /// - `"arraybuffer"`,
+  /// - `"blob"`,
+  /// - `"document"`,
+  /// - `"json"`,
+  /// - `"text"`
+  ///
+  /// When no value is provided (i.e., it is `null`), it defaults to `""`.
+  final String responseType;
+
+  /// The desired MIME type.
+  ///
+  /// This overrides the default MIME type, which is set up to transfer
+  /// textual data.
+  final String mimeType;
+
+  /// The request headers that should be sent with the request.
+  final Map<String, String> requestHeaders;
+
+  /// The data that is sent with the request.
+  ///
+  /// When data is provided (the value is not `null`), it must be a
+  /// [ByteBuffer], [Blob], [Document], [String], or [FormData].
+  final dynamic sendData;
+
+  /// The function that is invoked on progress updates. This function is
+  /// registered as an event listener on the created [HttpRequest] object, and
+  /// thus has its own task. Further invocations of the progress function do
+  /// *not* use the HTTP request task as their task object.
+  ///
+  /// Creating an HTTP request automatically registers the on-progress listener.
+  final ZoneUnaryCallback<dynamic, ProgressEvent> onProgress;
+
+  HttpRequestTaskSpecification(this.url,
+      {String this.method, bool this.withCredentials, String this.responseType,
+      String this.mimeType, Map<String, String> this.requestHeaders,
+      this.sendData,
+      void this.onProgress(ProgressEvent e)});
+
+  String get name => "dart.html.http-request";
+  bool get isOneShot => true;
+}
+
+/**
+ * A task specification for HTTP requests that are initiated through a direct
+ * invocation of [HttpRequest.send].
+ *
+ * This specification serves as a signal to zones that an HTTP request has
+ * been initiated. The created task is the [request] object itself, and no
+ * callback is ever executed in this task.
+ *
+ * Note that event listeners on the HTTP request are also registered in the
+ * zone (each with its own task creation), and that a zone can thus detect
+ * when the HTTP request returns.
+ *
+ * HTTP requests that are initiated through `request` methods don't use
+ * this class but use [HttpRequestTaskSpecification].
+ *
+ * *Experimental*. This class may disappear without notice.
+ */
+class HttpRequestSendTaskSpecification extends TaskSpecification {
+  final HttpRequest request;
+  final dynamic sendData;
+
+  HttpRequestSendTaskSpecification(this.request, this.sendData);
+
+  String get name => "dart.html.http-request-send";
+
+  /**
+   * No callback is ever executed in an HTTP request send task.
+   */
+  bool get isOneShot => false;
+}
+
  /**
   * A client-side XHR request for getting data from a URL,
   * formally known as XMLHttpRequest.
@@ -190,7 +293,34 @@
       {String method, bool withCredentials, String responseType,
       String mimeType, Map<String, String> requestHeaders, sendData,
       void onProgress(ProgressEvent e)}) {
+    var spec = new HttpRequestTaskSpecification(
+        url, method: method,
+        withCredentials: withCredentials,
+        responseType: responseType,
+        mimeType: mimeType,
+        requestHeaders: requestHeaders,
+        sendData: sendData,
+        onProgress: onProgress);
+
+    if (identical(Zone.current, Zone.ROOT)) {
+      return _createHttpRequestTask(spec, null);
+    }
+    return Zone.current.createTask(_createHttpRequestTask, spec);
+  }
+
+  static Future<HttpRequest> _createHttpRequestTask(
+      HttpRequestTaskSpecification spec, Zone zone) {
+    String url = spec.url;
+    String method = spec.method;
+    bool withCredentials = spec.withCredentials;
+    String responseType = spec.responseType;
+    String mimeType = spec.mimeType;
+    Map<String, String> requestHeaders = spec.requestHeaders;
+    var sendData = spec.sendData;
+    var onProgress = spec.onProgress;
+
     var completer = new Completer<HttpRequest>();
+    var task = completer.future;
 
     var xhr = new HttpRequest();
     if (method == null) {
@@ -230,23 +360,42 @@
       // redirect case will be handled by the browser before it gets to us,
       // so if we see it we should pass it through to the user.
       var unknownRedirect = xhr.status > 307 && xhr.status < 400;
-      
-      if (accepted || fileUri || notModified || unknownRedirect) {
+
+      var isSuccessful = accepted || fileUri || notModified || unknownRedirect;
+
+      if (zone == null && isSuccessful) {
         completer.complete(xhr);
-      } else {
+      } else if (zone == null) {
         completer.completeError(e);
+      } else if (isSuccessful) {
+        zone.runTask((task, value) {
+          completer.complete(value);
+        }, task, xhr);
+      } else {
+        zone.runTask((task, error) {
+          completer.completeError(error);
+        }, task, e);
       }
     });
 
-    xhr.onError.listen(completer.completeError);
-
-    if (sendData != null) {
-      xhr.send(sendData);
+    if (zone == null) {
+      xhr.onError.listen(completer.completeError);
     } else {
-      xhr.send();
+      xhr.onError.listen((error) {
+        zone.runTask((task, error) {
+          completer.completeError(error);
+        }, task, error);
+      });
     }
 
-    return completer.future;
+    if (sendData != null) {
+      // TODO(floitsch): should we go through 'send()' and have nested tasks?
+      xhr._send(sendData);
+    } else {
+      xhr._send();
+    }
+
+    return task;
   }
 
   /**
@@ -316,6 +465,9 @@
         return xhr.responseText;
       });
     }
+    // TODO(floitsch): the following code doesn't go through task zones.
+    // Since 'XDomainRequest' is an IE9 feature we should probably just remove
+    // it.
 $if DART2JS
     var completer = new Completer<String>();
     if (method == null) {
@@ -396,7 +548,7 @@
    *
    * Note: Most simple HTTP requests can be accomplished using the [getString],
    * [request], [requestCrossOrigin], or [postFormData] methods. Use of this
-   * `open` method is intended only for more complext HTTP requests where
+   * `open` method is intended only for more complex HTTP requests where
    * finer-grained control is needed.
    */
   @DomName('XMLHttpRequest.open')
@@ -413,5 +565,35 @@
   void open(String method, String url, {bool async, String user, String password}) native;
 $endif
 
+  /**
+   * Sends the request with any given `data`.
+   *
+   * Note: Most simple HTTP requests can be accomplished using the [getString],
+   * [request], [requestCrossOrigin], or [postFormData] methods. Use of this
+   * `send` method is intended only for more complex HTTP requests where
+   * finer-grained control is needed.
+   *
+   * ## Other resources
+   *
+   * * [XMLHttpRequest.send](https://developer.mozilla.org/en-US/docs/DOM/XMLHttpRequest#send%28%29)
+   *   from MDN.
+   */
+  @DomName('XMLHttpRequest.send')
+  @DocsEditable()
+  void send([body_OR_data]) {
+    if (identical(Zone.current, Zone.ROOT)) {
+      _send(body_OR_data);
+    } else {
+      Zone.current.createTask(_createHttpRequestSendTask,
+          new HttpRequestSendTaskSpecification(this, body_OR_data));
+    }
+  }
+
+  static HttpRequest _createHttpRequestSendTask(
+      HttpRequestSendTaskSpecification spec, Zone zone) {
+    spec.request._send(spec.sendData);
+    return spec.request;
+  }
+
 $!MEMBERS
 }
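A minimal sketch (not part of this patch) of how the two task specifications
above surface to zone code. The `createTask`/`runTask` handlers on
`ZoneSpecification` and the corresponding `ZoneDelegate` methods follow the
experimental task-zone API introduced by this patch series; treat their
signatures as assumptions:

  import 'dart:async';
  import 'dart:html';

  void main() {
    var zone = Zone.current.fork(
        specification: new ZoneSpecification(
            createTask: (self, parent, zone, create, spec) {
      if (spec is HttpRequestTaskSpecification) {
        // Created by HttpRequest.request and the helpers built on it.
        // A null method means the request defaults to GET.
        print('HTTP ${spec.method ?? "GET"} ${spec.url}');
      } else if (spec is HttpRequestSendTaskSpecification) {
        // Created by a direct HttpRequest.send; the task is the request.
        print('HTTP send of ${spec.sendData}');
      }
      return parent.createTask(zone, create, spec);
    }, runTask: (self, parent, zone, run, task, arg) {
      // Completion and error callbacks of a request task are funneled
      // through runTask, as in _createHttpRequestTask above.
      parent.runTask(zone, run, task, arg);
    }));

    zone.run(() {
      HttpRequest.getString('data.json').then(print);
    });
  }
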
diff --git a/tools/fuchsia_link.py b/tools/fuchsia_link.py
new file mode 100755
index 0000000..c3c537b
--- /dev/null
+++ b/tools/fuchsia_link.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2016 The Dart Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script performs the final link step for Fuchsia executables.
+Usage:
+./fuchsia_link {arm64,x64} {executable,library,shared_library}
+               {host,target} [linker args]
+"""
+
+import os
+import subprocess
+import sys
+
+# Figure out where we are.
+SCRIPT_DIR = os.path.dirname(sys.argv[0])
+DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..'))
+THIRD_PARTY_ROOT = os.path.join(DART_ROOT, 'third_party')
+
+
+def CheckDirExists(path, docstring):
+  if not os.path.isdir(path):
+    raise Exception('Could not find %s directory %s'
+          % (docstring, path))
+
+
+def execute(args):
+  process = subprocess.Popen(args)
+  process.wait()
+  return process.returncode
+
+
+def main():
+  if len(sys.argv) < 5:
+    raise Exception(sys.argv[0] + " failed: not enough arguments")
+
+  # gyp puts -shared first in a shared_library link. Remove it.
+  if sys.argv[1] == '-shared':
+    sys.argv.remove('-shared')
+
+  # Grab the command line arguments.
+  target_arch = sys.argv[1]
+  link_type = sys.argv[2]
+  link_target = sys.argv[3]
+  link_args = sys.argv[4:]
+
+  # Check arguments.
+  if target_arch not in ['arm64', 'x64',]:
+    raise Exception(sys.argv[0] +
+        " first argument must be 'arm64' or 'x64'")
+  if link_type not in ['executable', 'library', 'shared_library']:
+    raise Exception(sys.argv[0] +
+      " second argument must be 'executable' or 'library' or 'shared_library'")
+  if link_target not in ['host', 'target']:
+    raise Exception(sys.argv[0] + " third argument must be 'host' or 'target'")
+
+  # TODO(zra): Figure out how to link a shared library with the
+  # cross-compilers. For now, we disable it by generating empty files
+  # for the results. We disable it here to avoid inspecting the OS type in
+  # the gyp files.
+  if link_type == 'shared_library':
+    print "NOT linking shared library for Fuchsia."
+    o_index = link_args.index('-o')
+    output = os.path.join(DART_ROOT, link_args[o_index + 1])
+    open(output, 'a').close()
+    sys.exit(0)
+
+  # Set up the path to the Fuchsia tools.
+  CheckDirExists(THIRD_PARTY_ROOT, 'third party tools')
+  fuchsia_tools = os.path.join(THIRD_PARTY_ROOT, 'fuchsia_tools')
+  CheckDirExists(fuchsia_tools, 'Fuchsia tools')
+
+  # Set up the directory of the Fuchsia cross-compiler toolchain.
+  toolchain_arch = 'x86_64-elf-5.3.0-Linux-x86_64'
+  if target_arch == 'arm64':
+    toolchain_arch = 'aarch64-elf-5.3.0-Linux-x86_64'
+  fuchsia_toolchain = os.path.join(
+      fuchsia_tools, 'toolchains', toolchain_arch, 'bin')
+  CheckDirExists(fuchsia_toolchain, 'Fuchsia toolchain')
+
+  # Set up the path to the linker executable.
+  fuchsia_linker = os.path.join(fuchsia_toolchain, 'x86_64-elf-g++')
+  if target_arch == 'arm64':
+    fuchsia_linker = os.path.join(fuchsia_toolchain, 'aarch64-elf-c++')
+
+  # Grab the path to libgcc.a, which we must explicitly add to the link,
+  # by invoking the cross-compiler with the -print-libgcc-file-name flag.
+  fuchsia_gcc = os.path.join(fuchsia_toolchain, 'x86_64-elf-gcc')
+  if target_arch == 'arm64':
+    fuchsia_gcc = os.path.join(fuchsia_toolchain, 'aarch64-elf-gcc')
+  fuchsia_libgcc = subprocess.check_output(
+      [fuchsia_gcc, '-print-libgcc-file-name']).strip()
+
+  # Set up the path to the system root directory, which is where we'll find
+  # the Fuchsia-specific system includes and libraries.
+  fuchsia_sysroot = os.path.join(fuchsia_tools, 'sysroot', 'x86_64')
+  if target_arch == 'arm64':
+    fuchsia_sysroot = os.path.join(fuchsia_tools, 'sysroot', 'arm64')
+  CheckDirExists(fuchsia_sysroot, 'Fuchsia sysroot')
+  fuchsia_lib = os.path.join(fuchsia_sysroot, 'usr', 'lib')
+  crtn_fuchsia = os.path.join(fuchsia_lib, 'crtn.o')
+
+  if link_target == 'target':
+    # Add and remove libraries as listed in configurations_fuchsia.gypi
+    libs_to_rm = ['-lrt', '-lpthread', '-ldl']
+    libs_to_add = [fuchsia_libgcc, '-lc',]
+
+    # Add crtn_fuchsia to the end if we are linking an executable.
+    if link_type == 'executable':
+      libs_to_add.extend([crtn_fuchsia])
+
+    link_args = [i for i in link_args if i not in libs_to_rm]
+    link_args.extend(libs_to_add)
+
+    link_args.insert(0, fuchsia_linker)
+  else:
+    link_args.extend(['-ldl', '-lrt'])
+    link_args.insert(0, 'g++')
+
+  print ' '.join(link_args)
+  sys.exit(execute(link_args))
+
+if __name__ == '__main__':
+  main()
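As configured in tools/gyp/configurations_fuchsia.gypi below (note the leading
'x64'/'arm64', '>(_type)', and 'target' entries in the target ldflags), gyp
invokes this script as the link driver for Fuchsia targets, so the
architecture, link type, and host/target selector arrive ahead of the ordinary
linker arguments. A hypothetical invocation, with main.o standing in for the
real object files:

  tools/fuchsia_link.py x64 executable target -o out/ProductFuchsiaX64/fuchsia_test main.o
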
diff --git a/tools/gyp/configurations.gypi b/tools/gyp/configurations.gypi
index 29f457e..267be04 100644
--- a/tools/gyp/configurations.gypi
+++ b/tools/gyp/configurations.gypi
@@ -41,6 +41,7 @@
   },
   'includes': [
     'configurations_android.gypi',
+    'configurations_fuchsia.gypi',
     'configurations_make.gypi',
     'configurations_xcode.gypi',
     'configurations_msvs.gypi',
@@ -729,6 +730,21 @@
         ],
       },
 
+      # Fuchsia configurations. The configuration names explicitly include
+      # 'Fuchsia' because we are cross-building from Linux, and, when building
+      # the standalone VM, we cannot inspect the gyp built-in 'OS' variable to
+      # figure out that we are building for Fuchsia. Since we have not re-run
+      # gyp, it will still be 'linux'.
+      'ProductFuchsiaX64': {
+        'inherit_from': [
+          'Dart_Base', 'Dart_x64_Base', 'Dart_Product',
+          'Dart_Fuchsia_Base',
+          'Dart_Fuchsia_x64_Base',
+          'Dart_Fuchsia_Product',
+        ],
+      },
+
       # Android configurations. The configuration names explicitly include
       # 'Android' because we are cross-building from Linux, and, when building
       # the standalone VM, we cannot inspect the gyp built-in 'OS' variable to
diff --git a/tools/gyp/configurations_fuchsia.gypi b/tools/gyp/configurations_fuchsia.gypi
new file mode 100644
index 0000000..5b291d3
--- /dev/null
+++ b/tools/gyp/configurations_fuchsia.gypi
@@ -0,0 +1,152 @@
+# Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+# for details. All rights reserved. Use of this source code is governed by a
+# BSD-style license that can be found in the LICENSE file.
+
+# Definitions for building standalone Dart binaries to run on Fuchsia.
+
+{
+  'variables': {
+    'fuchsia_tools': '<(PRODUCT_DIR)/../../third_party/fuchsia_tools/',
+  },  # variables
+  'target_defaults': {
+    'configurations': {
+      'Dart_Fuchsia_Base': {
+        'abstract': 1,
+        'cflags': [
+          '-Werror',
+          '<@(common_gcc_warning_flags)',
+          '-Wnon-virtual-dtor',
+          '-Wvla',
+          '-Woverloaded-virtual',
+          '-g3',
+          '-ggdb3',
+          '-fno-rtti',
+          '-fno-exceptions',
+          '-fstack-protector',
+          '-Wa,--noexecstack',
+        ],
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'cflags!': [
+              '-pthread',  # Not supported by the Fuchsia toolchain.
+            ],
+          }],
+        ],
+      },
+      'Dart_Fuchsia_Debug': {
+        'abstract': 1,
+        'defines': [
+          'DEBUG',
+        ],
+        'cflags': [
+          '-fno-omit-frame-pointer',
+        ],
+      },
+      'Dart_Fuchsia_Release': {
+        'abstract': 1,
+        'defines': [
+          'NDEBUG',
+        ],
+        'cflags!': [
+          '-O2',
+          '-Os',
+        ],
+        'cflags': [
+          '-fno-omit-frame-pointer',
+          '-fdata-sections',
+          '-ffunction-sections',
+          '-O3',
+        ],
+      },
+      'Dart_Fuchsia_Product': {
+        'abstract': 1,
+        'defines': [
+          'NDEBUG',
+          'PRODUCT',
+        ],
+        'cflags!': [
+          '-O2',
+          '-Os',
+        ],
+        'cflags': [
+          '-fdata-sections',
+          '-ffunction-sections',
+          '-O3',
+        ],
+      },
+      'Dart_Fuchsia_x64_Base': {
+        'abstract': 1,
+        'variables': {
+          'fuchsia_sysroot': '<(fuchsia_tools)/sysroot/x86_64',
+          'fuchsia_include': '<(fuchsia_sysroot)/usr/include',
+          'fuchsia_lib': '<(fuchsia_sysroot)/usr/lib',
+        },
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'defines': [
+              'TARGET_OS_FUCHSIA',
+            ],
+            'cflags': [
+              '--sysroot=<(fuchsia_sysroot)',
+              '-I<(fuchsia_include)',
+              '-fno-threadsafe-statics',
+            ],
+            'ldflags': [
+              'x64', '>(_type)', 'target',
+              '-nostdlib',
+              '-T<(fuchsia_sysroot)/usr/user.ld',
+              '-L<(fuchsia_lib)',
+              '-Wl,-z,noexecstack',
+              '-Wl,-z,now',
+              '-Wl,-z,relro',
+              '<(fuchsia_lib)/crt1.o',
+              '<(fuchsia_lib)/crti.o',
+            ],
+            'ldflags!': [
+              '-pthread',
+            ],
+          }],
+          ['_toolset=="host"', {
+            'cflags': [ '-pthread' ],
+            'ldflags': [ '-pthread' ],
+          }],
+        ],
+      },
+      'Dart_Fuchsia_arm64_Base': {
+        'abstract': 1,
+        'variables': {
+          'fuchsia_sysroot': '<(fuchsia_tools)/sysroot/arm64',
+          'fuchsia_include': '<(fuchsia_sysroot)/usr/include',
+          'fuchsia_lib': '<(fuchsia_sysroot)/usr/lib',
+        },
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'defines': [
+              'TARGET_OS_FUCHSIA',
+            ],
+            'cflags': [
+              '--sysroot=<(fuchsia_sysroot)',
+              '-I<(fuchsia_include)',
+              '-fno-threadsafe-statics',
+            ],
+            'ldflags': [
+              'arm64', '>(_type)', 'target',
+              '-nostdlib',
+              '-L<(fuchsia_lib)',
+              '-Wl,-z,noexecstack',
+              '-Wl,-z,now',
+              '-Wl,-z,relro',
+            ],
+            'ldflags!': [
+              '-pthread',
+            ],
+          }],
+          ['_toolset=="host"', {
+            'cflags': [ '-pthread' ],
+            'ldflags': [ '-pthread' ],
+          }],
+        ],
+      },  # Dart_Fuchsia_arm64_Base
+    },  # configurations
+  },  # target_defaults
+}
diff --git a/tools/testing/dart/compiler_configuration.dart b/tools/testing/dart/compiler_configuration.dart
index 398986a..76a0e55 100644
--- a/tools/testing/dart/compiler_configuration.dart
+++ b/tools/testing/dart/compiler_configuration.dart
@@ -53,6 +53,7 @@
     bool isCsp = configuration['csp'];
     bool useCps = configuration['cps_ir'];
     bool useBlobs = configuration['use_blobs'];
+    bool hotReload = configuration['hot_reload'];
 
     switch (compiler) {
       case 'dart2analyzer':
@@ -89,7 +90,8 @@
             isDebug: isDebug,
             isChecked: isChecked,
             isHostChecked: isHostChecked,
-            useSdk: useSdk);
+            useSdk: useSdk,
+            hotReload: hotReload);
       default:
         throw "Unknown compiler '$compiler'";
     }
@@ -149,13 +151,17 @@
 
 /// The "none" compiler.
 class NoneCompilerConfiguration extends CompilerConfiguration {
+  final bool hotReload;
+
   NoneCompilerConfiguration(
-      {bool isDebug, bool isChecked, bool isHostChecked, bool useSdk})
+      {bool isDebug, bool isChecked, bool isHostChecked, bool useSdk,
+       bool hotReload})
       : super._subclass(
             isDebug: isDebug,
             isChecked: isChecked,
             isHostChecked: isHostChecked,
-            useSdk: useSdk);
+            useSdk: useSdk),
+        this.hotReload = hotReload;
 
   bool get hasCompiler => false;
 
@@ -172,6 +178,13 @@
       args.add('--enable_asserts');
       args.add('--enable_type_checks');
     }
+    if (hotReload) {
+      args.add('--hot-reload-test-mode');
+      // Remove the following once known bugs with background compilation
+      // and OSR are fixed.
+      args.add('--no-background-compilation');
+      args.add('--no-osr');
+    }
     return args
       ..addAll(vmOptions)
       ..addAll(sharedOptions)
diff --git a/tools/testing/dart/test_options.dart b/tools/testing/dart/test_options.dart
index 7f44f3c..da2c5d0 100644
--- a/tools/testing/dart/test_options.dart
+++ b/tools/testing/dart/test_options.dart
@@ -187,8 +187,12 @@
           'noopt', 'Run an in-place precompilation', ['--noopt'], [], false,
           type: 'bool'),
       new _TestOptionSpecification(
-          'use_blobs', 'Use mmap instead of shared libraries for precompilation', ['--use-blobs'], [], false,
-          type: 'bool'),
+          'hot_reload', 'Run hot reload stress tests', ['--hot-reload'], [],
+          false, type: 'bool'),
+      new _TestOptionSpecification(
+          'use_blobs',
+          'Use mmap instead of shared libraries for precompilation',
+          ['--use-blobs'], [], false, type: 'bool'),
       new _TestOptionSpecification(
           'timeout', 'Timeout in seconds', ['-t', '--timeout'], [], -1,
           type: 'int'),
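Together with the compiler_configuration.dart change above, this wires a new
--hot-reload option through the test runner: passing it sets the hot_reload
configuration flag, and the 'none' compiler configuration then adds
--hot-reload-test-mode (plus --no-background-compilation and --no-osr, until
the noted bugs are fixed) to the VM command line. An illustrative run,
assuming the usual suite names:

  tools/test.py -m release --hot-reload language
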
diff --git a/tools/utils.py b/tools/utils.py
index 0611bdd..496b499 100644
--- a/tools/utils.py
+++ b/tools/utils.py
@@ -268,7 +268,7 @@
           (target_os != GuessOS()))
 
 def GetBuildConf(mode, arch, conf_os=None):
-  if conf_os == 'android':
+  if conf_os == 'android' or conf_os == 'fuchsia':
     return '%s%s%s' % (GetBuildMode(mode), conf_os.title(), arch.upper())
   else:
     # Ask for a cross build if the host and target architectures don't match.
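With this change, a Fuchsia cross build resolves to the same configuration
names defined in tools/gyp/configurations.gypi above: assuming
GetBuildMode('product') yields 'Product' as for the other modes,
GetBuildConf('product', 'x64', conf_os='fuchsia') returns
'Product' + 'Fuchsia' + 'X64', i.e. 'ProductFuchsiaX64'.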