Remove most of build/, move generation to build/gen.py

Build now with `./build/gen.py && ninja -C out`. README.md has been updated to match.

Change-Id: I00be44215e4a0e203a2e1ff38867e17535701cad
Reviewed-on: https://gn-review.googlesource.com/1240
Reviewed-by: Brett Wilson <brettw@chromium.org>
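
A sketch of the full workflow after this change, assembled from the commit message and the README hunk below; the trailing comments are explanatory assumptions, not part of the patch:

  git clone https://gn.googlesource.com/gn
  cd gn
  build/gen.py    # generates the Ninja build files (presumably into out/)
  ninja -C out    # builds GN from the generated files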
diff --git a/README.md b/README.md
index 331f0d3..c7e1ad3 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,11 @@
 # GN
 
-GN is a meta-build system that generates [Ninja](https://ninja-build.org).
+GN is a meta-build system that generates build files for
+[Ninja](https://ninja-build.org).
+
+Get started with:
+
+  git clone https://gn.googlesource.com/gn
+  cd gn
+  build/gen.py
+  ninja -C out
diff --git a/base/allocator/allocator_check.cc b/base/allocator/allocator_check.cc
index f5e4edc..57c1eb0 100644
--- a/base/allocator/allocator_check.cc
+++ b/base/allocator/allocator_check.cc
@@ -4,7 +4,7 @@
 
 #include "base/allocator/allocator_check.h"
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/allocator/winheap_stubs_win.h"
diff --git a/base/allocator/allocator_extension.h b/base/allocator/allocator_extension.h
index 9f2775a..00d88cd 100644
--- a/base/allocator/allocator_extension.h
+++ b/base/allocator/allocator_extension.h
@@ -8,7 +8,7 @@
 #include <stddef.h> // for size_t
 
 #include "base/base_export.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace allocator {
diff --git a/base/allocator/allocator_interception_mac.mm b/base/allocator/allocator_interception_mac.mm
index dce7eda..17cf3f0 100644
--- a/base/allocator/allocator_interception_mac.mm
+++ b/base/allocator/allocator_interception_mac.mm
@@ -35,7 +35,7 @@
 #include "base/process/memory.h"
 #include "base/scoped_clear_errno.h"
 #include "base/threading/sequenced_task_runner_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "third_party/apple_apsl/CFBase.h"
 
 namespace base {
diff --git a/base/allocator/allocator_shim.cc b/base/allocator/allocator_shim.cc
index e919f09..d0205ca 100644
--- a/base/allocator/allocator_shim.cc
+++ b/base/allocator/allocator_shim.cc
@@ -13,7 +13,7 @@
 #include "base/macros.h"
 #include "base/process/process_metrics.h"
 #include "base/threading/platform_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(OS_WIN)
 #include <unistd.h>
diff --git a/base/allocator/allocator_shim.h b/base/allocator/allocator_shim.h
index 527e414..6256f30 100644
--- a/base/allocator/allocator_shim.h
+++ b/base/allocator/allocator_shim.h
@@ -8,7 +8,7 @@
 #include <stddef.h>
 
 #include "base/base_export.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace allocator {
diff --git a/base/allocator/allocator_shim_default_dispatch_to_linker_wrapped_symbols.cc b/base/allocator/allocator_shim_default_dispatch_to_linker_wrapped_symbols.cc
index c351a7c..89cabc4 100644
--- a/base/allocator/allocator_shim_default_dispatch_to_linker_wrapped_symbols.cc
+++ b/base/allocator/allocator_shim_default_dispatch_to_linker_wrapped_symbols.cc
@@ -5,7 +5,7 @@
 #include <malloc.h>
 
 #include "base/allocator/allocator_shim.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_ANDROID) && __ANDROID_API__ < 17
 #include <dlfcn.h>
diff --git a/base/allocator/allocator_shim_unittest.cc b/base/allocator/allocator_shim_unittest.cc
index 73ff031..0a18eb2 100644
--- a/base/allocator/allocator_shim_unittest.cc
+++ b/base/allocator/allocator_shim_unittest.cc
@@ -17,7 +17,7 @@
 #include "base/synchronization/waitable_event.h"
 #include "base/threading/platform_thread.h"
 #include "base/threading/thread_local.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
diff --git a/base/allocator/partition_allocator/address_space_randomization.cc b/base/allocator/partition_allocator/address_space_randomization.cc
index a7e17c7..b25fbdc 100644
--- a/base/allocator/partition_allocator/address_space_randomization.cc
+++ b/base/allocator/partition_allocator/address_space_randomization.cc
@@ -8,7 +8,7 @@
 #include "base/allocator/partition_allocator/spin_lock.h"
 #include "base/lazy_instance.h"
 #include "base/rand_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>  // Must be in front of other Windows header files.
diff --git a/base/allocator/partition_allocator/address_space_randomization.h b/base/allocator/partition_allocator/address_space_randomization.h
index 3f65a87..ab40e2b 100644
--- a/base/allocator/partition_allocator/address_space_randomization.h
+++ b/base/allocator/partition_allocator/address_space_randomization.h
@@ -7,7 +7,7 @@
 
 #include "base/allocator/partition_allocator/page_allocator.h"
 #include "base/base_export.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/allocator/partition_allocator/address_space_randomization_unittest.cc b/base/allocator/partition_allocator/address_space_randomization_unittest.cc
index 40f494d..a9fe1ce 100644
--- a/base/allocator/partition_allocator/address_space_randomization_unittest.cc
+++ b/base/allocator/partition_allocator/address_space_randomization_unittest.cc
@@ -8,7 +8,7 @@
 #include "base/bit_cast.h"
 #include "base/bits.h"
 #include "base/sys_info.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_WIN)
diff --git a/base/allocator/partition_allocator/page_allocator.cc b/base/allocator/partition_allocator/page_allocator.cc
index 328384e..72e34df 100644
--- a/base/allocator/partition_allocator/page_allocator.cc
+++ b/base/allocator/partition_allocator/page_allocator.cc
@@ -14,7 +14,7 @@
 #include "base/lazy_instance.h"
 #include "base/logging.h"
 #include "base/numerics/checked_math.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #include <atomic>
 
diff --git a/base/allocator/partition_allocator/page_allocator.h b/base/allocator/partition_allocator/page_allocator.h
index 4973348..0db2fde 100644
--- a/base/allocator/partition_allocator/page_allocator.h
+++ b/base/allocator/partition_allocator/page_allocator.h
@@ -12,7 +12,7 @@
 #include "base/allocator/partition_allocator/page_allocator_constants.h"
 #include "base/base_export.h"
 #include "base/compiler_specific.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/allocator/partition_allocator/page_allocator_constants.h b/base/allocator/partition_allocator/page_allocator_constants.h
index 308d099..a2a2003 100644
--- a/base/allocator/partition_allocator/page_allocator_constants.h
+++ b/base/allocator/partition_allocator/page_allocator_constants.h
@@ -7,7 +7,7 @@
 
 #include <stddef.h>
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 #if defined(OS_WIN)
diff --git a/base/allocator/partition_allocator/page_allocator_internals_posix.h b/base/allocator/partition_allocator/page_allocator_internals_posix.h
index a579266..baadbdc 100644
--- a/base/allocator/partition_allocator/page_allocator_internals_posix.h
+++ b/base/allocator/partition_allocator/page_allocator_internals_posix.h
@@ -15,7 +15,7 @@
 #include <sys/resource.h>
 #endif
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #ifndef MAP_ANONYMOUS
 #define MAP_ANONYMOUS MAP_ANON
diff --git a/base/allocator/partition_allocator/page_allocator_unittest.cc b/base/allocator/partition_allocator/page_allocator_unittest.cc
index 22c6455..ad14c06 100644
--- a/base/allocator/partition_allocator/page_allocator_unittest.cc
+++ b/base/allocator/partition_allocator/page_allocator_unittest.cc
@@ -8,7 +8,7 @@
 #include <string.h>
 
 #include "base/allocator/partition_allocator/address_space_randomization.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_POSIX) && !defined(OS_FUCHSIA)
diff --git a/base/allocator/partition_allocator/partition_alloc.h b/base/allocator/partition_allocator/partition_alloc.h
index 79d0905..c69fd01 100644
--- a/base/allocator/partition_allocator/partition_alloc.h
+++ b/base/allocator/partition_allocator/partition_alloc.h
@@ -76,7 +76,7 @@
 #include "base/logging.h"
 #include "base/macros.h"
 #include "base/sys_byteorder.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
 #include <stdlib.h>
diff --git a/base/allocator/partition_allocator/partition_alloc_unittest.cc b/base/allocator/partition_allocator/partition_alloc_unittest.cc
index 4bf6b26..994f339 100644
--- a/base/allocator/partition_allocator/partition_alloc_unittest.cc
+++ b/base/allocator/partition_allocator/partition_alloc_unittest.cc
@@ -14,7 +14,7 @@
 #include "base/bit_cast.h"
 #include "base/bits.h"
 #include "base/sys_info.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_POSIX)
diff --git a/base/allocator/partition_allocator/partition_bucket.cc b/base/allocator/partition_allocator/partition_bucket.cc
index fcea523..f38b2ea 100644
--- a/base/allocator/partition_allocator/partition_bucket.cc
+++ b/base/allocator/partition_allocator/partition_bucket.cc
@@ -10,7 +10,7 @@
 #include "base/allocator/partition_allocator/partition_oom.h"
 #include "base/allocator/partition_allocator/partition_page.h"
 #include "base/allocator/partition_allocator/partition_root_base.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace internal {
diff --git a/base/allocator/partition_allocator/partition_freelist_entry.h b/base/allocator/partition_allocator/partition_freelist_entry.h
index 7e3282e..c9fe004 100644
--- a/base/allocator/partition_allocator/partition_freelist_entry.h
+++ b/base/allocator/partition_allocator/partition_freelist_entry.h
@@ -10,7 +10,7 @@
 #include "base/allocator/partition_allocator/partition_alloc_constants.h"
 #include "base/compiler_specific.h"
 #include "base/sys_byteorder.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace internal {
diff --git a/base/allocator/partition_allocator/partition_oom.cc b/base/allocator/partition_allocator/partition_oom.cc
index 5e1cf79..6476761 100644
--- a/base/allocator/partition_allocator/partition_oom.cc
+++ b/base/allocator/partition_allocator/partition_oom.cc
@@ -5,7 +5,7 @@
 #include "base/allocator/partition_allocator/partition_oom.h"
 
 #include "base/allocator/partition_allocator/oom.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace internal {
diff --git a/base/allocator/partition_allocator/partition_oom.h b/base/allocator/partition_allocator/partition_oom.h
index da8fc15..242da38 100644
--- a/base/allocator/partition_allocator/partition_oom.h
+++ b/base/allocator/partition_allocator/partition_oom.h
@@ -9,7 +9,7 @@
 #define BASE_ALLOCATOR_PARTITION_ALLOCATOR_PARTITION_OOM_H_
 
 #include "base/compiler_specific.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace internal {
diff --git a/base/allocator/partition_allocator/partition_root_base.cc b/base/allocator/partition_allocator/partition_root_base.cc
index 91b998f..db51d02 100644
--- a/base/allocator/partition_allocator/partition_root_base.cc
+++ b/base/allocator/partition_allocator/partition_root_base.cc
@@ -7,7 +7,7 @@
 #include "base/allocator/partition_allocator/oom.h"
 #include "base/allocator/partition_allocator/partition_oom.h"
 #include "base/allocator/partition_allocator/partition_page.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace internal {
diff --git a/base/allocator/partition_allocator/spin_lock.cc b/base/allocator/partition_allocator/spin_lock.cc
index 46f4965..0250c58 100644
--- a/base/allocator/partition_allocator/spin_lock.cc
+++ b/base/allocator/partition_allocator/spin_lock.cc
@@ -3,7 +3,7 @@
 // found in the LICENSE file.
 
 #include "base/allocator/partition_allocator/spin_lock.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/allocator/tcmalloc_unittest.cc b/base/allocator/tcmalloc_unittest.cc
index 78c4f84..4b5b664 100644
--- a/base/allocator/tcmalloc_unittest.cc
+++ b/base/allocator/tcmalloc_unittest.cc
@@ -9,7 +9,7 @@
 #include "base/logging.h"
 #include "base/process/process_metrics.h"
 #include "base/sys_info.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(USE_TCMALLOC)
diff --git a/base/atomicops.h b/base/atomicops.h
index 4d8510e..ccf05f9 100644
--- a/base/atomicops.h
+++ b/base/atomicops.h
@@ -37,7 +37,7 @@
 #include <cstddef>
 
 #include "base/base_export.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN) && defined(ARCH_CPU_64_BITS)
 // windows.h #defines this (only on x64). This causes problems because the
diff --git a/base/atomicops_internals_atomicword_compat.h b/base/atomicops_internals_atomicword_compat.h
index 8b000d2..d985d10 100644
--- a/base/atomicops_internals_atomicword_compat.h
+++ b/base/atomicops_internals_atomicword_compat.h
@@ -9,7 +9,7 @@
 
 #include <stdint.h>
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 // AtomicWord is a synonym for intptr_t, and Atomic32 is a synonym for int32_t,
 // which in turn means int. On some LP32 platforms, intptr_t is an int, but
diff --git a/base/atomicops_internals_portable.h b/base/atomicops_internals_portable.h
index ee034de..b75d080 100644
--- a/base/atomicops_internals_portable.h
+++ b/base/atomicops_internals_portable.h
@@ -34,7 +34,7 @@
 
 #include <atomic>
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace subtle {
diff --git a/base/atomicops_internals_x86_msvc.h b/base/atomicops_internals_x86_msvc.h
index ee9043e..38001a8 100644
--- a/base/atomicops_internals_x86_msvc.h
+++ b/base/atomicops_internals_x86_msvc.h
@@ -12,7 +12,7 @@
 #include <intrin.h>
 
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(ARCH_CPU_64_BITS)
 // windows.h #defines this (only on x64). This causes problems because the
diff --git a/base/base_paths.h b/base/base_paths.h
index 2a163f4..b67e3db 100644
--- a/base/base_paths.h
+++ b/base/base_paths.h
@@ -8,7 +8,7 @@
 // This file declares path keys for the base module.  These can be used with
 // the PathService to access various special directories and files.
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/base_paths_win.h"
diff --git a/base/base_paths_mac.mm b/base/base_paths_mac.mm
index 46bbd16..6eb6e07 100644
--- a/base/base_paths_mac.mm
+++ b/base/base_paths_mac.mm
@@ -20,7 +20,7 @@
 #include "base/path_service.h"
 #include "base/strings/string_util.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace {
 
diff --git a/base/base_paths_posix.cc b/base/base_paths_posix.cc
index 00a1569..02ae498 100644
--- a/base/base_paths_posix.cc
+++ b/base/base_paths_posix.cc
@@ -22,7 +22,7 @@
 #include "base/nix/xdg_util.h"
 #include "base/path_service.h"
 #include "base/process/process_metrics.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_FREEBSD)
 #include <sys/param.h>
diff --git a/base/base_switches.cc b/base/base_switches.cc
index 7ce7380..49eb975 100644
--- a/base/base_switches.cc
+++ b/base/base_switches.cc
@@ -3,7 +3,7 @@
 // found in the LICENSE file.
 
 #include "base/base_switches.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace switches {
 
diff --git a/base/base_switches.h b/base/base_switches.h
index 3425e6f..ada2727 100644
--- a/base/base_switches.h
+++ b/base/base_switches.h
@@ -7,7 +7,7 @@
 #ifndef BASE_BASE_SWITCHES_H_
 #define BASE_BASE_SWITCHES_H_
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace switches {
 
diff --git a/base/bind_helpers.h b/base/bind_helpers.h
index 15961e6..f4bbd80 100644
--- a/base/bind_helpers.h
+++ b/base/bind_helpers.h
@@ -13,7 +13,7 @@
 #include "base/bind.h"
 #include "base/callback.h"
 #include "base/memory/weak_ptr.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // This defines a set of simple functions and utilities that people want when
 // using Callback<> and Bind().
diff --git a/base/bind_internal.h b/base/bind_internal.h
index d748f89..ffe9ed7 100644
--- a/base/bind_internal.h
+++ b/base/bind_internal.h
@@ -14,7 +14,7 @@
 #include "base/memory/raw_scoped_refptr_mismatch_checker.h"
 #include "base/memory/weak_ptr.h"
 #include "base/template_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // See base/callback.h for user documentation.
 //
diff --git a/base/bind_unittest.cc b/base/bind_unittest.cc
index f1d19a1..5162a08 100644
--- a/base/bind_unittest.cc
+++ b/base/bind_unittest.cc
@@ -15,7 +15,7 @@
 #include "base/memory/weak_ptr.h"
 #include "base/test/bind_test_util.h"
 #include "base/test/gtest_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
diff --git a/base/bit_cast.h b/base/bit_cast.h
index 90dd925..213dcc5 100644
--- a/base/bit_cast.h
+++ b/base/bit_cast.h
@@ -10,7 +10,7 @@
 
 #include "base/compiler_specific.h"
 #include "base/template_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // bit_cast<Dest,Source> is a template function that implements the equivalent
 // of "*reinterpret_cast<Dest*>(&source)".  We need this in very low-level
diff --git a/base/bits.h b/base/bits.h
index a1c8b5d..b02f303 100644
--- a/base/bits.h
+++ b/base/bits.h
@@ -12,7 +12,7 @@
 
 #include "base/compiler_specific.h"
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(COMPILER_MSVC)
 #include <intrin.h>
diff --git a/base/bits_unittest.cc b/base/bits_unittest.cc
index 98b9c08..ceaad3f 100644
--- a/base/bits_unittest.cc
+++ b/base/bits_unittest.cc
@@ -5,7 +5,7 @@
 // This file contains the unit tests for the bit utilities.
 
 #include "base/bits.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #include <stddef.h>
 
diff --git a/base/command_line.cc b/base/command_line.cc
index aec89f5..134ffb8 100644
--- a/base/command_line.cc
+++ b/base/command_line.cc
@@ -15,7 +15,7 @@
 #include "base/strings/string_tokenizer.h"
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/command_line.h b/base/command_line.h
index 25fd7d9..f82ebfd 100644
--- a/base/command_line.h
+++ b/base/command_line.h
@@ -23,7 +23,7 @@
 #include "base/base_export.h"
 #include "base/strings/string16.h"
 #include "base/strings/string_piece.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/command_line_unittest.cc b/base/command_line_unittest.cc
index 3718cd9..2396830 100644
--- a/base/command_line_unittest.cc
+++ b/base/command_line_unittest.cc
@@ -11,7 +11,7 @@
 #include "base/files/file_path.h"
 #include "base/macros.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/compiler_specific.h b/base/compiler_specific.h
index 88c290f..c27c7bc 100644
--- a/base/compiler_specific.h
+++ b/base/compiler_specific.h
@@ -5,7 +5,7 @@
 #ifndef BASE_COMPILER_SPECIFIC_H_
 #define BASE_COMPILER_SPECIFIC_H_
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(COMPILER_MSVC)
 
diff --git a/base/component_export.h b/base/component_export.h
index b5cb364..9e55664 100644
--- a/base/component_export.h
+++ b/base/component_export.h
@@ -5,7 +5,7 @@
 #ifndef BASE_COMPONENT_EXPORT_H_
 #define BASE_COMPONENT_EXPORT_H_
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 // Used to annotate symbols which are exported by the component named
 // |component|. Note that this only does the right thing if the corresponding
diff --git a/base/containers/stack_container.h b/base/containers/stack_container.h
index c775744..7d84d07 100644
--- a/base/containers/stack_container.h
+++ b/base/containers/stack_container.h
@@ -10,7 +10,7 @@
 #include <vector>
 
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/containers/stack_container_unittest.cc b/base/containers/stack_container_unittest.cc
index b6bb9b6..8c039ad 100644
--- a/base/containers/stack_container_unittest.cc
+++ b/base/containers/stack_container_unittest.cc
@@ -9,7 +9,7 @@
 #include <algorithm>
 
 #include "base/memory/ref_counted.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/cpu.cc b/base/cpu.cc
index cd9066f..68a6bbc 100644
--- a/base/cpu.cc
+++ b/base/cpu.cc
@@ -13,7 +13,7 @@
 #include <utility>
 
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(ARCH_CPU_ARM_FAMILY) && (defined(OS_ANDROID) || defined(OS_LINUX))
 #include "base/files/file_util.h"
diff --git a/base/cpu_unittest.cc b/base/cpu_unittest.cc
index 8a68ea0..bfdb1cd 100644
--- a/base/cpu_unittest.cc
+++ b/base/cpu_unittest.cc
@@ -4,7 +4,7 @@
 
 #include "base/cpu.h"
 #include "base/stl_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if _MSC_VER >= 1700
diff --git a/base/critical_closure.h b/base/critical_closure.h
index 94c618d..d593537 100644
--- a/base/critical_closure.h
+++ b/base/critical_closure.h
@@ -9,7 +9,7 @@
 
 #include "base/callback.h"
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_IOS)
 #include "base/bind.h"
diff --git a/base/debug/activity_tracker.cc b/base/debug/activity_tracker.cc
index 362013e..bb1349c 100644
--- a/base/debug/activity_tracker.cc
+++ b/base/debug/activity_tracker.cc
@@ -25,7 +25,7 @@
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
 #include "base/threading/platform_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace debug {
diff --git a/base/debug/alias.cc b/base/debug/alias.cc
index 6b0caaa..d93d495 100644
--- a/base/debug/alias.cc
+++ b/base/debug/alias.cc
@@ -3,7 +3,7 @@
 // found in the LICENSE file.
 
 #include "base/debug/alias.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace debug {
diff --git a/base/debug/asan_invalid_access.cc b/base/debug/asan_invalid_access.cc
index 07c19db..d5d43d5 100644
--- a/base/debug/asan_invalid_access.cc
+++ b/base/debug/asan_invalid_access.cc
@@ -10,7 +10,7 @@
 
 #include "base/debug/alias.h"
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/debug/asan_invalid_access.h b/base/debug/asan_invalid_access.h
index dc9a7ee..f8b078a 100644
--- a/base/debug/asan_invalid_access.h
+++ b/base/debug/asan_invalid_access.h
@@ -10,7 +10,7 @@
 
 #include "base/base_export.h"
 #include "base/compiler_specific.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace debug {
diff --git a/base/debug/close_handle_hook_win.cc b/base/debug/close_handle_hook_win.cc
index 1f1f432..35afdf5 100644
--- a/base/debug/close_handle_hook_win.cc
+++ b/base/debug/close_handle_hook_win.cc
@@ -16,7 +16,7 @@
 #include "base/win/iat_patch_function.h"
 #include "base/win/pe_image.h"
 #include "base/win/scoped_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace {
 
diff --git a/base/debug/debugger.cc b/base/debug/debugger.cc
index 1ccee1c..025bc54 100644
--- a/base/debug/debugger.cc
+++ b/base/debug/debugger.cc
@@ -5,7 +5,7 @@
 #include "base/debug/debugger.h"
 #include "base/logging.h"
 #include "base/threading/platform_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace debug {
diff --git a/base/debug/debugger_posix.cc b/base/debug/debugger_posix.cc
index b62bf01..63d9d52 100644
--- a/base/debug/debugger_posix.cc
+++ b/base/debug/debugger_posix.cc
@@ -20,7 +20,7 @@
 #include "base/macros.h"
 #include "base/threading/platform_thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(__GLIBCXX__)
 #include <cxxabi.h>
diff --git a/base/debug/debugger_unittest.cc b/base/debug/debugger_unittest.cc
index 0a5a039..23ea83d 100644
--- a/base/debug/debugger_unittest.cc
+++ b/base/debug/debugger_unittest.cc
@@ -4,7 +4,7 @@
 
 #include "base/debug/debugger.h"
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace {
diff --git a/base/debug/dump_without_crashing.h b/base/debug/dump_without_crashing.h
index 913f6c4..c36973f 100644
--- a/base/debug/dump_without_crashing.h
+++ b/base/debug/dump_without_crashing.h
@@ -7,7 +7,7 @@
 
 #include "base/base_export.h"
 #include "base/compiler_specific.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/debug/elf_reader_linux_unittest.cc b/base/debug/elf_reader_linux_unittest.cc
index 5510418..88ed502 100644
--- a/base/debug/elf_reader_linux_unittest.cc
+++ b/base/debug/elf_reader_linux_unittest.cc
@@ -8,7 +8,7 @@
 
 #include "base/files/memory_mapped_file.h"
 #include "base/strings/string_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 extern char __executable_start;
diff --git a/base/debug/leak_annotations.h b/base/debug/leak_annotations.h
index dc50246..f1a2d07 100644
--- a/base/debug/leak_annotations.h
+++ b/base/debug/leak_annotations.h
@@ -6,7 +6,7 @@
 #define BASE_DEBUG_LEAK_ANNOTATIONS_H_
 
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // This file defines macros which can be used to annotate intentional memory
 // leaks. Support for annotations is implemented in LeakSanitizer. Annotated
diff --git a/base/debug/leak_tracker.h b/base/debug/leak_tracker.h
index 7ddd5b6..43f2102 100644
--- a/base/debug/leak_tracker.h
+++ b/base/debug/leak_tracker.h
@@ -7,7 +7,7 @@
 
 #include <stddef.h>
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 // Only enable leak tracking in non-uClibc debug builds.
 #if !defined(NDEBUG) && !defined(__UCLIBC__)
diff --git a/base/debug/proc_maps_linux.cc b/base/debug/proc_maps_linux.cc
index 0bb44b4..1a9476f 100644
--- a/base/debug/proc_maps_linux.cc
+++ b/base/debug/proc_maps_linux.cc
@@ -10,7 +10,7 @@
 #include "base/files/file_util.h"
 #include "base/files/scoped_file.h"
 #include "base/strings/string_split.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_LINUX) || defined(OS_ANDROID)
 #include <inttypes.h>
diff --git a/base/debug/proc_maps_linux_unittest.cc b/base/debug/proc_maps_linux_unittest.cc
index 7abf152..d291507 100644
--- a/base/debug/proc_maps_linux_unittest.cc
+++ b/base/debug/proc_maps_linux_unittest.cc
@@ -11,7 +11,7 @@
 #include "base/path_service.h"
 #include "base/strings/stringprintf.h"
 #include "base/threading/platform_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/debug/profiler.cc b/base/debug/profiler.cc
index 91619a1..82e5229 100644
--- a/base/debug/profiler.cc
+++ b/base/debug/profiler.cc
@@ -9,7 +9,7 @@
 #include "base/process/process_handle.h"
 #include "base/strings/string_number_conversions.h"
 #include "base/strings/string_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/current_module.h"
diff --git a/base/debug/stack_trace.h b/base/debug/stack_trace.h
index 322e77b..33023fe 100644
--- a/base/debug/stack_trace.h
+++ b/base/debug/stack_trace.h
@@ -12,7 +12,7 @@
 
 #include "base/base_export.h"
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX)
 #include <unistd.h>
diff --git a/base/debug/stack_trace_posix.cc b/base/debug/stack_trace_posix.cc
index b954b97..c67fd62 100644
--- a/base/debug/stack_trace_posix.cc
+++ b/base/debug/stack_trace_posix.cc
@@ -48,7 +48,7 @@
 #include "base/numerics/safe_conversions.h"
 #include "base/posix/eintr_wrapper.h"
 #include "base/strings/string_number_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(USE_SYMBOLIZE)
 #include "base/third_party/symbolize/symbolize.h"
diff --git a/base/debug/stack_trace_unittest.cc b/base/debug/stack_trace_unittest.cc
index 1fbeea2..6d37c40 100644
--- a/base/debug/stack_trace_unittest.cc
+++ b/base/debug/stack_trace_unittest.cc
@@ -13,7 +13,7 @@
 #include "base/process/kill.h"
 #include "base/process/process_handle.h"
 #include "base/test/test_timeouts.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/multiprocess_func_list.h"
 
diff --git a/base/debug/thread_heap_usage_tracker.cc b/base/debug/thread_heap_usage_tracker.cc
index f9852db..7dda2f7 100644
--- a/base/debug/thread_heap_usage_tracker.cc
+++ b/base/debug/thread_heap_usage_tracker.cc
@@ -14,7 +14,7 @@
 #include "base/logging.h"
 #include "base/no_destructor.h"
 #include "base/threading/thread_local_storage.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_MACOSX) || defined(OS_IOS)
 #include <malloc/malloc.h>
diff --git a/base/environment.cc b/base/environment.cc
index cdea53c..fed01d0 100644
--- a/base/environment.cc
+++ b/base/environment.cc
@@ -12,7 +12,7 @@
 #include "base/strings/string_piece.h"
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/environment.h b/base/environment.h
index e842ab0..5d05396 100644
--- a/base/environment.h
+++ b/base/environment.h
@@ -12,7 +12,7 @@
 #include "base/base_export.h"
 #include "base/strings/string16.h"
 #include "base/strings/string_piece.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/environment_unittest.cc b/base/environment_unittest.cc
index 23aec51..79800ad 100644
--- a/base/environment_unittest.cc
+++ b/base/environment_unittest.cc
@@ -6,7 +6,7 @@
 
 #include <memory>
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/platform_test.h"
 
diff --git a/base/file_version_info.h b/base/file_version_info.h
index 3b9457c..3240bbe 100644
--- a/base/file_version_info.h
+++ b/base/file_version_info.h
@@ -7,7 +7,7 @@
 
 #include <string>
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "base/base_export.h"
 #include "base/strings/string16.h"
 
diff --git a/base/file_version_info_mac.mm b/base/file_version_info_mac.mm
index ce42924..ec0743c 100644
--- a/base/file_version_info_mac.mm
+++ b/base/file_version_info_mac.mm
@@ -11,7 +11,7 @@
 #include "base/mac/bundle_locations.h"
 #include "base/mac/foundation_util.h"
 #include "base/strings/sys_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 FileVersionInfoMac::FileVersionInfoMac(NSBundle *bundle)
     : bundle_([bundle retain]) {
diff --git a/base/files/dir_reader_posix.h b/base/files/dir_reader_posix.h
index 15fc744..e238d6b 100644
--- a/base/files/dir_reader_posix.h
+++ b/base/files/dir_reader_posix.h
@@ -5,7 +5,7 @@
 #ifndef BASE_FILES_DIR_READER_POSIX_H_
 #define BASE_FILES_DIR_READER_POSIX_H_
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 // This header provides a class, DirReaderPosix, which allows one to open and
 // read from directories without allocating memory. For the interface, see
diff --git a/base/files/dir_reader_posix_unittest.cc b/base/files/dir_reader_posix_unittest.cc
index 1954cb2..f82619b 100644
--- a/base/files/dir_reader_posix_unittest.cc
+++ b/base/files/dir_reader_posix_unittest.cc
@@ -13,7 +13,7 @@
 
 #include "base/files/scoped_temp_dir.h"
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_ANDROID)
diff --git a/base/files/file.cc b/base/files/file.cc
index 1a4ee37..92ff6dd 100644
--- a/base/files/file.cc
+++ b/base/files/file.cc
@@ -7,7 +7,7 @@
 #include "base/files/file_tracing.h"
 #include "base/metrics/histogram.h"
 #include "base/timer/elapsed_timer.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX) || defined(OS_FUCHSIA)
 #include <errno.h>
diff --git a/base/files/file.h b/base/files/file.h
index c3a31d84..801d68a 100644
--- a/base/files/file.h
+++ b/base/files/file.h
@@ -16,7 +16,7 @@
 #include "base/files/scoped_file.h"
 #include "base/macros.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX) || defined(OS_FUCHSIA)
 #include <sys/stat.h>
diff --git a/base/files/file_descriptor_watcher_posix_unittest.cc b/base/files/file_descriptor_watcher_posix_unittest.cc
index 4ed044b..8fc57ce 100644
--- a/base/files/file_descriptor_watcher_posix_unittest.cc
+++ b/base/files/file_descriptor_watcher_posix_unittest.cc
@@ -19,7 +19,7 @@
 #include "base/threading/platform_thread.h"
 #include "base/threading/thread.h"
 #include "base/threading/thread_checker_impl.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
diff --git a/base/files/file_enumerator.h b/base/files/file_enumerator.h
index 0fa99a6..db54301 100644
--- a/base/files/file_enumerator.h
+++ b/base/files/file_enumerator.h
@@ -15,7 +15,7 @@
 #include "base/files/file_path.h"
 #include "base/macros.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/files/file_enumerator_posix.cc b/base/files/file_enumerator_posix.cc
index 4b429c6..383d202 100644
--- a/base/files/file_enumerator_posix.cc
+++ b/base/files/file_enumerator_posix.cc
@@ -12,7 +12,7 @@
 
 #include "base/logging.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace {
diff --git a/base/files/file_locking_unittest.cc b/base/files/file_locking_unittest.cc
index e158b7d..bfe62ce 100644
--- a/base/files/file_locking_unittest.cc
+++ b/base/files/file_locking_unittest.cc
@@ -11,7 +11,7 @@
 #include "base/test/test_timeouts.h"
 #include "base/threading/platform_thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/multiprocess_func_list.h"
 
diff --git a/base/files/file_path.cc b/base/files/file_path.cc
index 14f9251..6221db9 100644
--- a/base/files/file_path.cc
+++ b/base/files/file_path.cc
@@ -14,7 +14,7 @@
 #include "base/strings/string_util.h"
 #include "base/strings/sys_string_conversions.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_MACOSX)
 #include "base/mac/scoped_cftyperef.h"
diff --git a/base/files/file_path.h b/base/files/file_path.h
index 2dc15f9..a31253e 100644
--- a/base/files/file_path.h
+++ b/base/files/file_path.h
@@ -113,7 +113,7 @@
 #include "base/macros.h"
 #include "base/strings/string16.h"
 #include "base/strings/string_piece.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // Windows-style drive letter support and pathname separator characters can be
 // enabled and disabled independently, to aid testing.  These #defines are
diff --git a/base/files/file_path_unittest.cc b/base/files/file_path_unittest.cc
index e722c68..d0fd00e 100644
--- a/base/files/file_path_unittest.cc
+++ b/base/files/file_path_unittest.cc
@@ -9,7 +9,7 @@
 #include "base/files/file_path.h"
 #include "base/macros.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/platform_test.h"
 
diff --git a/base/files/file_path_watcher.cc b/base/files/file_path_watcher.cc
index af40346..5b50312 100644
--- a/base/files/file_path_watcher.cc
+++ b/base/files/file_path_watcher.cc
@@ -8,7 +8,7 @@
 #include "base/files/file_path_watcher.h"
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/files/file_path_watcher_mac.cc b/base/files/file_path_watcher_mac.cc
index 4dcf90b..bd5a559 100644
--- a/base/files/file_path_watcher_mac.cc
+++ b/base/files/file_path_watcher_mac.cc
@@ -8,7 +8,7 @@
 #include "base/files/file_path_watcher_kqueue.h"
 #include "base/macros.h"
 #include "base/memory/ptr_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(OS_IOS)
 #include "base/files/file_path_watcher_fsevents.h"
diff --git a/base/files/file_path_watcher_unittest.cc b/base/files/file_path_watcher_unittest.cc
index 2cc2e58..9e3d94d 100644
--- a/base/files/file_path_watcher_unittest.cc
+++ b/base/files/file_path_watcher_unittest.cc
@@ -30,7 +30,7 @@
 #include "base/test/test_file_util.h"
 #include "base/test/test_timeouts.h"
 #include "base/threading/thread_task_runner_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_ANDROID)
diff --git a/base/files/file_posix.cc b/base/files/file_posix.cc
index d538b66..d6b7641 100644
--- a/base/files/file_posix.cc
+++ b/base/files/file_posix.cc
@@ -15,7 +15,7 @@
 #include "base/posix/eintr_wrapper.h"
 #include "base/strings/utf_string_conversions.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_ANDROID)
 #include "base/os_compat_android.h"
diff --git a/base/files/file_proxy_unittest.cc b/base/files/file_proxy_unittest.cc
index 20bb489..73a0497 100644
--- a/base/files/file_proxy_unittest.cc
+++ b/base/files/file_proxy_unittest.cc
@@ -19,7 +19,7 @@
 #include "base/run_loop.h"
 #include "base/threading/thread.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/files/file_unittest.cc b/base/files/file_unittest.cc
index 112b90d..b9992ad 100644
--- a/base/files/file_unittest.cc
+++ b/base/files/file_unittest.cc
@@ -12,7 +12,7 @@
 #include "base/files/memory_mapped_file.h"
 #include "base/files/scoped_temp_dir.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 using base::File;
diff --git a/base/files/file_util.cc b/base/files/file_util.cc
index 109cb22..887139b 100644
--- a/base/files/file_util.cc
+++ b/base/files/file_util.cc
@@ -19,7 +19,7 @@
 #include "base/strings/string_util.h"
 #include "base/strings/stringprintf.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/files/file_util.h b/base/files/file_util.h
index 1ba9368..9c7c748 100644
--- a/base/files/file_util.h
+++ b/base/files/file_util.h
@@ -25,7 +25,7 @@
 #include "base/files/file.h"
 #include "base/files/file_path.h"
 #include "base/strings/string16.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/windows_types.h"
diff --git a/base/files/file_util_posix.cc b/base/files/file_util_posix.cc
index d8a0ae0..37fb015 100644
--- a/base/files/file_util_posix.cc
+++ b/base/files/file_util_posix.cc
@@ -42,7 +42,7 @@
 #include "base/sys_info.h"
 #include "base/threading/thread_restrictions.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_MACOSX)
 #include <AvailabilityMacros.h>
diff --git a/base/files/file_util_unittest.cc b/base/files/file_util_unittest.cc
index a89e1b3..b4de085 100644
--- a/base/files/file_util_unittest.cc
+++ b/base/files/file_util_unittest.cc
@@ -34,7 +34,7 @@
 #include "base/test/test_file_util.h"
 #include "base/test/test_timeouts.h"
 #include "base/threading/platform_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/multiprocess_func_list.h"
 #include "testing/platform_test.h"
diff --git a/base/files/important_file_writer.cc b/base/files/important_file_writer.cc
index 235bb8d..7fd9d79 100644
--- a/base/files/important_file_writer.cc
+++ b/base/files/important_file_writer.cc
@@ -28,7 +28,7 @@
 #include "base/task_runner_util.h"
 #include "base/threading/thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/files/memory_mapped_file.cc b/base/files/memory_mapped_file.cc
index ccd9e23..8a07e49 100644
--- a/base/files/memory_mapped_file.cc
+++ b/base/files/memory_mapped_file.cc
@@ -10,7 +10,7 @@
 #include "base/logging.h"
 #include "base/numerics/safe_math.h"
 #include "base/sys_info.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/files/memory_mapped_file.h b/base/files/memory_mapped_file.h
index 04f4336..012f091 100644
--- a/base/files/memory_mapped_file.h
+++ b/base/files/memory_mapped_file.h
@@ -11,7 +11,7 @@
 #include "base/base_export.h"
 #include "base/files/file.h"
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/files/memory_mapped_file_posix.cc b/base/files/memory_mapped_file_posix.cc
index 45a0aea..e51c7e3 100644
--- a/base/files/memory_mapped_file_posix.cc
+++ b/base/files/memory_mapped_file_posix.cc
@@ -14,7 +14,7 @@
 #include "base/logging.h"
 #include "base/numerics/safe_conversions.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_ANDROID)
 #include <android/api-level.h>
diff --git a/base/files/platform_file.h b/base/files/platform_file.h
index 3929a0d..59bb6fe 100644
--- a/base/files/platform_file.h
+++ b/base/files/platform_file.h
@@ -6,7 +6,7 @@
 #define BASE_FILES_PLATFORM_FILE_H_
 
 #include "base/files/scoped_file.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/scoped_handle.h"
diff --git a/base/files/scoped_file.cc b/base/files/scoped_file.cc
index 1b9227d..f73e1c8 100644
--- a/base/files/scoped_file.cc
+++ b/base/files/scoped_file.cc
@@ -5,7 +5,7 @@
 #include "base/files/scoped_file.h"
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX) || defined(OS_FUCHSIA)
 #include <errno.h>
diff --git a/base/files/scoped_file.h b/base/files/scoped_file.h
index e32a603..ee8ed8b 100644
--- a/base/files/scoped_file.h
+++ b/base/files/scoped_file.h
@@ -12,7 +12,7 @@
 #include "base/base_export.h"
 #include "base/logging.h"
 #include "base/scoped_generic.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/files/scoped_temp_dir_unittest.cc b/base/files/scoped_temp_dir_unittest.cc
index 024b438..84eff6e 100644
--- a/base/files/scoped_temp_dir_unittest.cc
+++ b/base/files/scoped_temp_dir_unittest.cc
@@ -7,7 +7,7 @@
 #include "base/files/file.h"
 #include "base/files/file_util.h"
 #include "base/files/scoped_temp_dir.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/format_macros.h b/base/format_macros.h
index 1279ff7..46cd12c 100644
--- a/base/format_macros.h
+++ b/base/format_macros.h
@@ -24,7 +24,7 @@
 #include <stddef.h>
 #include <stdint.h>
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if (defined(OS_POSIX) || defined(OS_FUCHSIA)) && \
     (defined(_INTTYPES_H) || defined(_INTTYPES_H_)) && !defined(PRId64)
diff --git a/base/guid.h b/base/guid.h
index c6937a1..9b0eb28 100644
--- a/base/guid.h
+++ b/base/guid.h
@@ -11,7 +11,7 @@
 
 #include "base/base_export.h"
 #include "base/strings/string_piece.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/guid_unittest.cc b/base/guid_unittest.cc
index 70dad67..8aa56ed 100644
--- a/base/guid_unittest.cc
+++ b/base/guid_unittest.cc
@@ -9,7 +9,7 @@
 #include <limits>
 
 #include "base/strings/string_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/i18n/char_iterator.h b/base/i18n/char_iterator.h
index 24024d4..33f2934 100644
--- a/base/i18n/char_iterator.h
+++ b/base/i18n/char_iterator.h
@@ -13,7 +13,7 @@
 #include "base/i18n/base_i18n_export.h"
 #include "base/macros.h"
 #include "base/strings/string16.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // The CharIterator classes iterate through the characters in UTF8 and
 // UTF16 strings.  Example usage:
diff --git a/base/i18n/encoding_detection.cc b/base/i18n/encoding_detection.cc
index fef34e4..f6bbf4a 100644
--- a/base/i18n/encoding_detection.cc
+++ b/base/i18n/encoding_detection.cc
@@ -4,7 +4,7 @@
 
 #include "base/i18n/encoding_detection.h"
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "third_party/ced/src/compact_enc_det/compact_enc_det.h"
 
 // third_party/ced/src/util/encodings/encodings.h, which is included
diff --git a/base/i18n/file_util_icu.cc b/base/i18n/file_util_icu.cc
index c91aea1..20a7d2d 100644
--- a/base/i18n/file_util_icu.cc
+++ b/base/i18n/file_util_icu.cc
@@ -19,7 +19,7 @@
 #include "base/strings/string_util.h"
 #include "base/strings/sys_string_conversions.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "third_party/icu/source/common/unicode/uniset.h"
 #include "third_party/icu/source/i18n/unicode/coll.h"
 
diff --git a/base/i18n/file_util_icu_unittest.cc b/base/i18n/file_util_icu_unittest.cc
index 062d29b..f1c2ecc 100644
--- a/base/i18n/file_util_icu_unittest.cc
+++ b/base/i18n/file_util_icu_unittest.cc
@@ -9,7 +9,7 @@
 #include "base/files/file_util.h"
 #include "base/macros.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/platform_test.h"
 
diff --git a/base/i18n/icu_string_conversions_unittest.cc b/base/i18n/icu_string_conversions_unittest.cc
index d155986..871f18b 100644
--- a/base/i18n/icu_string_conversions_unittest.cc
+++ b/base/i18n/icu_string_conversions_unittest.cc
@@ -16,7 +16,7 @@
 #include "base/strings/string_piece.h"
 #include "base/strings/stringprintf.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/i18n/icu_util.cc b/base/i18n/icu_util.cc
index bc08ecb..1cbfbd6 100644
--- a/base/i18n/icu_util.cc
+++ b/base/i18n/icu_util.cc
@@ -17,7 +17,7 @@
 #include "base/path_service.h"
 #include "base/strings/string_util.h"
 #include "base/strings/sys_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "third_party/icu/source/common/unicode/putil.h"
 #include "third_party/icu/source/common/unicode/udata.h"
 #if (defined(OS_LINUX) && !defined(OS_CHROMEOS)) || defined(OS_ANDROID)
diff --git a/base/i18n/icu_util.h b/base/i18n/icu_util.h
index 5f9948f..9bae8a1 100644
--- a/base/i18n/icu_util.h
+++ b/base/i18n/icu_util.h
@@ -9,7 +9,7 @@
 
 #include "base/files/memory_mapped_file.h"
 #include "base/i18n/base_i18n_export.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #define ICU_UTIL_DATA_FILE   0
 #define ICU_UTIL_DATA_SHARED 1
diff --git a/base/i18n/number_formatting_unittest.cc b/base/i18n/number_formatting_unittest.cc
index 045bc0e..71b15a5 100644
--- a/base/i18n/number_formatting_unittest.cc
+++ b/base/i18n/number_formatting_unittest.cc
@@ -12,7 +12,7 @@
 #include "base/macros.h"
 #include "base/strings/utf_string_conversions.h"
 #include "base/test/icu_test_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "third_party/icu/source/i18n/unicode/usearch.h"
 
diff --git a/base/i18n/rtl.cc b/base/i18n/rtl.cc
index bba0d44..295968b 100644
--- a/base/i18n/rtl.cc
+++ b/base/i18n/rtl.cc
@@ -18,7 +18,7 @@
 #include "base/strings/string_util.h"
 #include "base/strings/sys_string_conversions.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "third_party/icu/source/common/unicode/locid.h"
 #include "third_party/icu/source/common/unicode/uchar.h"
 #include "third_party/icu/source/common/unicode/uscript.h"
diff --git a/base/i18n/rtl.h b/base/i18n/rtl.h
index 5325970..d3ba29f 100644
--- a/base/i18n/rtl.h
+++ b/base/i18n/rtl.h
@@ -10,7 +10,7 @@
 #include "base/compiler_specific.h"
 #include "base/i18n/base_i18n_export.h"
 #include "base/strings/string16.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/i18n/rtl_unittest.cc b/base/i18n/rtl_unittest.cc
index 313d2b4..88ae36a 100644
--- a/base/i18n/rtl_unittest.cc
+++ b/base/i18n/rtl_unittest.cc
@@ -14,7 +14,7 @@
 #include "base/strings/sys_string_conversions.h"
 #include "base/strings/utf_string_conversions.h"
 #include "base/test/icu_test_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/platform_test.h"
 #include "third_party/icu/source/common/unicode/locid.h"
diff --git a/base/json/json_file_value_serializer.cc b/base/json/json_file_value_serializer.cc
index a7c68c5..b0974c0 100644
--- a/base/json/json_file_value_serializer.cc
+++ b/base/json/json_file_value_serializer.cc
@@ -7,7 +7,7 @@
 #include "base/files/file_util.h"
 #include "base/json/json_string_value_serializer.h"
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 using base::FilePath;
 
diff --git a/base/json/json_reader_unittest.cc b/base/json/json_reader_unittest.cc
index faaf43e..828fba0 100644
--- a/base/json/json_reader_unittest.cc
+++ b/base/json/json_reader_unittest.cc
@@ -16,7 +16,7 @@
 #include "base/strings/string_piece.h"
 #include "base/strings/utf_string_conversions.h"
 #include "base/values.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/json/json_value_serializer_unittest.cc b/base/json/json_value_serializer_unittest.cc
index d25f950..b358dec 100644
--- a/base/json/json_value_serializer_unittest.cc
+++ b/base/json/json_value_serializer_unittest.cc
@@ -16,7 +16,7 @@
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
 #include "base/values.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/json/json_writer.cc b/base/json/json_writer.cc
index e4f1e3c..86af250 100644
--- a/base/json/json_writer.cc
+++ b/base/json/json_writer.cc
@@ -14,7 +14,7 @@
 #include "base/strings/string_number_conversions.h"
 #include "base/strings/utf_string_conversions.h"
 #include "base/values.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/json/json_writer_unittest.cc b/base/json/json_writer_unittest.cc
index 2d81af3..b90c3ea 100644
--- a/base/json/json_writer_unittest.cc
+++ b/base/json/json_writer_unittest.cc
@@ -6,7 +6,7 @@
 
 #include "base/memory/ptr_util.h"
 #include "base/values.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/lazy_instance_unittest.cc b/base/lazy_instance_unittest.cc
index a5f024c..242e8b5 100644
--- a/base/lazy_instance_unittest.cc
+++ b/base/lazy_instance_unittest.cc
@@ -17,7 +17,7 @@
 #include "base/threading/platform_thread.h"
 #include "base/threading/simple_thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace {
diff --git a/base/linux_util.cc b/base/linux_util.cc
index ddf848e..5c57d78 100644
--- a/base/linux_util.cc
+++ b/base/linux_util.cc
@@ -25,7 +25,7 @@
 #include "base/strings/string_tokenizer.h"
 #include "base/strings/string_util.h"
 #include "base/synchronization/lock.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace {
 
diff --git a/base/location.cc b/base/location.cc
index 8bbf6ed..59d6728 100644
--- a/base/location.cc
+++ b/base/location.cc
@@ -11,7 +11,7 @@
 #include "base/compiler_specific.h"
 #include "base/strings/string_number_conversions.h"
 #include "base/strings/stringprintf.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/logging.cc b/base/logging.cc
index 8eabda0..cf96aeb 100644
--- a/base/logging.cc
+++ b/base/logging.cc
@@ -8,7 +8,7 @@
 #include <stdint.h>
 
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <io.h>
diff --git a/base/logging.h b/base/logging.h
index 2996059..39b17ff 100644
--- a/base/logging.h
+++ b/base/logging.h
@@ -21,7 +21,7 @@
 #include "base/macros.h"
 #include "base/strings/string_piece_forward.h"
 #include "base/template_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 //
 // Optional message capabilities
diff --git a/base/logging_unittest.cc b/base/logging_unittest.cc
index 9025aaf..0264012 100644
--- a/base/logging_unittest.cc
+++ b/base/logging_unittest.cc
@@ -9,7 +9,7 @@
 #include "base/macros.h"
 #include "base/strings/string_piece.h"
 #include "base/test/scoped_feature_list.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
diff --git a/base/mac/bind_objc_block_unittest.mm b/base/mac/bind_objc_block_unittest.mm
index 2b18672..38da5aa 100644
--- a/base/mac/bind_objc_block_unittest.mm
+++ b/base/mac/bind_objc_block_unittest.mm
@@ -9,7 +9,7 @@
 #include "base/bind.h"
 #include "base/callback.h"
 #include "base/callback_helpers.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/gtest_mac.h"
 
diff --git a/base/mac/bind_objc_block_unittest_arc.mm b/base/mac/bind_objc_block_unittest_arc.mm
index 24ec974..f5f953d 100644
--- a/base/mac/bind_objc_block_unittest_arc.mm
+++ b/base/mac/bind_objc_block_unittest_arc.mm
@@ -9,7 +9,7 @@
 #include "base/bind.h"
 #include "base/callback.h"
 #include "base/callback_helpers.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/gtest_mac.h"
 
diff --git a/base/mac/call_with_eh_frame.cc b/base/mac/call_with_eh_frame.cc
index 4578541..35e01bc 100644
--- a/base/mac/call_with_eh_frame.cc
+++ b/base/mac/call_with_eh_frame.cc
@@ -7,7 +7,7 @@
 #include <stdint.h>
 #include <unwind.h>
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace mac {
diff --git a/base/mac/foundation_util.h b/base/mac/foundation_util.h
index abdfdf3..d92a204 100644
--- a/base/mac/foundation_util.h
+++ b/base/mac/foundation_util.h
@@ -13,7 +13,7 @@
 #include "base/base_export.h"
 #include "base/logging.h"
 #include "base/mac/scoped_cftyperef.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(__OBJC__)
 #import <Foundation/Foundation.h>
diff --git a/base/mac/foundation_util.mm b/base/mac/foundation_util.mm
index 15fc15b..ae19544 100644
--- a/base/mac/foundation_util.mm
+++ b/base/mac/foundation_util.mm
@@ -15,7 +15,7 @@
 #include "base/macros.h"
 #include "base/numerics/safe_conversions.h"
 #include "base/strings/sys_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(OS_IOS)
 #import <AppKit/AppKit.h>
diff --git a/base/mac/foundation_util_unittest.mm b/base/mac/foundation_util_unittest.mm
index a584094..ce02163 100644
--- a/base/mac/foundation_util_unittest.mm
+++ b/base/mac/foundation_util_unittest.mm
@@ -14,7 +14,7 @@
 #include "base/mac/scoped_nsautorelease_pool.h"
 #include "base/macros.h"
 #include "base/strings/stringprintf.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #import "testing/gtest_mac.h"
 
diff --git a/base/mac/mac_logging.h b/base/mac/mac_logging.h
index 30e43ea..ccdc404 100644
--- a/base/mac/mac_logging.h
+++ b/base/mac/mac_logging.h
@@ -8,7 +8,7 @@
 #include "base/base_export.h"
 #include "base/logging.h"
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_IOS)
 #include <MacTypes.h>
diff --git a/base/mac/mac_logging.mm b/base/mac/mac_logging.mm
index f0d3c07..1f9682d 100644
--- a/base/mac/mac_logging.mm
+++ b/base/mac/mac_logging.mm
@@ -8,7 +8,7 @@
 
 #include <iomanip>
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(OS_IOS)
 #include <CoreServices/CoreServices.h>
diff --git a/base/mac/mach_logging.cc b/base/mac/mach_logging.cc
index 7b939b3..fbd0134 100644
--- a/base/mac/mach_logging.cc
+++ b/base/mac/mach_logging.cc
@@ -8,7 +8,7 @@
 #include <string>
 
 #include "base/strings/stringprintf.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(OS_IOS)
 #include <servers/bootstrap.h>
diff --git a/base/mac/mach_logging.h b/base/mac/mach_logging.h
index 59ab762..b4b4215 100644
--- a/base/mac/mach_logging.h
+++ b/base/mac/mach_logging.h
@@ -10,7 +10,7 @@
 #include "base/base_export.h"
 #include "base/logging.h"
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // Use the MACH_LOG family of macros along with a mach_error_t (kern_return_t)
 // containing a Mach error. The error value will be decoded so that logged
diff --git a/base/memory/aligned_memory.cc b/base/memory/aligned_memory.cc
index 93cbeb5..861ea50 100644
--- a/base/memory/aligned_memory.cc
+++ b/base/memory/aligned_memory.cc
@@ -5,7 +5,7 @@
 #include "base/memory/aligned_memory.h"
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_ANDROID)
 #include <malloc.h>
diff --git a/base/memory/aligned_memory.h b/base/memory/aligned_memory.h
index 89f9505..e0dd7f5 100644
--- a/base/memory/aligned_memory.h
+++ b/base/memory/aligned_memory.h
@@ -12,7 +12,7 @@
 
 #include "base/base_export.h"
 #include "base/compiler_specific.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(COMPILER_MSVC)
 #include <malloc.h>
diff --git a/base/memory/aligned_memory_unittest.cc b/base/memory/aligned_memory_unittest.cc
index e354f38..29ed706 100644
--- a/base/memory/aligned_memory_unittest.cc
+++ b/base/memory/aligned_memory_unittest.cc
@@ -6,7 +6,7 @@
 
 #include <memory>
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #define EXPECT_ALIGNED(ptr, align) \
diff --git a/base/memory/discardable_shared_memory.cc b/base/memory/discardable_shared_memory.cc
index 3b6b4db..8601e21 100644
--- a/base/memory/discardable_shared_memory.cc
+++ b/base/memory/discardable_shared_memory.cc
@@ -16,7 +16,7 @@
 #include "base/process/process_metrics.h"
 #include "base/trace_event/memory_allocator_dump.h"
 #include "base/trace_event/process_memory_dump.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX) && !defined(OS_NACL)
 // For madvise() which is available on all POSIX compatible systems.
diff --git a/base/memory/discardable_shared_memory.h b/base/memory/discardable_shared_memory.h
index 52a78b1..55e99d9 100644
--- a/base/memory/discardable_shared_memory.h
+++ b/base/memory/discardable_shared_memory.h
@@ -14,7 +14,7 @@
 #include "base/memory/unsafe_shared_memory_region.h"
 #include "base/threading/thread_collision_warner.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if DCHECK_IS_ON()
 #include <set>
diff --git a/base/memory/discardable_shared_memory_unittest.cc b/base/memory/discardable_shared_memory_unittest.cc
index a7310a7..bda37f6 100644
--- a/base/memory/discardable_shared_memory_unittest.cc
+++ b/base/memory/discardable_shared_memory_unittest.cc
@@ -11,7 +11,7 @@
 #include "base/process/process_metrics.h"
 #include "base/trace_event/memory_allocator_dump.h"
 #include "base/trace_event/process_memory_dump.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/memory/platform_shared_memory_region.h b/base/memory/platform_shared_memory_region.h
index 143a1d4..df4cc4d 100644
--- a/base/memory/platform_shared_memory_region.h
+++ b/base/memory/platform_shared_memory_region.h
@@ -11,7 +11,7 @@
 #include "base/gtest_prod_util.h"
 #include "base/macros.h"
 #include "base/unguessable_token.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_MACOSX) && !defined(OS_IOS)
 #include <mach/mach.h>
diff --git a/base/memory/platform_shared_memory_region_mac.cc b/base/memory/platform_shared_memory_region_mac.cc
index b4d12ba..9cbe92e 100644
--- a/base/memory/platform_shared_memory_region_mac.cc
+++ b/base/memory/platform_shared_memory_region_mac.cc
@@ -9,7 +9,7 @@
 #include "base/mac/mach_logging.h"
 #include "base/mac/scoped_mach_vm.h"
 #include "base/numerics/checked_math.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_IOS)
 #error "MacOS only - iOS uses platform_shared_memory_region_posix.cc"
diff --git a/base/memory/platform_shared_memory_region_posix.cc b/base/memory/platform_shared_memory_region_posix.cc
index 8453c12..422cec5 100644
--- a/base/memory/platform_shared_memory_region_posix.cc
+++ b/base/memory/platform_shared_memory_region_posix.cc
@@ -11,7 +11,7 @@
 #include "base/files/file_util.h"
 #include "base/numerics/checked_math.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace subtle {
diff --git a/base/memory/platform_shared_memory_region_unittest.cc b/base/memory/platform_shared_memory_region_unittest.cc
index df3e526..f4d3071 100644
--- a/base/memory/platform_shared_memory_region_unittest.cc
+++ b/base/memory/platform_shared_memory_region_unittest.cc
@@ -9,7 +9,7 @@
 #include "base/sys_info.h"
 #include "base/test/gtest_util.h"
 #include "base/test/test_shared_memory_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_MACOSX) && !defined(OS_IOS)
diff --git a/base/memory/protected_memory.h b/base/memory/protected_memory.h
index 8d7868f..9f372ca 100644
--- a/base/memory/protected_memory.h
+++ b/base/memory/protected_memory.h
@@ -57,7 +57,7 @@
 #include "base/logging.h"
 #include "base/macros.h"
 #include "base/synchronization/lock.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #define PROTECTED_MEMORY_ENABLED 1
 
diff --git a/base/memory/protected_memory_cfi.h b/base/memory/protected_memory_cfi.h
index 44f4654..f5b0121 100644
--- a/base/memory/protected_memory_cfi.h
+++ b/base/memory/protected_memory_cfi.h
@@ -16,7 +16,7 @@
 #include "base/compiler_specific.h"
 #include "base/macros.h"
 #include "base/memory/protected_memory.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace internal {
diff --git a/base/memory/protected_memory_posix.cc b/base/memory/protected_memory_posix.cc
index d003d79..1839e3e 100644
--- a/base/memory/protected_memory_posix.cc
+++ b/base/memory/protected_memory_posix.cc
@@ -20,7 +20,7 @@
 #include "base/posix/eintr_wrapper.h"
 #include "base/process/process_metrics.h"
 #include "base/synchronization/lock.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/memory/protected_memory_unittest.cc b/base/memory/protected_memory_unittest.cc
index 5601f45..b6ec61d 100644
--- a/base/memory/protected_memory_unittest.cc
+++ b/base/memory/protected_memory_unittest.cc
@@ -6,7 +6,7 @@
 #include "base/memory/protected_memory_cfi.h"
 #include "base/synchronization/lock.h"
 #include "base/test/gtest_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/memory/protected_memory_win.cc b/base/memory/protected_memory_win.cc
index cf3da78..43ad8ba 100644
--- a/base/memory/protected_memory_win.cc
+++ b/base/memory/protected_memory_win.cc
@@ -10,7 +10,7 @@
 
 #include "base/process/process_metrics.h"
 #include "base/synchronization/lock.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/memory/read_only_shared_memory_region.cc b/base/memory/read_only_shared_memory_region.cc
index 6b654c9..0a5f900 100644
--- a/base/memory/read_only_shared_memory_region.cc
+++ b/base/memory/read_only_shared_memory_region.cc
@@ -7,7 +7,7 @@
 #include <utility>
 
 #include "base/memory/shared_memory.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/memory/ref_counted.h b/base/memory/ref_counted.h
index 249f70e..26a8437 100644
--- a/base/memory/ref_counted.h
+++ b/base/memory/ref_counted.h
@@ -17,7 +17,7 @@
 #include "base/memory/scoped_refptr.h"
 #include "base/sequence_checker.h"
 #include "base/threading/thread_collision_warner.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace subtle {
diff --git a/base/memory/shared_memory.h b/base/memory/shared_memory.h
index c573ef7..aa1f682 100644
--- a/base/memory/shared_memory.h
+++ b/base/memory/shared_memory.h
@@ -15,7 +15,7 @@
 #include "base/memory/shared_memory_handle.h"
 #include "base/process/process_handle.h"
 #include "base/strings/string16.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX) || defined(OS_FUCHSIA)
 #include <stdio.h>
diff --git a/base/memory/shared_memory_handle.h b/base/memory/shared_memory_handle.h
index ae143af..18aa816 100644
--- a/base/memory/shared_memory_handle.h
+++ b/base/memory/shared_memory_handle.h
@@ -8,7 +8,7 @@
 #include <stddef.h>
 
 #include "base/unguessable_token.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/process/process_handle.h"
diff --git a/base/memory/shared_memory_helper.h b/base/memory/shared_memory_helper.h
index 2c24f86..9da810e 100644
--- a/base/memory/shared_memory_helper.h
+++ b/base/memory/shared_memory_helper.h
@@ -6,7 +6,7 @@
 #define BASE_MEMORY_SHARED_MEMORY_HELPER_H_
 
 #include "base/memory/shared_memory.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #include <fcntl.h>
 
diff --git a/base/memory/shared_memory_mac.cc b/base/memory/shared_memory_mac.cc
index 0a233e5..3990948 100644
--- a/base/memory/shared_memory_mac.cc
+++ b/base/memory/shared_memory_mac.cc
@@ -28,7 +28,7 @@
 #include "base/strings/utf_string_conversions.h"
 #include "base/threading/thread_restrictions.h"
 #include "base/unguessable_token.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_IOS)
 #error "MacOS only - iOS uses shared_memory_posix.cc"
diff --git a/base/memory/shared_memory_mapping.cc b/base/memory/shared_memory_mapping.cc
index 005e3fc..878a012 100644
--- a/base/memory/shared_memory_mapping.cc
+++ b/base/memory/shared_memory_mapping.cc
@@ -9,7 +9,7 @@
 #include "base/logging.h"
 #include "base/memory/shared_memory_tracker.h"
 #include "base/unguessable_token.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX)
 #include <sys/mman.h>
diff --git a/base/memory/shared_memory_posix.cc b/base/memory/shared_memory_posix.cc
index d3163e5..6b53551 100644
--- a/base/memory/shared_memory_posix.cc
+++ b/base/memory/shared_memory_posix.cc
@@ -25,7 +25,7 @@
 #include "base/threading/thread_restrictions.h"
 #include "base/trace_event/trace_event.h"
 #include "base/unguessable_token.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_ANDROID)
 #include "base/os_compat_android.h"
diff --git a/base/memory/shared_memory_region_unittest.cc b/base/memory/shared_memory_region_unittest.cc
index fcecb1f..e917154 100644
--- a/base/memory/shared_memory_region_unittest.cc
+++ b/base/memory/shared_memory_region_unittest.cc
@@ -10,7 +10,7 @@
 #include "base/memory/writable_shared_memory_region.h"
 #include "base/sys_info.h"
 #include "base/test/test_shared_memory_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/memory/shared_memory_unittest.cc b/base/memory/shared_memory_unittest.cc
index b754540..c4c21c4 100644
--- a/base/memory/shared_memory_unittest.cc
+++ b/base/memory/shared_memory_unittest.cc
@@ -26,7 +26,7 @@
 #include "base/threading/platform_thread.h"
 #include "base/time/time.h"
 #include "base/unguessable_token.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/multiprocess_func_list.h"
 
diff --git a/base/memory/writable_shared_memory_region.cc b/base/memory/writable_shared_memory_region.cc
index 0806c37..7a29522 100644
--- a/base/memory/writable_shared_memory_region.cc
+++ b/base/memory/writable_shared_memory_region.cc
@@ -7,7 +7,7 @@
 #include <utility>
 
 #include "base/memory/shared_memory.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/message_loop/incoming_task_queue.cc b/base/message_loop/incoming_task_queue.cc
index 9f5f855..0dd1272 100644
--- a/base/message_loop/incoming_task_queue.cc
+++ b/base/message_loop/incoming_task_queue.cc
@@ -11,7 +11,7 @@
 #include "base/message_loop/message_loop.h"
 #include "base/synchronization/waitable_event.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace internal {
diff --git a/base/message_loop/message_loop.h b/base/message_loop/message_loop.h
index 2d8047d..6bcccb8 100644
--- a/base/message_loop/message_loop.h
+++ b/base/message_loop/message_loop.h
@@ -26,7 +26,7 @@
 #include "base/threading/sequence_local_storage_map.h"
 #include "base/threading/thread_checker.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/message_loop/message_loop_current.h b/base/message_loop/message_loop_current.h
index c5016dc..593942b 100644
--- a/base/message_loop/message_loop_current.h
+++ b/base/message_loop/message_loop_current.h
@@ -12,7 +12,7 @@
 #include "base/message_loop/message_pump_for_ui.h"
 #include "base/pending_task.h"
 #include "base/single_thread_task_runner.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/message_loop/message_loop_io_posix_unittest.cc b/base/message_loop/message_loop_io_posix_unittest.cc
index 4dd5f28..9ecd606 100644
--- a/base/message_loop/message_loop_io_posix_unittest.cc
+++ b/base/message_loop/message_loop_io_posix_unittest.cc
@@ -16,7 +16,7 @@
 #include "base/posix/eintr_wrapper.h"
 #include "base/run_loop.h"
 #include "base/test/gtest_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/message_loop/message_loop_unittest.cc b/base/message_loop/message_loop_unittest.cc
index 8525366..970f6d1 100644
--- a/base/message_loop/message_loop_unittest.cc
+++ b/base/message_loop/message_loop_unittest.cc
@@ -29,7 +29,7 @@
 #include "base/threading/sequence_local_storage_slot.h"
 #include "base/threading/thread.h"
 #include "base/threading/thread_task_runner_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_ANDROID)
diff --git a/base/message_loop/message_pump_default.cc b/base/message_loop/message_pump_default.cc
index 4104e73..b84aa36 100644
--- a/base/message_loop/message_pump_default.cc
+++ b/base/message_loop/message_pump_default.cc
@@ -7,7 +7,7 @@
 #include "base/auto_reset.h"
 #include "base/logging.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_MACOSX)
 #include <mach/thread_policy.h>
diff --git a/base/message_loop/message_pump_default.h b/base/message_loop/message_pump_default.h
index dd11adc..f92d2e4 100644
--- a/base/message_loop/message_pump_default.h
+++ b/base/message_loop/message_pump_default.h
@@ -10,7 +10,7 @@
 #include "base/message_loop/message_pump.h"
 #include "base/synchronization/waitable_event.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/message_loop/message_pump_for_io.h b/base/message_loop/message_pump_for_io.h
index 6aac1e6..39a972e 100644
--- a/base/message_loop/message_pump_for_io.h
+++ b/base/message_loop/message_pump_for_io.h
@@ -8,7 +8,7 @@
 // This header is a forwarding header to coalesce the various platform specific
 // types representing MessagePumpForIO.
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/message_loop/message_pump_win.h"
diff --git a/base/message_loop/message_pump_for_ui.h b/base/message_loop/message_pump_for_ui.h
index 6ee02b0..620c2ae 100644
--- a/base/message_loop/message_pump_for_ui.h
+++ b/base/message_loop/message_pump_for_ui.h
@@ -8,7 +8,7 @@
 // This header is a forwarding header to coalesce the various platform specific
 // implementations of MessagePumpForUI.
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/message_loop/message_pump_win.h"
diff --git a/base/message_loop/message_pump_libevent.cc b/base/message_loop/message_pump_libevent.cc
index 2a595e5..a8efd27 100644
--- a/base/message_loop/message_pump_libevent.cc
+++ b/base/message_loop/message_pump_libevent.cc
@@ -17,7 +17,7 @@
 #include "base/third_party/libevent/event.h"
 #include "base/time/time.h"
 #include "base/trace_event/trace_event.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_MACOSX)
 #include "base/mac/scoped_nsautorelease_pool.h"
diff --git a/base/message_loop/message_pump_libevent_unittest.cc b/base/message_loop/message_pump_libevent_unittest.cc
index 55eb0b4..bb26c29 100644
--- a/base/message_loop/message_pump_libevent_unittest.cc
+++ b/base/message_loop/message_pump_libevent_unittest.cc
@@ -24,7 +24,7 @@
 #include "base/threading/sequenced_task_runner_handle.h"
 #include "base/threading/thread.h"
 #include "base/threading/thread_task_runner_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/message_loop/message_pump_mac.h b/base/message_loop/message_pump_mac.h
index fa88c3a..686fcd2 100644
--- a/base/message_loop/message_pump_mac.h
+++ b/base/message_loop/message_pump_mac.h
@@ -38,7 +38,7 @@
 #include "base/macros.h"
 #include "base/memory/weak_ptr.h"
 #include "base/message_loop/timer_slack.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(__OBJC__)
 #if defined(OS_IOS)
diff --git a/base/message_loop/message_pump_mac.mm b/base/message_loop/message_pump_mac.mm
index fb25201..14b0a86 100644
--- a/base/message_loop/message_pump_mac.mm
+++ b/base/message_loop/message_pump_mac.mm
@@ -17,7 +17,7 @@
 #include "base/message_loop/timer_slack.h"
 #include "base/run_loop.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(OS_IOS)
 #import <AppKit/AppKit.h>
diff --git a/base/message_loop/message_pump_perftest.cc b/base/message_loop/message_pump_perftest.cc
index 76f18cb..8391888 100644
--- a/base/message_loop/message_pump_perftest.cc
+++ b/base/message_loop/message_pump_perftest.cc
@@ -17,7 +17,7 @@
 #include "base/synchronization/waitable_event.h"
 #include "base/threading/thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/perf/perf_test.h"
 
diff --git a/base/metrics/field_trial.h b/base/metrics/field_trial.h
index ac4ea1c..d5d6cb8 100644
--- a/base/metrics/field_trial.h
+++ b/base/metrics/field_trial.h
@@ -80,7 +80,7 @@
 #include "base/strings/string_piece.h"
 #include "base/synchronization/lock.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/metrics/field_trial_unittest.cc b/base/metrics/field_trial_unittest.cc
index 41550dd..4d55697 100644
--- a/base/metrics/field_trial_unittest.cc
+++ b/base/metrics/field_trial_unittest.cc
@@ -20,7 +20,7 @@
 #include "base/test/mock_entropy_provider.h"
 #include "base/test/scoped_feature_list.h"
 #include "base/test/test_shared_memory_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/metrics/histogram.cc b/base/metrics/histogram.cc
index f765181..07e09cf 100644
--- a/base/metrics/histogram.cc
+++ b/base/metrics/histogram.cc
@@ -34,7 +34,7 @@
 #include "base/synchronization/lock.h"
 #include "base/sys_info.h"
 #include "base/values.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/metrics/persistent_histogram_storage.cc b/base/metrics/persistent_histogram_storage.cc
index e2a56d7..8f527b0 100644
--- a/base/metrics/persistent_histogram_storage.cc
+++ b/base/metrics/persistent_histogram_storage.cc
@@ -12,7 +12,7 @@
 #include "base/strings/string_util.h"
 #include "base/strings/stringprintf.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace {
 
diff --git a/base/metrics/persistent_histogram_storage_unittest.cc b/base/metrics/persistent_histogram_storage_unittest.cc
index 0b9b1ce..ed42068 100644
--- a/base/metrics/persistent_histogram_storage_unittest.cc
+++ b/base/metrics/persistent_histogram_storage_unittest.cc
@@ -11,7 +11,7 @@
 #include "base/files/scoped_temp_dir.h"
 #include "base/metrics/histogram_macros.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/metrics/persistent_memory_allocator.cc b/base/metrics/persistent_memory_allocator.cc
index 9b18a00..7d9c03d 100644
--- a/base/metrics/persistent_memory_allocator.cc
+++ b/base/metrics/persistent_memory_allocator.cc
@@ -22,7 +22,7 @@
 #include "base/numerics/safe_conversions.h"
 #include "base/sys_info.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace {
 
diff --git a/base/native_library.h b/base/native_library.h
index 04356d9..6bd37d0 100644
--- a/base/native_library.h
+++ b/base/native_library.h
@@ -12,7 +12,7 @@
 
 #include "base/base_export.h"
 #include "base/strings/string_piece.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/native_library_unittest.cc b/base/native_library_unittest.cc
index 2bfb9ec..ac8ec5c 100644
--- a/base/native_library_unittest.cc
+++ b/base/native_library_unittest.cc
@@ -7,7 +7,7 @@
 #include "base/native_library.h"
 #include "base/path_service.h"
 #include "base/test/native_library_test_utils.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/nix/mime_util_xdg.h b/base/nix/mime_util_xdg.h
index e0f264a..0685321 100644
--- a/base/nix/mime_util_xdg.h
+++ b/base/nix/mime_util_xdg.h
@@ -8,7 +8,7 @@
 #include <string>
 
 #include "base/base_export.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/no_destructor_unittest.cc b/base/no_destructor_unittest.cc
index 8f9d4a4..c552a92 100644
--- a/base/no_destructor_unittest.cc
+++ b/base/no_destructor_unittest.cc
@@ -8,7 +8,7 @@
 #include <utility>
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/observer_list_threadsafe.h b/base/observer_list_threadsafe.h
index bd349f3..05b5a56 100644
--- a/base/observer_list_threadsafe.h
+++ b/base/observer_list_threadsafe.h
@@ -20,7 +20,7 @@
 #include "base/synchronization/lock.h"
 #include "base/threading/sequenced_task_runner_handle.h"
 #include "base/threading/thread_local.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // TODO(fdoray): Removing these includes causes IWYU failures in other headers,
 // remove them in a follow- up CL.
diff --git a/base/path_service.cc b/base/path_service.cc
index 6ac501e..2843dce 100644
--- a/base/path_service.cc
+++ b/base/path_service.cc
@@ -16,7 +16,7 @@
 #include "base/files/file_util.h"
 #include "base/logging.h"
 #include "base/synchronization/lock.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/path_service.h b/base/path_service.h
index 9b4715f..f92fead 100644
--- a/base/path_service.h
+++ b/base/path_service.h
@@ -10,7 +10,7 @@
 #include "base/base_export.h"
 #include "base/base_paths.h"
 #include "base/gtest_prod_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/path_service_unittest.cc b/base/path_service_unittest.cc
index 8fcd673..378064a 100644
--- a/base/path_service_unittest.cc
+++ b/base/path_service_unittest.cc
@@ -8,7 +8,7 @@
 #include "base/files/file_util.h"
 #include "base/files/scoped_temp_dir.h"
 #include "base/strings/string_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest-spi.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/platform_test.h"
diff --git a/base/pickle.cc b/base/pickle.cc
index c2189c8..89e2d01 100644
--- a/base/pickle.cc
+++ b/base/pickle.cc
@@ -13,7 +13,7 @@
 #include "base/macros.h"
 #include "base/numerics/safe_conversions.h"
 #include "base/numerics/safe_math.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/posix/eintr_wrapper.h b/base/posix/eintr_wrapper.h
index c0ffced..c3565da 100644
--- a/base/posix/eintr_wrapper.h
+++ b/base/posix/eintr_wrapper.h
@@ -17,7 +17,7 @@
 #ifndef BASE_POSIX_EINTR_WRAPPER_H_
 #define BASE_POSIX_EINTR_WRAPPER_H_
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX) && !defined(OS_FUCHSIA)
 
diff --git a/base/posix/global_descriptors.h b/base/posix/global_descriptors.h
index 9d68761..1afd12f 100644
--- a/base/posix/global_descriptors.h
+++ b/base/posix/global_descriptors.h
@@ -5,7 +5,7 @@
 #ifndef BASE_POSIX_GLOBAL_DESCRIPTORS_H_
 #define BASE_POSIX_GLOBAL_DESCRIPTORS_H_
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #include <vector>
 #include <utility>
diff --git a/base/posix/safe_strerror.cc b/base/posix/safe_strerror.cc
index aef5742..aa18098 100644
--- a/base/posix/safe_strerror.cc
+++ b/base/posix/safe_strerror.cc
@@ -16,7 +16,7 @@
 #include <stdio.h>
 #include <string.h>
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/posix/unix_domain_socket.cc b/base/posix/unix_domain_socket.cc
index 7c087a5..17c8a24 100644
--- a/base/posix/unix_domain_socket.cc
+++ b/base/posix/unix_domain_socket.cc
@@ -18,7 +18,7 @@
 #include "base/pickle.h"
 #include "base/posix/eintr_wrapper.h"
 #include "base/stl_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(OS_NACL_NONSFI)
 #include <sys/uio.h>
diff --git a/base/posix/unix_domain_socket.h b/base/posix/unix_domain_socket.h
index 5c74f07..0bbd064 100644
--- a/base/posix/unix_domain_socket.h
+++ b/base/posix/unix_domain_socket.h
@@ -13,7 +13,7 @@
 #include "base/base_export.h"
 #include "base/files/scoped_file.h"
 #include "base/process/process_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/posix/unix_domain_socket_unittest.cc b/base/posix/unix_domain_socket_unittest.cc
index 453064f..f626de5 100644
--- a/base/posix/unix_domain_socket_unittest.cc
+++ b/base/posix/unix_domain_socket_unittest.cc
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #include <stddef.h>
 #include <stdint.h>
diff --git a/base/power_monitor/power_monitor_device_source.h b/base/power_monitor/power_monitor_device_source.h
index 1e2c885..2996609 100644
--- a/base/power_monitor/power_monitor_device_source.h
+++ b/base/power_monitor/power_monitor_device_source.h
@@ -9,7 +9,7 @@
 #include "base/macros.h"
 #include "base/power_monitor/power_monitor_source.h"
 #include "base/power_monitor/power_observer.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/power_monitor/power_monitor_source.cc b/base/power_monitor/power_monitor_source.cc
index d4757b0..8ec6e68 100644
--- a/base/power_monitor/power_monitor_source.cc
+++ b/base/power_monitor/power_monitor_source.cc
@@ -5,7 +5,7 @@
 #include "base/power_monitor/power_monitor_source.h"
 
 #include "base/power_monitor/power_monitor.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/process/kill.h b/base/process/kill.h
index 005b72e..a3a7c63 100644
--- a/base/process/kill.h
+++ b/base/process/kill.h
@@ -12,7 +12,7 @@
 #include "base/process/process.h"
 #include "base/process/process_handle.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/process/kill_posix.cc b/base/process/kill_posix.cc
index 4b52d8b..5159c19 100644
--- a/base/process/kill_posix.cc
+++ b/base/process/kill_posix.cc
@@ -18,7 +18,7 @@
 #include "base/process/process_iterator.h"
 #include "base/task_scheduler/post_task.h"
 #include "base/threading/platform_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/process/launch.cc b/base/process/launch.cc
index c03e1a7..dbb8447 100644
--- a/base/process/launch.cc
+++ b/base/process/launch.cc
@@ -3,7 +3,7 @@
 // found in the LICENSE file.
 
 #include "base/process/launch.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/process/launch.h b/base/process/launch.h
index b4530b7..0a08750 100644
--- a/base/process/launch.h
+++ b/base/process/launch.h
@@ -19,7 +19,7 @@
 #include "base/process/process.h"
 #include "base/process/process_handle.h"
 #include "base/strings/string_piece.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/process/launch_posix.cc b/base/process/launch_posix.cc
index ec58488..d1690bc 100644
--- a/base/process/launch_posix.cc
+++ b/base/process/launch_posix.cc
@@ -43,7 +43,7 @@
 #include "base/threading/thread_restrictions.h"
 #include "base/time/time.h"
 #include "base/trace_event/trace_event.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_LINUX) || defined(OS_AIX)
 #include <sys/prctl.h>
diff --git a/base/process/memory.cc b/base/process/memory.cc
index 5b98733..a98e309 100644
--- a/base/process/memory.cc
+++ b/base/process/memory.cc
@@ -5,7 +5,7 @@
 #include "base/debug/alias.h"
 #include "base/logging.h"
 #include "base/process/memory.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/process/memory.h b/base/process/memory.h
index 7f16e12..5ed3acc 100644
--- a/base/process/memory.h
+++ b/base/process/memory.h
@@ -9,7 +9,7 @@
 
 #include "base/base_export.h"
 #include "base/process/process_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #ifdef PVALLOC_AVAILABLE
 // Build config explicitly tells us whether or not pvalloc is available.
diff --git a/base/process/memory_linux.cc b/base/process/memory_linux.cc
index ffd8757..7c78cb8 100644
--- a/base/process/memory_linux.cc
+++ b/base/process/memory_linux.cc
@@ -14,7 +14,7 @@
 #include "base/logging.h"
 #include "base/process/internal_linux.h"
 #include "base/strings/string_number_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(USE_TCMALLOC)
 #include "third_party/tcmalloc/chromium/src/config.h"
@@ -95,14 +95,7 @@
 }
 
 bool UncheckedMalloc(size_t size, void** result) {
-#if defined(MEMORY_TOOL_REPLACES_ALLOCATOR) || \
-    (!defined(LIBC_GLIBC) && !defined(USE_TCMALLOC))
   *result = malloc(size);
-#elif defined(LIBC_GLIBC) && !defined(USE_TCMALLOC)
-  *result = __libc_malloc(size);
-#elif defined(USE_TCMALLOC)
-  *result = tc_malloc_skip_new_handler(size);
-#endif
   return *result != nullptr;
 }
 
diff --git a/base/process/memory_mac.mm b/base/process/memory_mac.mm
index 6cf380f..6f51fbe 100644
--- a/base/process/memory_mac.mm
+++ b/base/process/memory_mac.mm
@@ -6,7 +6,7 @@
 
 #include "base/allocator/allocator_interception_mac.h"
 #include "base/allocator/allocator_shim.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/process/memory_unittest.cc b/base/process/memory_unittest.cc
index 41c078d..3a565ee 100644
--- a/base/process/memory_unittest.cc
+++ b/base/process/memory_unittest.cc
@@ -15,7 +15,7 @@
 #include "base/debug/alias.h"
 #include "base/memory/aligned_memory.h"
 #include "base/strings/stringprintf.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_WIN)
diff --git a/base/process/memory_unittest_mac.h b/base/process/memory_unittest_mac.h
index 713589b..4d82a73 100644
--- a/base/process/memory_unittest_mac.h
+++ b/base/process/memory_unittest_mac.h
@@ -11,7 +11,7 @@
 #include <stddef.h>
 #include <sys/types.h>
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/process/memory_unittest_mac.mm b/base/process/memory_unittest_mac.mm
index 26fe1af..7ec7afd 100644
--- a/base/process/memory_unittest_mac.mm
+++ b/base/process/memory_unittest_mac.mm
@@ -3,7 +3,7 @@
 // found in the LICENSE file.
 
 #include "base/process/memory_unittest_mac.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #import <Foundation/Foundation.h>
 #include <CoreFoundation/CoreFoundation.h>
diff --git a/base/process/process.h b/base/process/process.h
index c06998e..479e24d 100644
--- a/base/process/process.h
+++ b/base/process/process.h
@@ -9,7 +9,7 @@
 #include "base/macros.h"
 #include "base/process/process_handle.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/scoped_handle.h"
diff --git a/base/process/process_handle.cc b/base/process/process_handle.cc
index 58ceb08..fde631e 100644
--- a/base/process/process_handle.cc
+++ b/base/process/process_handle.cc
@@ -6,7 +6,7 @@
 
 #include "base/logging.h"
 #include "base/process/process_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/process/process_handle.h b/base/process/process_handle.h
index f3f6343..bf9a720 100644
--- a/base/process/process_handle.h
+++ b/base/process/process_handle.h
@@ -10,7 +10,7 @@
 
 #include "base/base_export.h"
 #include "base/files/file_path.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/windows_types.h"
diff --git a/base/process/process_info.h b/base/process/process_info.h
index 5138e24..8d5aa07 100644
--- a/base/process/process_info.h
+++ b/base/process/process_info.h
@@ -6,7 +6,7 @@
 #define BASE_PROCESS_PROCESS_INFO_H_
 
 #include "base/base_export.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/process/process_info_unittest.cc b/base/process/process_info_unittest.cc
index f54d957..51b15b9 100644
--- a/base/process/process_info_unittest.cc
+++ b/base/process/process_info_unittest.cc
@@ -5,7 +5,7 @@
 #include "base/process/process_info.h"
 
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/process/process_iterator.cc b/base/process/process_iterator.cc
index 8b530a0..bc8c0a8 100644
--- a/base/process/process_iterator.cc
+++ b/base/process/process_iterator.cc
@@ -3,7 +3,7 @@
 // found in the LICENSE file.
 
 #include "base/process/process_iterator.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/process/process_iterator.h b/base/process/process_iterator.h
index b30ad41..672d58b 100644
--- a/base/process/process_iterator.h
+++ b/base/process/process_iterator.h
@@ -17,7 +17,7 @@
 #include "base/files/file_path.h"
 #include "base/macros.h"
 #include "base/process/process.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/process/process_linux.cc b/base/process/process_linux.cc
index faf39af..8cfeef2 100644
--- a/base/process/process_linux.cc
+++ b/base/process/process_linux.cc
@@ -14,7 +14,7 @@
 #include "base/strings/stringprintf.h"
 #include "base/synchronization/lock.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // Not defined on AIX by default.
 #if defined(OS_AIX)
diff --git a/base/process/process_metrics.cc b/base/process/process_metrics.cc
index c3a7063..fe9f8d3 100644
--- a/base/process/process_metrics.cc
+++ b/base/process/process_metrics.cc
@@ -8,7 +8,7 @@
 
 #include "base/logging.h"
 #include "base/values.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_MACOSX) || defined(OS_LINUX) || defined(OS_AIX)
 namespace {
diff --git a/base/process/process_metrics.h b/base/process/process_metrics.h
index 0170a0c..5d47abf 100644
--- a/base/process/process_metrics.h
+++ b/base/process/process_metrics.h
@@ -20,7 +20,7 @@
 #include "base/process/process_handle.h"
 #include "base/time/time.h"
 #include "base/values.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_MACOSX)
 #include <mach/mach.h>
diff --git a/base/process/process_metrics_iocounters.h b/base/process/process_metrics_iocounters.h
index e12d090..39f7d3b 100644
--- a/base/process/process_metrics_iocounters.h
+++ b/base/process/process_metrics_iocounters.h
@@ -11,7 +11,7 @@
 #include <stdint.h>
 
 #include "base/process/process_metrics.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/process/process_metrics_linux.cc b/base/process/process_metrics_linux.cc
index 16cde35..f045e24 100644
--- a/base/process/process_metrics_linux.cc
+++ b/base/process/process_metrics_linux.cc
@@ -26,7 +26,7 @@
 #include "base/strings/string_tokenizer.h"
 #include "base/strings/string_util.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/process/process_metrics_posix.cc b/base/process/process_metrics_posix.cc
index a09bbf2..858ab05 100644
--- a/base/process/process_metrics_posix.cc
+++ b/base/process/process_metrics_posix.cc
@@ -11,7 +11,7 @@
 #include <unistd.h>
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(OS_FUCHSIA)
 #include <sys/resource.h>
diff --git a/base/process/process_metrics_unittest.cc b/base/process/process_metrics_unittest.cc
index eba543a..aec7be1 100644
--- a/base/process/process_metrics_unittest.cc
+++ b/base/process/process_metrics_unittest.cc
@@ -22,7 +22,7 @@
 #include "base/sys_info.h"
 #include "base/test/multiprocess_test.h"
 #include "base/threading/thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/multiprocess_func_list.h"
 
diff --git a/base/process/process_posix.cc b/base/process/process_posix.cc
index 7645b78..51b57e1 100644
--- a/base/process/process_posix.cc
+++ b/base/process/process_posix.cc
@@ -16,7 +16,7 @@
 #include "base/posix/eintr_wrapper.h"
 #include "base/process/kill.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_MACOSX)
 #include <sys/event.h>
diff --git a/base/process/process_unittest.cc b/base/process/process_unittest.cc
index 9f678d1..0f05d24 100644
--- a/base/process/process_unittest.cc
+++ b/base/process/process_unittest.cc
@@ -12,7 +12,7 @@
 #include "base/test/test_timeouts.h"
 #include "base/threading/platform_thread.h"
 #include "base/threading/thread_local.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/multiprocess_func_list.h"
 
diff --git a/base/process/process_util_unittest.cc b/base/process/process_util_unittest.cc
index 8946669..81c31b2 100644
--- a/base/process/process_util_unittest.cc
+++ b/base/process/process_util_unittest.cc
@@ -33,7 +33,7 @@
 #include "base/test/test_timeouts.h"
 #include "base/threading/platform_thread.h"
 #include "base/threading/thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/multiprocess_func_list.h"
 
diff --git a/base/profiler/stack_sampling_profiler_unittest.cc b/base/profiler/stack_sampling_profiler_unittest.cc
index 8fc25c9..b25943f 100644
--- a/base/profiler/stack_sampling_profiler_unittest.cc
+++ b/base/profiler/stack_sampling_profiler_unittest.cc
@@ -29,7 +29,7 @@
 #include "base/threading/platform_thread.h"
 #include "base/threading/simple_thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_WIN)
diff --git a/base/rand_util.h b/base/rand_util.h
index 03bf46f..32c8fc7 100644
--- a/base/rand_util.h
+++ b/base/rand_util.h
@@ -12,7 +12,7 @@
 #include <string>
 
 #include "base/base_export.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/run_loop.cc b/base/run_loop.cc
index 3882f64..f15d62a 100644
--- a/base/run_loop.cc
+++ b/base/run_loop.cc
@@ -11,7 +11,7 @@
 #include "base/single_thread_task_runner.h"
 #include "base/threading/thread_local.h"
 #include "base/threading/thread_task_runner_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/run_loop.h b/base/run_loop.h
index 719f928..493d934 100644
--- a/base/run_loop.h
+++ b/base/run_loop.h
@@ -17,7 +17,7 @@
 #include "base/observer_list.h"
 #include "base/sequence_checker.h"
 #include "base/threading/thread_checker.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 #if defined(OS_ANDROID)
diff --git a/base/run_loop_unittest.cc b/base/run_loop_unittest.cc
index c7db14a..3564a2e 100644
--- a/base/run_loop_unittest.cc
+++ b/base/run_loop_unittest.cc
@@ -23,7 +23,7 @@
 #include "base/threading/thread.h"
 #include "base/threading/thread_checker_impl.h"
 #include "base/threading/thread_task_runner_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
diff --git a/base/safe_numerics_unittest.cc b/base/safe_numerics_unittest.cc
index 44675cf..7fcc5d1 100644
--- a/base/safe_numerics_unittest.cc
+++ b/base/safe_numerics_unittest.cc
@@ -29,7 +29,7 @@
 #include "base/numerics/safe_conversions.h"
 #include "base/numerics/safe_math.h"
 #include "base/test/gtest_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(COMPILER_MSVC) && defined(ARCH_CPU_32_BITS)
diff --git a/base/sampling_heap_profiler/sampling_heap_profiler.cc b/base/sampling_heap_profiler/sampling_heap_profiler.cc
index 94383a2..aa695d8 100644
--- a/base/sampling_heap_profiler/sampling_heap_profiler.cc
+++ b/base/sampling_heap_profiler/sampling_heap_profiler.cc
@@ -17,7 +17,7 @@
 #include "base/partition_alloc_buildflags.h"
 #include "base/rand_util.h"
 #include "base/threading/thread_local_storage.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/sampling_heap_profiler/sampling_heap_profiler_unittest.cc b/base/sampling_heap_profiler/sampling_heap_profiler_unittest.cc
index 6602e6c..d3bef90 100644
--- a/base/sampling_heap_profiler/sampling_heap_profiler_unittest.cc
+++ b/base/sampling_heap_profiler/sampling_heap_profiler_unittest.cc
@@ -10,7 +10,7 @@
 #include "base/allocator/allocator_shim.h"
 #include "base/debug/alias.h"
 #include "base/threading/simple_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/scoped_native_library_unittest.cc b/base/scoped_native_library_unittest.cc
index 763b45f..261e9d1 100644
--- a/base/scoped_native_library_unittest.cc
+++ b/base/scoped_native_library_unittest.cc
@@ -4,7 +4,7 @@
 
 #include "base/scoped_native_library.h"
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_WIN)
diff --git a/base/security_unittest.cc b/base/security_unittest.cc
index 3073299..9a3bc27 100644
--- a/base/security_unittest.cc
+++ b/base/security_unittest.cc
@@ -17,7 +17,7 @@
 #include "base/files/file_util.h"
 #include "base/logging.h"
 #include "base/memory/free_deleter.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_POSIX)
diff --git a/base/strings/old_utf_string_conversions.cc b/base/strings/old_utf_string_conversions.cc
index 5cab038..b6edb29 100644
--- a/base/strings/old_utf_string_conversions.cc
+++ b/base/strings/old_utf_string_conversions.cc
@@ -9,7 +9,7 @@
 #include "base/strings/string_piece.h"
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversion_utils.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base_old {
 
diff --git a/base/strings/safe_sprintf.cc b/base/strings/safe_sprintf.cc
index 4d695cf..8c6e8ca 100644
--- a/base/strings/safe_sprintf.cc
+++ b/base/strings/safe_sprintf.cc
@@ -10,7 +10,7 @@
 #include <limits>
 
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(NDEBUG)
 // In debug builds, we use RAW_CHECK() to print useful error messages, if
diff --git a/base/strings/safe_sprintf.h b/base/strings/safe_sprintf.h
index 01d649d..56a6eaf 100644
--- a/base/strings/safe_sprintf.h
+++ b/base/strings/safe_sprintf.h
@@ -5,7 +5,7 @@
 #ifndef BASE_STRINGS_SAFE_SPRINTF_H_
 #define BASE_STRINGS_SAFE_SPRINTF_H_
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #include <stddef.h>
 #include <stdint.h>
diff --git a/base/strings/safe_sprintf_unittest.cc b/base/strings/safe_sprintf_unittest.cc
index bb9908f..4a0948b 100644
--- a/base/strings/safe_sprintf_unittest.cc
+++ b/base/strings/safe_sprintf_unittest.cc
@@ -14,7 +14,7 @@
 
 #include "base/logging.h"
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 // Death tests on Android are currently very flaky. No need to add more flaky
diff --git a/base/strings/strcat.h b/base/strings/strcat.h
index 44c6211..b2fcaae 100644
--- a/base/strings/strcat.h
+++ b/base/strings/strcat.h
@@ -11,7 +11,7 @@
 #include "base/compiler_specific.h"
 #include "base/containers/span.h"
 #include "base/strings/string_piece.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 // To resolve a conflict with Win32 API StrCat macro.
diff --git a/base/strings/string16.h b/base/strings/string16.h
index a86baa2..570a6d5 100644
--- a/base/strings/string16.h
+++ b/base/strings/string16.h
@@ -34,7 +34,7 @@
 #include <string>
 
 #include "base/base_export.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(WCHAR_T_IS_UTF16)
 
diff --git a/base/strings/string_number_conversions.h b/base/strings/string_number_conversions.h
index 057b60a..b59ce46 100644
--- a/base/strings/string_number_conversions.h
+++ b/base/strings/string_number_conversions.h
@@ -14,7 +14,7 @@
 #include "base/base_export.h"
 #include "base/strings/string16.h"
 #include "base/strings/string_piece.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // ----------------------------------------------------------------------------
 // IMPORTANT MESSAGE FROM YOUR SPONSOR
diff --git a/base/strings/string_util.cc b/base/strings/string_util.cc
index 32e5ff2..6293f6d 100644
--- a/base/strings/string_util.cc
+++ b/base/strings/string_util.cc
@@ -26,7 +26,7 @@
 #include "base/strings/utf_string_conversion_utils.h"
 #include "base/strings/utf_string_conversions.h"
 #include "base/third_party/icu/icu_utf.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/strings/string_util.h b/base/strings/string_util.h
index d6780ec..f1045bb 100644
--- a/base/strings/string_util.h
+++ b/base/strings/string_util.h
@@ -20,7 +20,7 @@
 #include "base/compiler_specific.h"
 #include "base/strings/string16.h"
 #include "base/strings/string_piece.h"  // For implicit conversions.
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/strings/stringize_macros.h b/base/strings/stringize_macros.h
index d4e2707..95322e0 100644
--- a/base/strings/stringize_macros.h
+++ b/base/strings/stringize_macros.h
@@ -9,7 +9,7 @@
 #ifndef BASE_STRINGS_STRINGIZE_MACROS_H_
 #define BASE_STRINGS_STRINGIZE_MACROS_H_
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 // This is not very useful as it does not expand defined symbols if
 // called directly. Use its counterpart without the _NO_EXPANSION
diff --git a/base/strings/stringprintf.cc b/base/strings/stringprintf.cc
index 415845d..dba4318 100644
--- a/base/strings/stringprintf.cc
+++ b/base/strings/stringprintf.cc
@@ -13,7 +13,7 @@
 #include "base/scoped_clear_errno.h"
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/strings/stringprintf.h b/base/strings/stringprintf.h
index 7a75d89..83969bb 100644
--- a/base/strings/stringprintf.h
+++ b/base/strings/stringprintf.h
@@ -11,7 +11,7 @@
 
 #include "base/base_export.h"
 #include "base/compiler_specific.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/strings/stringprintf_unittest.cc b/base/strings/stringprintf_unittest.cc
index 3d43e8c..02cd349 100644
--- a/base/strings/stringprintf_unittest.cc
+++ b/base/strings/stringprintf_unittest.cc
@@ -8,7 +8,7 @@
 #include <stddef.h>
 
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/strings/sys_string_conversions.h b/base/strings/sys_string_conversions.h
index 1ad0307..875cafb 100644
--- a/base/strings/sys_string_conversions.h
+++ b/base/strings/sys_string_conversions.h
@@ -16,7 +16,7 @@
 #include "base/base_export.h"
 #include "base/strings/string16.h"
 #include "base/strings/string_piece.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_MACOSX)
 #include <CoreFoundation/CoreFoundation.h>
diff --git a/base/strings/sys_string_conversions_posix.cc b/base/strings/sys_string_conversions_posix.cc
index cfa7b76..97b027a 100644
--- a/base/strings/sys_string_conversions_posix.cc
+++ b/base/strings/sys_string_conversions_posix.cc
@@ -9,7 +9,7 @@
 
 #include "base/strings/string_piece.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/strings/sys_string_conversions_unittest.cc b/base/strings/sys_string_conversions_unittest.cc
index f5ffaec..0f4eddd 100644
--- a/base/strings/sys_string_conversions_unittest.cc
+++ b/base/strings/sys_string_conversions_unittest.cc
@@ -11,7 +11,7 @@
 #include "base/strings/sys_string_conversions.h"
 #include "base/strings/utf_string_conversions.h"
 #include "base/test/scoped_locale.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #ifdef WCHAR_T_IS_UTF32
diff --git a/base/strings/utf_string_conversion_utils.cc b/base/strings/utf_string_conversion_utils.cc
index f7682c1..89f02d5 100644
--- a/base/strings/utf_string_conversion_utils.cc
+++ b/base/strings/utf_string_conversion_utils.cc
@@ -5,7 +5,7 @@
 #include "base/strings/utf_string_conversion_utils.h"
 
 #include "base/third_party/icu/icu_utf.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/strings/utf_string_conversions.cc b/base/strings/utf_string_conversions.cc
index 89acc38..6a32e39 100644
--- a/base/strings/utf_string_conversions.cc
+++ b/base/strings/utf_string_conversions.cc
@@ -10,7 +10,7 @@
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversion_utils.h"
 #include "base/third_party/icu/icu_utf.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/strings/utf_string_conversions_unittest.cc b/base/strings/utf_string_conversions_unittest.cc
index 6f5e60c..ce776c8 100644
--- a/base/strings/utf_string_conversions_unittest.cc
+++ b/base/strings/utf_string_conversions_unittest.cc
@@ -9,7 +9,7 @@
 #include "base/strings/string_piece.h"
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/sync_socket.h b/base/sync_socket.h
index 42db9a2..9d582da 100644
--- a/base/sync_socket.h
+++ b/base/sync_socket.h
@@ -17,7 +17,7 @@
 #include "base/process/process_handle.h"
 #include "base/synchronization/waitable_event.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/base/sync_socket_posix.cc b/base/sync_socket_posix.cc
index ff1e0e6..816bb73 100644
--- a/base/sync_socket_posix.cc
+++ b/base/sync_socket_posix.cc
@@ -21,7 +21,7 @@
 #include "base/files/file_util.h"
 #include "base/logging.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/synchronization/atomic_flag_unittest.cc b/base/synchronization/atomic_flag_unittest.cc
index f7daafa..1a3da7f 100644
--- a/base/synchronization/atomic_flag_unittest.cc
+++ b/base/synchronization/atomic_flag_unittest.cc
@@ -11,7 +11,7 @@
 #include "base/test/gtest_util.h"
 #include "base/threading/platform_thread.h"
 #include "base/threading/thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/synchronization/condition_variable.h b/base/synchronization/condition_variable.h
index dfcf813..9ec9cd8 100644
--- a/base/synchronization/condition_variable.h
+++ b/base/synchronization/condition_variable.h
@@ -73,7 +73,7 @@
 #include "base/logging.h"
 #include "base/macros.h"
 #include "base/synchronization/lock.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/windows_types.h"
diff --git a/base/synchronization/condition_variable_posix.cc b/base/synchronization/condition_variable_posix.cc
index f263252..b234341 100644
--- a/base/synchronization/condition_variable_posix.cc
+++ b/base/synchronization/condition_variable_posix.cc
@@ -12,7 +12,7 @@
 #include "base/threading/scoped_blocking_call.h"
 #include "base/threading/thread_restrictions.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/synchronization/condition_variable_unittest.cc b/base/synchronization/condition_variable_unittest.cc
index 705257a..929060a 100644
--- a/base/synchronization/condition_variable_unittest.cc
+++ b/base/synchronization/condition_variable_unittest.cc
@@ -22,7 +22,7 @@
 #include "base/threading/thread.h"
 #include "base/threading/thread_collision_warner.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/platform_test.h"
 
diff --git a/base/synchronization/lock.h b/base/synchronization/lock.h
index d1c647c..ecfa854 100644
--- a/base/synchronization/lock.h
+++ b/base/synchronization/lock.h
@@ -10,7 +10,7 @@
 #include "base/macros.h"
 #include "base/synchronization/lock_impl.h"
 #include "base/threading/platform_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/synchronization/lock_impl.h b/base/synchronization/lock_impl.h
index 221d763..e9da872 100644
--- a/base/synchronization/lock_impl.h
+++ b/base/synchronization/lock_impl.h
@@ -8,7 +8,7 @@
 #include "base/base_export.h"
 #include "base/logging.h"
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/windows_types.h"
diff --git a/base/synchronization/lock_impl_posix.cc b/base/synchronization/lock_impl_posix.cc
index 392c53c..1cfa88a 100644
--- a/base/synchronization/lock_impl_posix.cc
+++ b/base/synchronization/lock_impl_posix.cc
@@ -11,7 +11,7 @@
 #include "base/posix/safe_strerror.h"
 #include "base/strings/stringprintf.h"
 #include "base/synchronization/lock.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace internal {
diff --git a/base/synchronization/waitable_event.h b/base/synchronization/waitable_event.h
index 836adc0..64f5719 100644
--- a/base/synchronization/waitable_event.h
+++ b/base/synchronization/waitable_event.h
@@ -9,7 +9,7 @@
 
 #include "base/base_export.h"
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/scoped_handle.h"
diff --git a/base/synchronization/waitable_event_mac.cc b/base/synchronization/waitable_event_mac.cc
index ad6f8cb..56e6cb3 100644
--- a/base/synchronization/waitable_event_mac.cc
+++ b/base/synchronization/waitable_event_mac.cc
@@ -17,7 +17,7 @@
 #include "base/posix/eintr_wrapper.h"
 #include "base/threading/scoped_blocking_call.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/synchronization/waitable_event_unittest.cc b/base/synchronization/waitable_event_unittest.cc
index e1d2683..9c981d8 100644
--- a/base/synchronization/waitable_event_unittest.cc
+++ b/base/synchronization/waitable_event_unittest.cc
@@ -11,7 +11,7 @@
 #include "base/compiler_specific.h"
 #include "base/threading/platform_thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/synchronization/waitable_event_watcher.h b/base/synchronization/waitable_event_watcher.h
index 51728e3..5d0cf71 100644
--- a/base/synchronization/waitable_event_watcher.h
+++ b/base/synchronization/waitable_event_watcher.h
@@ -8,7 +8,7 @@
 #include "base/base_export.h"
 #include "base/macros.h"
 #include "base/sequenced_task_runner.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/object_watcher.h"
diff --git a/base/synchronization/waitable_event_watcher_unittest.cc b/base/synchronization/waitable_event_watcher_unittest.cc
index ec056ef..bdb45a3 100644
--- a/base/synchronization/waitable_event_watcher_unittest.cc
+++ b/base/synchronization/waitable_event_watcher_unittest.cc
@@ -13,7 +13,7 @@
 #include "base/synchronization/waitable_event.h"
 #include "base/threading/platform_thread.h"
 #include "base/threading/sequenced_task_runner_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/sys_byteorder.h b/base/sys_byteorder.h
index 9ee1827..767210c 100644
--- a/base/sys_byteorder.h
+++ b/base/sys_byteorder.h
@@ -14,7 +14,7 @@
 #include <stdint.h>
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(COMPILER_MSVC)
 #include <stdlib.h>
diff --git a/base/sys_byteorder_unittest.cc b/base/sys_byteorder_unittest.cc
index 8167be3..f5bad31 100644
--- a/base/sys_byteorder_unittest.cc
+++ b/base/sys_byteorder_unittest.cc
@@ -6,7 +6,7 @@
 
 #include <stdint.h>
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace {
diff --git a/base/sys_info.cc b/base/sys_info.cc
index 379d7f2..4250ca9 100644
--- a/base/sys_info.cc
+++ b/base/sys_info.cc
@@ -11,7 +11,7 @@
 #include "base/lazy_instance.h"
 #include "base/sys_info_internal.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace {
diff --git a/base/sys_info.h b/base/sys_info.h
index 6e58715..19ed0e4 100644
--- a/base/sys_info.h
+++ b/base/sys_info.h
@@ -15,7 +15,7 @@
 #include "base/files/file_path.h"
 #include "base/gtest_prod_util.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/sys_info_linux.cc b/base/sys_info_linux.cc
index b1fecff..f8fc1ae 100644
--- a/base/sys_info_linux.cc
+++ b/base/sys_info_linux.cc
@@ -16,7 +16,7 @@
 #include "base/process/process_metrics.h"
 #include "base/strings/string_number_conversions.h"
 #include "base/sys_info_internal.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace {
 
diff --git a/base/sys_info_posix.cc b/base/sys_info_posix.cc
index f6fcd10..b3d18b9 100644
--- a/base/sys_info_posix.cc
+++ b/base/sys_info_posix.cc
@@ -18,7 +18,7 @@
 #include "base/strings/utf_string_conversions.h"
 #include "base/sys_info_internal.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(OS_FUCHSIA)
 #include <sys/resource.h>
diff --git a/base/sys_info_unittest.cc b/base/sys_info_unittest.cc
index e97ab57..7b2c458 100644
--- a/base/sys_info_unittest.cc
+++ b/base/sys_info_unittest.cc
@@ -12,7 +12,7 @@
 #include "base/sys_info.h"
 #include "base/threading/platform_thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/platform_test.h"
 
diff --git a/base/syslog_logging.h b/base/syslog_logging.h
index 736a5b2..2c2ff55 100644
--- a/base/syslog_logging.h
+++ b/base/syslog_logging.h
@@ -8,7 +8,7 @@
 #include <iosfwd>
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace logging {
 
diff --git a/base/system_monitor/system_monitor.h b/base/system_monitor/system_monitor.h
index 7f21e47..f0828d1 100644
--- a/base/system_monitor/system_monitor.h
+++ b/base/system_monitor/system_monitor.h
@@ -9,7 +9,7 @@
 #include "base/macros.h"
 #include "base/memory/ref_counted.h"
 #include "base/observer_list_threadsafe.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/task_scheduler/lazy_task_runner.h b/base/task_scheduler/lazy_task_runner.h
index 7fcbddf..c0e0940 100644
--- a/base/task_scheduler/lazy_task_runner.h
+++ b/base/task_scheduler/lazy_task_runner.h
@@ -16,7 +16,7 @@
 #include "base/task_scheduler/scheduler_lock.h"
 #include "base/task_scheduler/single_thread_task_runner_thread_mode.h"
 #include "base/task_scheduler/task_traits.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // Lazy(Sequenced|SingleThread|COMSTA)TaskRunner lazily creates a TaskRunner.
 //
diff --git a/base/task_scheduler/lazy_task_runner_unittest.cc b/base/task_scheduler/lazy_task_runner_unittest.cc
index 3ca09c9..e898a1e 100644
--- a/base/task_scheduler/lazy_task_runner_unittest.cc
+++ b/base/task_scheduler/lazy_task_runner_unittest.cc
@@ -10,7 +10,7 @@
 #include "base/task_scheduler/scoped_set_task_priority_for_current_thread.h"
 #include "base/test/scoped_task_environment.h"
 #include "base/threading/thread_checker_impl.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_WIN)
diff --git a/base/task_scheduler/post_task.h b/base/task_scheduler/post_task.h
index d757c85..042c1a0 100644
--- a/base/task_scheduler/post_task.h
+++ b/base/task_scheduler/post_task.h
@@ -19,7 +19,7 @@
 #include "base/task_scheduler/single_thread_task_runner_thread_mode.h"
 #include "base/task_scheduler/task_traits.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/task_scheduler/scheduler_single_thread_task_runner_manager.h b/base/task_scheduler/scheduler_single_thread_task_runner_manager.h
index b25230d..5c5bb6c 100644
--- a/base/task_scheduler/scheduler_single_thread_task_runner_manager.h
+++ b/base/task_scheduler/scheduler_single_thread_task_runner_manager.h
@@ -17,7 +17,7 @@
 #include "base/task_scheduler/single_thread_task_runner_thread_mode.h"
 #include "base/task_scheduler/tracked_ref.h"
 #include "base/threading/platform_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/task_scheduler/scheduler_worker.h b/base/task_scheduler/scheduler_worker.h
index 8bcfcb3..002db45 100644
--- a/base/task_scheduler/scheduler_worker.h
+++ b/base/task_scheduler/scheduler_worker.h
@@ -19,7 +19,7 @@
 #include "base/task_scheduler/tracked_ref.h"
 #include "base/threading/platform_thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/com_init_check_hook.h"
diff --git a/base/task_scheduler/scheduler_worker_pool_impl.h b/base/task_scheduler/scheduler_worker_pool_impl.h
index d9a169b..997fcc9 100644
--- a/base/task_scheduler/scheduler_worker_pool_impl.h
+++ b/base/task_scheduler/scheduler_worker_pool_impl.h
@@ -30,7 +30,7 @@
 #include "base/task_scheduler/task.h"
 #include "base/task_scheduler/tracked_ref.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/task_scheduler/scheduler_worker_pool_impl_unittest.cc b/base/task_scheduler/scheduler_worker_pool_impl_unittest.cc
index 5f099b3..21e77f4 100644
--- a/base/task_scheduler/scheduler_worker_pool_impl_unittest.cc
+++ b/base/task_scheduler/scheduler_worker_pool_impl_unittest.cc
@@ -43,7 +43,7 @@
 #include "base/threading/thread_local_storage.h"
 #include "base/threading/thread_restrictions.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_WIN)
diff --git a/base/task_scheduler/scheduler_worker_pool_unittest.cc b/base/task_scheduler/scheduler_worker_pool_unittest.cc
index 717409b..20c1ad0 100644
--- a/base/task_scheduler/scheduler_worker_pool_unittest.cc
+++ b/base/task_scheduler/scheduler_worker_pool_unittest.cc
@@ -22,7 +22,7 @@
 #include "base/threading/platform_thread.h"
 #include "base/threading/simple_thread.h"
 #include "base/threading/thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_WIN)
diff --git a/base/task_scheduler/scheduler_worker_unittest.cc b/base/task_scheduler/scheduler_worker_unittest.cc
index cbeb355..b7a2eda 100644
--- a/base/task_scheduler/scheduler_worker_unittest.cc
+++ b/base/task_scheduler/scheduler_worker_unittest.cc
@@ -26,7 +26,7 @@
 #include "base/threading/platform_thread.h"
 #include "base/threading/simple_thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
diff --git a/base/task_scheduler/service_thread_unittest.cc b/base/task_scheduler/service_thread_unittest.cc
index 9f61f9b..df4d8d6 100644
--- a/base/task_scheduler/service_thread_unittest.cc
+++ b/base/task_scheduler/service_thread_unittest.cc
@@ -13,7 +13,7 @@
 #include "base/test/histogram_tester.h"
 #include "base/threading/platform_thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/task_scheduler/task_scheduler.h b/base/task_scheduler/task_scheduler.h
index cb6d097..8881028 100644
--- a/base/task_scheduler/task_scheduler.h
+++ b/base/task_scheduler/task_scheduler.h
@@ -20,7 +20,7 @@
 #include "base/task_scheduler/single_thread_task_runner_thread_mode.h"
 #include "base/task_scheduler/task_traits.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace gin {
 class V8Platform;
diff --git a/base/task_scheduler/task_scheduler_impl.h b/base/task_scheduler/task_scheduler_impl.h
index 81a5a87..4ad7fc2 100644
--- a/base/task_scheduler/task_scheduler_impl.h
+++ b/base/task_scheduler/task_scheduler_impl.h
@@ -23,7 +23,7 @@
 #include "base/task_scheduler/task_scheduler.h"
 #include "base/task_scheduler/task_tracker.h"
 #include "base/task_scheduler/task_traits.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX) && !defined(OS_NACL_SFI)
 #include "base/task_scheduler/task_tracker_posix.h"
diff --git a/base/task_scheduler/task_scheduler_impl_unittest.cc b/base/task_scheduler/task_scheduler_impl_unittest.cc
index 4fe4a25..6eee033 100644
--- a/base/task_scheduler/task_scheduler_impl_unittest.cc
+++ b/base/task_scheduler/task_scheduler_impl_unittest.cc
@@ -32,7 +32,7 @@
 #include "base/threading/thread.h"
 #include "base/threading/thread_restrictions.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_POSIX)
diff --git a/base/task_scheduler/task_traits.h b/base/task_scheduler/task_traits.h
index a4a41fe..1adba6d 100644
--- a/base/task_scheduler/task_traits.h
+++ b/base/task_scheduler/task_traits.h
@@ -12,7 +12,7 @@
 
 #include "base/base_export.h"
 #include "base/task_scheduler/task_traits_details.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/template_util.h b/base/template_util.h
index 8544aa2..11e7ba8 100644
--- a/base/template_util.h
+++ b/base/template_util.h
@@ -12,7 +12,7 @@
 #include <utility>
 #include <vector>
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 // Some versions of libstdc++ have partial support for type_traits, but misses
 // a smaller subset while removing some of the older non-standard stuff. Assume
diff --git a/base/test/gtest_util.h b/base/test/gtest_util.h
index df2bce9..e4ec0bf 100644
--- a/base/test/gtest_util.h
+++ b/base/test/gtest_util.h
@@ -11,7 +11,7 @@
 
 #include "base/compiler_specific.h"
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 // EXPECT/ASSERT_DCHECK_DEATH is intended to replace EXPECT/ASSERT_DEBUG_DEATH
diff --git a/base/test/launcher/test_launcher.cc b/base/test/launcher/test_launcher.cc
index 71eb1ad..b5197c5 100644
--- a/base/test/launcher/test_launcher.cc
+++ b/base/test/launcher/test_launcher.cc
@@ -48,7 +48,7 @@
 #include "base/threading/thread_restrictions.h"
 #include "base/threading/thread_task_runner_handle.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_POSIX)
diff --git a/base/test/launcher/test_launcher.h b/base/test/launcher/test_launcher.h
index 88b9f1f..fe9f080 100644
--- a/base/test/launcher/test_launcher.h
+++ b/base/test/launcher/test_launcher.h
@@ -22,7 +22,7 @@
 #include "base/threading/thread_checker.h"
 #include "base/time/time.h"
 #include "base/timer/timer.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/test/launcher/test_launcher_nacl_nonsfi.cc b/base/test/launcher/test_launcher_nacl_nonsfi.cc
index bdc4f67..eeb0d92 100644
--- a/base/test/launcher/test_launcher_nacl_nonsfi.cc
+++ b/base/test/launcher/test_launcher_nacl_nonsfi.cc
@@ -21,7 +21,7 @@
 #include "base/test/launcher/unit_test_launcher.h"
 #include "base/test/test_switches.h"
 #include "base/test/test_timeouts.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX)
 #include "base/files/file_descriptor_watcher_posix.h"
diff --git a/base/test/launcher/unit_test_launcher.cc b/base/test/launcher/unit_test_launcher.cc
index 1d4439c..230ecc3 100644
--- a/base/test/launcher/unit_test_launcher.cc
+++ b/base/test/launcher/unit_test_launcher.cc
@@ -33,7 +33,7 @@
 #include "base/third_party/dynamic_annotations/dynamic_annotations.h"
 #include "base/threading/thread_checker.h"
 #include "base/threading/thread_task_runner_handle.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_POSIX)
diff --git a/base/test/launcher/unit_test_launcher.h b/base/test/launcher/unit_test_launcher.h
index 0d1c21e..4e3d314 100644
--- a/base/test/launcher/unit_test_launcher.h
+++ b/base/test/launcher/unit_test_launcher.h
@@ -14,7 +14,7 @@
 #include "base/files/file_path.h"
 #include "base/macros.h"
 #include "base/test/launcher/test_launcher.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/test/multiprocess_test.cc b/base/test/multiprocess_test.cc
index 46556f7..7bc26e3 100644
--- a/base/test/multiprocess_test.cc
+++ b/base/test/multiprocess_test.cc
@@ -9,7 +9,7 @@
 #include "base/files/file_path.h"
 #include "base/files/file_util.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/test/multiprocess_test.h b/base/test/multiprocess_test.h
index 7c00d37..3fc0b61 100644
--- a/base/test/multiprocess_test.h
+++ b/base/test/multiprocess_test.h
@@ -10,7 +10,7 @@
 #include "base/macros.h"
 #include "base/process/launch.h"
 #include "base/process/process.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/platform_test.h"
 
 namespace base {
diff --git a/base/test/native_library_test_utils.h b/base/test/native_library_test_utils.h
index e26fd1a..c6c46ff 100644
--- a/base/test/native_library_test_utils.h
+++ b/base/test/native_library_test_utils.h
@@ -5,7 +5,7 @@
 #ifndef BASE_TEST_NATIVE_LIBRARY_TEST_UTILS_H_
 #define BASE_TEST_NATIVE_LIBRARY_TEST_UTILS_H_
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #define NATIVE_LIBRARY_TEST_ALWAYS_EXPORT __declspec(dllexport)
diff --git a/base/test/perf_test_suite.cc b/base/test/perf_test_suite.cc
index 2e2cdbb..e789883 100644
--- a/base/test/perf_test_suite.cc
+++ b/base/test/perf_test_suite.cc
@@ -11,7 +11,7 @@
 #include "base/process/launch.h"
 #include "base/strings/string_util.h"
 #include "base/test/perf_log.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/test/run_all_base_unittests.cc b/base/test/run_all_base_unittests.cc
index da52310..5d44604 100644
--- a/base/test/run_all_base_unittests.cc
+++ b/base/test/run_all_base_unittests.cc
@@ -5,7 +5,7 @@
 #include "base/bind.h"
 #include "base/test/launcher/unit_test_launcher.h"
 #include "base/test/test_suite.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 int main(int argc, char** argv) {
   base::TestSuite test_suite(argc, argv);
diff --git a/base/test/run_all_unittests.cc b/base/test/run_all_unittests.cc
index 0ad84ed..dd5a0ad 100644
--- a/base/test/run_all_unittests.cc
+++ b/base/test/run_all_unittests.cc
@@ -5,7 +5,7 @@
 #include "base/bind.h"
 #include "base/test/launcher/unit_test_launcher.h"
 #include "base/test/test_suite.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 int main(int argc, char** argv) {
   base::TestSuite test_suite(argc, argv);
diff --git a/base/test/scoped_task_environment.h b/base/test/scoped_task_environment.h
index f9523b3..038cca3 100644
--- a/base/test/scoped_task_environment.h
+++ b/base/test/scoped_task_environment.h
@@ -9,7 +9,7 @@
 #include "base/memory/ref_counted.h"
 #include "base/single_thread_task_runner.h"
 #include "base/task_scheduler/lazy_task_runner.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/test/scoped_task_environment_unittest.cc b/base/test/scoped_task_environment_unittest.cc
index 478fa5e..1c10d69 100644
--- a/base/test/scoped_task_environment_unittest.cc
+++ b/base/test/scoped_task_environment_unittest.cc
@@ -17,7 +17,7 @@
 #include "base/threading/sequence_local_storage_slot.h"
 #include "base/threading/thread_task_runner_handle.h"
 #include "base/time/tick_clock.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_POSIX)
diff --git a/base/test/test_file_util.h b/base/test/test_file_util.h
index d9172d7..f167ca7 100644
--- a/base/test/test_file_util.h
+++ b/base/test/test_file_util.h
@@ -14,7 +14,7 @@
 #include "base/compiler_specific.h"
 #include "base/files/file_path.h"
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_ANDROID)
 #include <jni.h>
diff --git a/base/test/test_file_util_posix.cc b/base/test/test_file_util_posix.cc
index 87290fb..2082709 100644
--- a/base/test/test_file_util_posix.cc
+++ b/base/test/test_file_util_posix.cc
@@ -17,7 +17,7 @@
 #include "base/logging.h"
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/test/test_shared_memory_util.cc b/base/test/test_shared_memory_util.cc
index cfc96a9..5cde5b1 100644
--- a/base/test/test_shared_memory_util.cc
+++ b/base/test/test_shared_memory_util.cc
@@ -10,7 +10,7 @@
 #include <stdint.h>
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX) && !defined(OS_NACL)
 #include <errno.h>
diff --git a/base/test/test_suite.cc b/base/test/test_suite.cc
index 3d53097..3bbb983 100644
--- a/base/test/test_suite.cc
+++ b/base/test/test_suite.cc
@@ -34,7 +34,7 @@
 #include "base/test/test_switches.h"
 #include "base/test/test_timeouts.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/multiprocess_func_list.h"
diff --git a/base/test/test_suite.h b/base/test/test_suite.h
index 6d852ba..3547acf 100644
--- a/base/test/test_suite.h
+++ b/base/test/test_suite.h
@@ -17,7 +17,7 @@
 #include "base/macros.h"
 #include "base/test/scoped_feature_list.h"
 #include "base/test/trace_to_file.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace testing {
 class TestInfo;
diff --git a/base/test/test_timeouts.cc b/base/test/test_timeouts.cc
index dd5acbc..4c8b4ab 100644
--- a/base/test/test_timeouts.cc
+++ b/base/test/test_timeouts.cc
@@ -11,7 +11,7 @@
 #include "base/logging.h"
 #include "base/strings/string_number_conversions.h"
 #include "base/test/test_switches.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace {
 
diff --git a/base/third_party/nspr/prtime.cc b/base/third_party/nspr/prtime.cc
index c125160..8a3f7c0 100644
--- a/base/third_party/nspr/prtime.cc
+++ b/base/third_party/nspr/prtime.cc
@@ -69,7 +69,7 @@
 
 #include "base/logging.h"
 #include "base/third_party/nspr/prtime.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #include <errno.h>  /* for EINVAL */
 #include <time.h>
diff --git a/base/threading/platform_thread.h b/base/threading/platform_thread.h
index faeb858..b891aef 100644
--- a/base/threading/platform_thread.h
+++ b/base/threading/platform_thread.h
@@ -14,7 +14,7 @@
 #include "base/base_export.h"
 #include "base/macros.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/windows_types.h"
diff --git a/base/threading/platform_thread_linux.cc b/base/threading/platform_thread_linux.cc
index 190aced..27d83d6 100644
--- a/base/threading/platform_thread_linux.cc
+++ b/base/threading/platform_thread_linux.cc
@@ -14,7 +14,7 @@
 #include "base/strings/string_number_conversions.h"
 #include "base/threading/platform_thread_internal_posix.h"
 #include "base/threading/thread_id_name_manager.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(OS_NACL) && !defined(OS_AIX)
 #include <pthread.h>
diff --git a/base/threading/platform_thread_mac.mm b/base/threading/platform_thread_mac.mm
index 39d979d..8f50203 100644
--- a/base/threading/platform_thread_mac.mm
+++ b/base/threading/platform_thread_mac.mm
@@ -18,7 +18,7 @@
 #include "base/mac/foundation_util.h"
 #include "base/mac/mach_logging.h"
 #include "base/threading/thread_id_name_manager.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/threading/platform_thread_posix.cc b/base/threading/platform_thread_posix.cc
index 2466b78..a5ddb2e 100644
--- a/base/threading/platform_thread_posix.cc
+++ b/base/threading/platform_thread_posix.cc
@@ -21,7 +21,7 @@
 #include "base/threading/platform_thread_internal_posix.h"
 #include "base/threading/thread_id_name_manager.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_LINUX)
 #include <sys/syscall.h>
diff --git a/base/threading/platform_thread_unittest.cc b/base/threading/platform_thread_unittest.cc
index 7eea22e..968f0e0 100644
--- a/base/threading/platform_thread_unittest.cc
+++ b/base/threading/platform_thread_unittest.cc
@@ -8,7 +8,7 @@
 #include "base/macros.h"
 #include "base/synchronization/waitable_event.h"
 #include "base/threading/platform_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_POSIX)
diff --git a/base/threading/thread.cc b/base/threading/thread.cc
index 97e160f..e3ebb80 100644
--- a/base/threading/thread.cc
+++ b/base/threading/thread.cc
@@ -15,7 +15,7 @@
 #include "base/threading/thread_id_name_manager.h"
 #include "base/threading/thread_local.h"
 #include "base/threading/thread_restrictions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_POSIX) && !defined(OS_NACL)
 #include "base/files/file_descriptor_watcher_posix.h"
diff --git a/base/threading/thread.h b/base/threading/thread.h
index 9fbdcb8..86a454c 100644
--- a/base/threading/thread.h
+++ b/base/threading/thread.h
@@ -21,7 +21,7 @@
 #include "base/synchronization/lock.h"
 #include "base/synchronization/waitable_event.h"
 #include "base/threading/platform_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/threading/thread_local_storage.cc b/base/threading/thread_local_storage.cc
index 21fd323..466bd2f 100644
--- a/base/threading/thread_local_storage.cc
+++ b/base/threading/thread_local_storage.cc
@@ -7,7 +7,7 @@
 #include "base/atomicops.h"
 #include "base/logging.h"
 #include "base/synchronization/lock.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 using base::internal::PlatformThreadLocalStorage;
 
diff --git a/base/threading/thread_local_storage.h b/base/threading/thread_local_storage.h
index f84ac33..f9c7e98 100644
--- a/base/threading/thread_local_storage.h
+++ b/base/threading/thread_local_storage.h
@@ -10,7 +10,7 @@
 #include "base/atomicops.h"
 #include "base/base_export.h"
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/win/windows_types.h"
diff --git a/base/threading/thread_local_storage_unittest.cc b/base/threading/thread_local_storage_unittest.cc
index 9062ff0..006f928 100644
--- a/base/threading/thread_local_storage_unittest.cc
+++ b/base/threading/thread_local_storage_unittest.cc
@@ -12,7 +12,7 @@
 #include "base/macros.h"
 #include "base/no_destructor.h"
 #include "base/threading/simple_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_WIN)
diff --git a/base/threading/thread_perftest.cc b/base/threading/thread_perftest.cc
index bf89049..272420e 100644
--- a/base/threading/thread_perftest.cc
+++ b/base/threading/thread_perftest.cc
@@ -20,7 +20,7 @@
 #include "base/synchronization/waitable_event.h"
 #include "base/threading/thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/perf/perf_test.h"
 
diff --git a/base/threading/thread_unittest.cc b/base/threading/thread_unittest.cc
index d90b1f9..a1a9ed3 100644
--- a/base/threading/thread_unittest.cc
+++ b/base/threading/thread_unittest.cc
@@ -23,7 +23,7 @@
 #include "base/third_party/dynamic_annotations/dynamic_annotations.h"
 #include "base/threading/platform_thread.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "testing/platform_test.h"
 
diff --git a/base/time/pr_time_unittest.cc b/base/time/pr_time_unittest.cc
index 6fce4ab..341d778 100644
--- a/base/time/pr_time_unittest.cc
+++ b/base/time/pr_time_unittest.cc
@@ -9,7 +9,7 @@
 #include "base/macros.h"
 #include "base/third_party/nspr/prtime.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 using base::Time;
diff --git a/base/time/time.cc b/base/time/time.cc
index 9c541a4..577895c 100644
--- a/base/time/time.cc
+++ b/base/time/time.cc
@@ -16,7 +16,7 @@
 #include "base/strings/stringprintf.h"
 #include "base/third_party/nspr/prtime.h"
 #include "base/time/time_override.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/time/time.h b/base/time/time.h
index 329dbd3..a760c55 100644
--- a/base/time/time.h
+++ b/base/time/time.h
@@ -61,7 +61,7 @@
 #include "base/compiler_specific.h"
 #include "base/logging.h"
 #include "base/numerics/safe_math.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_FUCHSIA)
 #include <zircon/types.h>
diff --git a/base/time/time_exploded_posix.cc b/base/time/time_exploded_posix.cc
index 627c6b4..ad95167 100644
--- a/base/time/time_exploded_posix.cc
+++ b/base/time/time_exploded_posix.cc
@@ -16,7 +16,7 @@
 
 #include "base/numerics/safe_math.h"
 #include "base/synchronization/lock.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_ANDROID)
 #include "base/os_compat_android.h"
diff --git a/base/time/time_mac.cc b/base/time/time_mac.cc
index 7ae7459..567696d 100644
--- a/base/time/time_mac.cc
+++ b/base/time/time_mac.cc
@@ -22,7 +22,7 @@
 #include "base/macros.h"
 #include "base/numerics/safe_conversions.h"
 #include "base/time/time_override.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_IOS)
 #include <time.h>
diff --git a/base/time/time_now_posix.cc b/base/time/time_now_posix.cc
index 5427836..f19a19f 100644
--- a/base/time/time_now_posix.cc
+++ b/base/time/time_now_posix.cc
@@ -15,7 +15,7 @@
 #include "base/logging.h"
 #include "base/numerics/safe_math.h"
 #include "base/time/time_override.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // Ensure the Fuchsia and Mac builds do not include this module. Instead,
 // non-POSIX implementation is used for sampling the system clocks.
diff --git a/base/time/time_unittest.cc b/base/time/time_unittest.cc
index cde5cf5..0471dd4 100644
--- a/base/time/time_unittest.cc
+++ b/base/time/time_unittest.cc
@@ -16,7 +16,7 @@
 #include "base/strings/stringprintf.h"
 #include "base/threading/platform_thread.h"
 #include "base/time/time_override.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_ANDROID)
diff --git a/base/timer/hi_res_timer_manager.h b/base/timer/hi_res_timer_manager.h
index bfa316d..8939cd2 100644
--- a/base/timer/hi_res_timer_manager.h
+++ b/base/timer/hi_res_timer_manager.h
@@ -10,7 +10,7 @@
 #include "base/memory/ref_counted.h"
 #include "base/power_monitor/power_observer.h"
 #include "base/timer/timer.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/timer/hi_res_timer_manager_unittest.cc b/base/timer/hi_res_timer_manager_unittest.cc
index 43f607a..3860c09 100644
--- a/base/timer/hi_res_timer_manager_unittest.cc
+++ b/base/timer/hi_res_timer_manager_unittest.cc
@@ -11,7 +11,7 @@
 #include "base/power_monitor/power_monitor_device_source.h"
 #include "base/test/scoped_task_environment.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/timer/timer_unittest.cc b/base/timer/timer_unittest.cc
index aaab237..ae587bf 100644
--- a/base/timer/timer_unittest.cc
+++ b/base/timer/timer_unittest.cc
@@ -26,7 +26,7 @@
 #include "base/threading/thread.h"
 #include "base/time/tick_clock.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/tools_sanity_unittest.cc b/base/tools_sanity_unittest.cc
index 42d45f4..761559e 100644
--- a/base/tools_sanity_unittest.cc
+++ b/base/tools_sanity_unittest.cc
@@ -13,7 +13,7 @@
 #include "base/debug/profiler.h"
 #include "base/third_party/dynamic_annotations/dynamic_annotations.h"
 #include "base/threading/thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/trace_event/heap_profiler_allocation_context_tracker.cc b/base/trace_event/heap_profiler_allocation_context_tracker.cc
index 977f3b1..526eac8 100644
--- a/base/trace_event/heap_profiler_allocation_context_tracker.cc
+++ b/base/trace_event/heap_profiler_allocation_context_tracker.cc
@@ -14,7 +14,7 @@
 #include "base/threading/platform_thread.h"
 #include "base/threading/thread_local_storage.h"
 #include "base/trace_event/heap_profiler_allocation_context.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_LINUX) || defined(OS_ANDROID)
 #include <sys/prctl.h>
diff --git a/base/trace_event/malloc_dump_provider.cc b/base/trace_event/malloc_dump_provider.cc
index 975e6eb..04a4dd5 100644
--- a/base/trace_event/malloc_dump_provider.cc
+++ b/base/trace_event/malloc_dump_provider.cc
@@ -12,7 +12,7 @@
 #include "base/debug/profiler.h"
 #include "base/trace_event/process_memory_dump.h"
 #include "base/trace_event/trace_event_argument.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_MACOSX)
 #include <malloc/malloc.h>
diff --git a/base/trace_event/malloc_dump_provider.h b/base/trace_event/malloc_dump_provider.h
index e02eb9d..726c382 100644
--- a/base/trace_event/malloc_dump_provider.h
+++ b/base/trace_event/malloc_dump_provider.h
@@ -9,7 +9,7 @@
 #include "base/memory/singleton.h"
 #include "base/synchronization/lock.h"
 #include "base/trace_event/memory_dump_provider.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_LINUX) || defined(OS_ANDROID) || defined(OS_WIN) || \
     (defined(OS_MACOSX) && !defined(OS_IOS))
diff --git a/base/trace_event/memory_allocator_dump_unittest.cc b/base/trace_event/memory_allocator_dump_unittest.cc
index b0b6e74..3818629 100644
--- a/base/trace_event/memory_allocator_dump_unittest.cc
+++ b/base/trace_event/memory_allocator_dump_unittest.cc
@@ -14,7 +14,7 @@
 #include "base/trace_event/process_memory_dump.h"
 #include "base/trace_event/trace_event_argument.h"
 #include "base/values.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
diff --git a/base/trace_event/memory_dump_manager.cc b/base/trace_event/memory_dump_manager.cc
index 9e595d6..60ba1bb 100644
--- a/base/trace_event/memory_dump_manager.cc
+++ b/base/trace_event/memory_dump_manager.cc
@@ -36,7 +36,7 @@
 #include "base/trace_event/process_memory_dump.h"
 #include "base/trace_event/trace_event.h"
 #include "base/trace_event/trace_event_argument.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_ANDROID)
 #include "base/trace_event/java_heap_dump_provider_android.h"
diff --git a/base/trace_event/memory_dump_manager_unittest.cc b/base/trace_event/memory_dump_manager_unittest.cc
index 2cd6977..8ed5fb3 100644
--- a/base/trace_event/memory_dump_manager_unittest.cc
+++ b/base/trace_event/memory_dump_manager_unittest.cc
@@ -32,7 +32,7 @@
 #include "base/trace_event/memory_dump_scheduler.h"
 #include "base/trace_event/memory_infra_background_whitelist.h"
 #include "base/trace_event/process_memory_dump.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
diff --git a/base/trace_event/memory_peak_detector.cc b/base/trace_event/memory_peak_detector.cc
index 5419594..5f2ea91 100644
--- a/base/trace_event/memory_peak_detector.cc
+++ b/base/trace_event/memory_peak_detector.cc
@@ -14,7 +14,7 @@
 #include "base/trace_event/memory_dump_manager.h"
 #include "base/trace_event/memory_dump_provider_info.h"
 #include "base/trace_event/trace_event.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace trace_event {
diff --git a/base/trace_event/memory_usage_estimator_unittest.cc b/base/trace_event/memory_usage_estimator_unittest.cc
index b525990..7f8efc7 100644
--- a/base/trace_event/memory_usage_estimator_unittest.cc
+++ b/base/trace_event/memory_usage_estimator_unittest.cc
@@ -8,7 +8,7 @@
 
 #include "base/memory/ptr_util.h"
 #include "base/strings/string16.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(ARCH_CPU_64_BITS)
diff --git a/base/trace_event/process_memory_dump.cc b/base/trace_event/process_memory_dump.cc
index 7442578..d565b47 100644
--- a/base/trace_event/process_memory_dump.cc
+++ b/base/trace_event/process_memory_dump.cc
@@ -17,7 +17,7 @@
 #include "base/trace_event/memory_infra_background_whitelist.h"
 #include "base/trace_event/trace_event_argument.h"
 #include "base/unguessable_token.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_IOS)
 #include <mach/vm_page_size.h>
diff --git a/base/trace_event/process_memory_dump.h b/base/trace_event/process_memory_dump.h
index a732a26..b2ce66f 100644
--- a/base/trace_event/process_memory_dump.h
+++ b/base/trace_event/process_memory_dump.h
@@ -18,7 +18,7 @@
 #include "base/trace_event/memory_allocator_dump.h"
 #include "base/trace_event/memory_allocator_dump_guid.h"
 #include "base/trace_event/memory_dump_request_args.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // Define COUNT_RESIDENT_BYTES_SUPPORTED if platform supports counting of the
 // resident memory.
diff --git a/base/trace_event/process_memory_dump_unittest.cc b/base/trace_event/process_memory_dump_unittest.cc
index f1209ca..b33a39e 100644
--- a/base/trace_event/process_memory_dump_unittest.cc
+++ b/base/trace_event/process_memory_dump_unittest.cc
@@ -14,7 +14,7 @@
 #include "base/trace_event/memory_infra_background_whitelist.h"
 #include "base/trace_event/trace_event_argument.h"
 #include "base/trace_event/trace_log.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_WIN)
diff --git a/base/trace_event/trace_event.h b/base/trace_event/trace_event.h
index 2569f5e..e3ee400 100644
--- a/base/trace_event/trace_event.h
+++ b/base/trace_event/trace_event.h
@@ -23,7 +23,7 @@
 #include "base/trace_event/trace_category.h"
 #include "base/trace_event/trace_event_system_stats_monitor.h"
 #include "base/trace_event/trace_log.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 // By default, const char* argument values are assumed to have long-lived scope
 // and will not be copied. Use this macro to force a const char* to be copied.
diff --git a/base/trace_event/trace_event_impl.h b/base/trace_event/trace_event_impl.h
index b1c67b1..af47f49 100644
--- a/base/trace_event/trace_event_impl.h
+++ b/base/trace_event/trace_event_impl.h
@@ -24,7 +24,7 @@
 #include "base/synchronization/lock.h"
 #include "base/threading/thread_local.h"
 #include "base/trace_event/trace_event_memory_overhead.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace trace_event {
diff --git a/base/trace_event/trace_event_system_stats_monitor_unittest.cc b/base/trace_event/trace_event_system_stats_monitor_unittest.cc
index 52a05ba..978746d 100644
--- a/base/trace_event/trace_event_system_stats_monitor_unittest.cc
+++ b/base/trace_event/trace_event_system_stats_monitor_unittest.cc
@@ -11,7 +11,7 @@
 #include "base/message_loop/message_loop.h"
 #include "base/run_loop.h"
 #include "base/trace_event/trace_event_impl.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/base/trace_event/trace_log.cc b/base/trace_event/trace_log.cc
index 338e0f6..0012c09 100644
--- a/base/trace_event/trace_log.cc
+++ b/base/trace_event/trace_log.cc
@@ -43,7 +43,7 @@
 #include "base/trace_event/process_memory_dump.h"
 #include "base/trace_event/trace_buffer.h"
 #include "base/trace_event/trace_event.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include "base/trace_event/trace_event_etw_export_win.h"
diff --git a/base/trace_event/trace_log.h b/base/trace_event/trace_log.h
index 2c23189..56df18d 100644
--- a/base/trace_event/trace_log.h
+++ b/base/trace_event/trace_log.h
@@ -21,7 +21,7 @@
 #include "base/trace_event/memory_dump_provider.h"
 #include "base/trace_event/trace_config.h"
 #include "base/trace_event/trace_event_impl.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/tuple.h b/base/tuple.h
index 58681d5..393e17b 100644
--- a/base/tuple.h
+++ b/base/tuple.h
@@ -29,7 +29,7 @@
 #include <tuple>
 #include <utility>
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 
diff --git a/base/win/com_init_check_hook.h b/base/win/com_init_check_hook.h
index c998233..3f1fbcd 100644
--- a/base/win/com_init_check_hook.h
+++ b/base/win/com_init_check_hook.h
@@ -8,7 +8,7 @@
 #include "base/base_export.h"
 #include "base/logging.h"
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace base {
 namespace win {
diff --git a/base/win/pe_image_unittest.cc b/base/win/pe_image_unittest.cc
index 7890ce6..2216449 100644
--- a/base/win/pe_image_unittest.cc
+++ b/base/win/pe_image_unittest.cc
@@ -10,7 +10,7 @@
 #include "base/path_service.h"
 #include "base/scoped_native_library.h"
 #include "base/win/pe_image.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace base {
diff --git a/build/BUILD.gn b/build/BUILD.gn
deleted file mode 100644
index 7ab955a..0000000
--- a/build/BUILD.gn
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-source_set("buildflag_header_h") {
-  sources = [
-    "buildflag.h",
-  ]
-}
diff --git a/build/OWNERS b/build/OWNERS
deleted file mode 100644
index 0b659dd..0000000
--- a/build/OWNERS
+++ /dev/null
@@ -1,27 +0,0 @@
-agrieve@chromium.org
-dpranke@chromium.org
-jbudorick@chromium.org
-jochen@chromium.org
-scottmg@chromium.org
-thakis@chromium.org
-brucedawson@chromium.org
-
-# Clang build config changes
-hans@chromium.org
-
-per-file .gitignore=*
-per-file check_gn_headers_whitelist.txt=*
-per-file install-build-deps.sh=thomasanderson@chromium.org
-per-file mac_toolchain.py=erikchen@chromium.org
-per-file mac_toolchain.py=justincohen@chromium.org
-per-file package_mac_toolchain.py=erikchen@chromium.org
-per-file package_mac_toolchain.py=justincohen@chromium.org
-per-file whitespace_file.txt=*
-per-file OWNERS.status=*
-
-# gn-dev is probably a better team here, but the tooling won't let us
-# have more than one team per component, and infra-dev is a catch-all
-# for other build-related lists.
-#
-# TEAM: infra-dev@chromium.org
-# COMPONENT: Build
diff --git a/build/OWNERS.status b/build/OWNERS.status
deleted file mode 100644
index f5cc1fc..0000000
--- a/build/OWNERS.status
+++ /dev/null
@@ -1,12 +0,0 @@
-# Use this file to set a global status message that should be shown whenever
-# git cl owners proposes to add you as a reviewer.
-#
-# The status messages should be somewhat stable, so please don't use this for
-# short term, or frequently changing updates.
-#
-# The format of the file is
-#
-#  you@chromium.org: Single line status message.
-#
-
-jochen@chromium.org: EMEA based reviewer.
diff --git a/build/PRESUBMIT.py b/build/PRESUBMIT.py
deleted file mode 100644
index b886326..0000000
--- a/build/PRESUBMIT.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Presubmit script for //build.
-
-See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
-for more details about the presubmit API built into depot_tools.
-"""
-
-def PostUploadHook(cl, change, output_api):
-  """git cl upload will call this hook after the issue is created/modified.
-
-  This hook modifies the CL description in order to run extra tests.
-  """
-
-  def affects_gn_checker(f):
-    return 'check_gn_headers' in f.LocalPath()
-  if not change.AffectedFiles(file_filter=affects_gn_checker):
-    return []
-  return output_api.EnsureCQIncludeTrybotsAreAdded(
-    cl,
-    [
-      'luci.chromium.try:linux_chromium_dbg_ng',
-    ],
-    'Automatically added tests to run on CQ.')
diff --git a/build/apply_locales.py b/build/apply_locales.py
deleted file mode 100755
index 6af7280..0000000
--- a/build/apply_locales.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2009 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# TODO: remove this script when GYP has for loops
-
-import sys
-import optparse
-
-def main(argv):
-
-  parser = optparse.OptionParser()
-  usage = 'usage: %s [options ...] format_string locale_list'
-  parser.set_usage(usage.replace('%s', '%prog'))
-  parser.add_option('-d', dest='dash_to_underscore', action="store_true",
-                    default=False,
-                    help='map "en-US" to "en" and "-" to "_" in locales')
-
-  (options, arglist) = parser.parse_args(argv)
-
-  if len(arglist) < 3:
-    print 'ERROR: need string and list of locales'
-    return 1
-
-  str_template = arglist[1]
-  locales = arglist[2:]
-
-  results = []
-  for locale in locales:
-    # For Cocoa to find the locale at runtime, it needs to use '_' instead
-    # of '-' (http://crbug.com/20441).  Also, 'en-US' should be represented
-    # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
-    if options.dash_to_underscore:
-      if locale == 'en-US':
-        locale = 'en'
-      locale = locale.replace('-', '_')
-    results.append(str_template.replace('ZZLOCALE', locale))
-
-  # Quote each element so filename spaces don't mess up GYP's attempt to parse
-  # it into a list.
-  print ' '.join(["'%s'" % x for x in results])
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv))
diff --git a/build/args/OWNERS b/build/args/OWNERS
deleted file mode 100644
index d218b6b..0000000
--- a/build/args/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-per-file headless.gn=file://headless/OWNERS
diff --git a/build/args/README.txt b/build/args/README.txt
deleted file mode 100644
index 825bf64..0000000
--- a/build/args/README.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-This directory is here to hold .gni files that contain sets of GN build
-arguments for given configurations.
-
-(Currently this directory is empty because we removed the only thing here, but
-this has come up several times so I'm confident we'll need this again. If this
-directory is still empty by 2017, feel free to delete it. --Brett)
-
-Some projects or bots may have build configurations with specific combinations
-of flags. Rather than making a new global flag for your specific project and
-adding it all over the build to each arg it should affect, you can add a .gni
-file here with the variables.
-
-For example, for project foo you may put in build/args/foo.gni:
-
-  target_os = "android"
-  use_pulseaudio = false
-  use_ozone = true
-  system_libdir = "foo"
-
-Users wanting to build this configuration would run:
-
-  $ gn args out/mybuild
-
-And add the following line to their args for that build directory:
-
-  import("//build/args/foo.gni")
-  # You can set any other args here like normal.
-  is_component_build = false
-
-This way everybody can agree on a set of flags for a project, and their builds
-stay in sync as the flags in foo.gni are modified.
diff --git a/build/args/fuchsia.gn b/build/args/fuchsia.gn
deleted file mode 100644
index ba10b88..0000000
--- a/build/args/fuchsia.gn
+++ /dev/null
@@ -1,7 +0,0 @@
-import("//build/args/headless.gn")
-
-target_os = "fuchsia"
-enable_basic_printing = false
-headless_fontconfig_utils = false
-toolkit_views = false
-enable_plugins = false
diff --git a/build/args/headless.gn b/build/args/headless.gn
deleted file mode 100644
index ae5e043..0000000
--- a/build/args/headless.gn
+++ /dev/null
@@ -1,42 +0,0 @@
-# GN args template for the Headless Chrome library
-#
-# Add import to arg.gn in out directory and run gn gen on the directory to use.
-# E.g. for out directory out/foo:
-# echo 'import("//build/args/headless.gn")' > out/foo/args.gn
-# gn gen out/foo
-#
-# Use gn args to add your own build preference args.
-
-use_ozone = true
-ozone_auto_platforms = false
-ozone_platform = "headless"
-ozone_platform_headless = true
-
-# Embed resource.pak into binary to simplify deployment.
-headless_use_embedded_resources = true
-
-# Expose headless bindings for freetype library bundled with Chromium.
-headless_fontconfig_utils = true
-
-# Remove a dependency on a system fontconfig library.
-use_bundled_fontconfig = true
-
-# In order to simplify deployment we build ICU data file
-# into binary.
-icu_use_data_file = false
-
-# Use embedded data instead external files for headless in order
-# to simplify deployment.
-v8_use_external_startup_data = false
-
-enable_nacl = false
-enable_print_preview = false
-enable_remoting = false
-use_alsa = false
-use_cups = false
-use_dbus = false
-use_gio = false
-use_kerberos = false
-use_libpci = false
-use_pulseaudio = false
-use_udev = false
diff --git a/build/branding_value.sh b/build/branding_value.sh
deleted file mode 100755
index 9fcb550..0000000
--- a/build/branding_value.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/sh
-
-# Copyright (c) 2008 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This is a wrapper for fetching values from the BRANDING files.  Pass the
-# value of GYP's branding variable followed by the key you want and the right
-# file is checked.
-#
-#  branding_value.sh Chromium COPYRIGHT
-#  branding_value.sh Chromium PRODUCT_FULLNAME
-#
-
-set -e
-
-if [ $# -ne 2 ] ;  then
-  echo "error: expect two arguments, branding and key" >&2
-  exit 1
-fi
-
-BUILD_BRANDING=$1
-THE_KEY=$2
-
-pushd $(dirname "${0}") > /dev/null
-BUILD_DIR=$(pwd)
-popd > /dev/null
-
-TOP="${BUILD_DIR}/.."
-
-case ${BUILD_BRANDING} in
-  Chromium)
-    BRANDING_FILE="${TOP}/chrome/app/theme/chromium/BRANDING"
-    ;;
-  Chrome)
-    BRANDING_FILE="${TOP}/chrome/app/theme/google_chrome/BRANDING"
-    ;;
-  *)
-    echo "error: unknown branding: ${BUILD_BRANDING}" >&2
-    exit 1
-    ;;
-esac
-
-BRANDING_VALUE=$(sed -n -e "s/^${THE_KEY}=\(.*\)\$/\1/p" "${BRANDING_FILE}")
-
-if [ -z "${BRANDING_VALUE}" ] ; then
-  echo "error: failed to find key '${THE_KEY}'" >&2
-  exit 1
-fi
-
-echo "${BRANDING_VALUE}"
diff --git a/build/build-ctags.sh b/build/build-ctags.sh
deleted file mode 100755
index 61e017e..0000000
--- a/build/build-ctags.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-if [[ a"`ctags --version | head -1 | grep \"^Exuberant Ctags\"`" == "a" ]]; then
-  cat <<EOF
-  You must be using Exuberant Ctags, not just standard GNU ctags. If you are on
-  Debian or a related flavor of Linux, you may want to try running
-  apt-get install exuberant-ctags.
-EOF
-  exit
-fi
-
-CHROME_SRC_DIR="$PWD"
-
-fail() {
-  echo "Failed to create ctags for $1"
-  exit 1
-}
-
-ctags_cmd() {
-  echo "ctags --languages=C++ $1 --exclude=.git -R -f .tmp_tags"
-}
-
-build_dir() {
-  local extraexcludes=""
-  if [[ a"$1" == "a--extra-excludes" ]]; then
-    extraexcludes="--exclude=third_party --exclude=build --exclude=out"
-    shift
-  fi
-
-  cd "$CHROME_SRC_DIR/$1" || fail $1
-  # Redirect error messages so they aren't seen because they are almost always
-  # errors about components that you just happen to have not built (NaCl, for
-  # example).
-  $(ctags_cmd "$extraexcludes") 2> /dev/null || fail $1
-  mv -f .tmp_tags tags
-}
-
-# We always build the top level but leave all submodules as optional.
-build_dir --extra-excludes "" "top level"
-
-# Build any other directies that are listed on the command line.
-for dir in $@; do
-  build_dir "$1"
-  shift
-done
diff --git a/tools/gn/bootstrap/build.ninja.template b/build/build.ninja.template
similarity index 100%
rename from tools/gn/bootstrap/build.ninja.template
rename to build/build.ninja.template
diff --git a/tools/gn/bootstrap/build_aix.ninja.template b/build/build_aix.ninja.template
similarity index 100%
rename from tools/gn/bootstrap/build_aix.ninja.template
rename to build/build_aix.ninja.template
diff --git a/build/build_config.h b/build/build_config.h
deleted file mode 100644
index c7b0266..0000000
--- a/build/build_config.h
+++ /dev/null
@@ -1,207 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file adds defines about the platform we're currently building on.
-//  Operating System:
-//    OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX) /
-//    OS_NACL (NACL_SFI or NACL_NONSFI) / OS_NACL_SFI / OS_NACL_NONSFI
-//    OS_CHROMEOS is set by the build system
-//  Compiler:
-//    COMPILER_MSVC / COMPILER_GCC
-//  Processor:
-//    ARCH_CPU_X86 / ARCH_CPU_X86_64 / ARCH_CPU_X86_FAMILY (X86 or X86_64)
-//    ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
-
-#ifndef BUILD_BUILD_CONFIG_H_
-#define BUILD_BUILD_CONFIG_H_
-
-// A set of macros to use for platform detection.
-#if defined(__native_client__)
-// __native_client__ must be first, so that other OS_ defines are not set.
-#define OS_NACL 1
-// OS_NACL comes in two sandboxing technology flavors, SFI or Non-SFI.
-// PNaCl toolchain defines __native_client_nonsfi__ macro in Non-SFI build
-// mode, while it does not in SFI build mode.
-#if defined(__native_client_nonsfi__)
-#define OS_NACL_NONSFI
-#else
-#define OS_NACL_SFI
-#endif
-#elif defined(ANDROID)
-#define OS_ANDROID 1
-#elif defined(__APPLE__)
-// only include TargetConditions after testing ANDROID as some android builds
-// on mac don't have this header available and it's not needed unless the target
-// is really mac/ios.
-#include <TargetConditionals.h>
-#define OS_MACOSX 1
-#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
-#define OS_IOS 1
-#endif  // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
-#elif defined(__linux__)
-#define OS_LINUX 1
-// include a system header to pull in features.h for glibc/uclibc macros.
-#include <unistd.h>
-#if defined(__GLIBC__) && !defined(__UCLIBC__)
-// we really are using glibc, not uClibc pretending to be glibc
-#define LIBC_GLIBC 1
-#endif
-#elif defined(_WIN32)
-#define OS_WIN 1
-#elif defined(__Fuchsia__)
-#define OS_FUCHSIA 1
-#elif defined(__FreeBSD__)
-#define OS_FREEBSD 1
-#elif defined(__NetBSD__)
-#define OS_NETBSD 1
-#elif defined(__OpenBSD__)
-#define OS_OPENBSD 1
-#elif defined(__sun)
-#define OS_SOLARIS 1
-#elif defined(__QNXNTO__)
-#define OS_QNX 1
-#elif defined(_AIX)
-#define OS_AIX 1
-#elif defined(__asmjs__)
-#define OS_ASMJS
-#else
-#error Please add support for your platform in build/build_config.h
-#endif
-// NOTE: Adding a new port? Please follow
-// https://chromium.googlesource.com/chromium/src/+/master/docs/new_port_policy.md
-
-// For access to standard BSD features, use OS_BSD instead of a
-// more specific macro.
-#if defined(OS_FREEBSD) || defined(OS_NETBSD) || defined(OS_OPENBSD)
-#define OS_BSD 1
-#endif
-
-// For access to standard POSIXish features, use OS_POSIX instead of a
-// more specific macro.
-#if defined(OS_AIX) || defined(OS_ANDROID) || defined(OS_ASMJS) ||    \
-    defined(OS_FREEBSD) || defined(OS_LINUX) || defined(OS_MACOSX) || \
-    defined(OS_NACL) || defined(OS_NETBSD) || defined(OS_OPENBSD) ||  \
-    defined(OS_QNX) || defined(OS_SOLARIS)
-#define OS_POSIX 1
-#endif
-
-// Use tcmalloc
-#if (defined(OS_WIN) || defined(OS_LINUX) || defined(OS_ANDROID)) && \
-    !defined(NO_TCMALLOC)
-#define USE_TCMALLOC 1
-#endif
-
-// Compiler detection.
-#if defined(__GNUC__)
-#define COMPILER_GCC 1
-#elif defined(_MSC_VER)
-#define COMPILER_MSVC 1
-#else
-#error Please add support for your compiler in build/build_config.h
-#endif
-
-// Processor architecture detection.  For more info on what's defined, see:
-//   http://msdn.microsoft.com/en-us/library/b0084kay.aspx
-//   http://www.agner.org/optimize/calling_conventions.pdf
-//   or with gcc, run: "echo | gcc -E -dM -"
-#if defined(_M_X64) || defined(__x86_64__)
-#define ARCH_CPU_X86_FAMILY 1
-#define ARCH_CPU_X86_64 1
-#define ARCH_CPU_64_BITS 1
-#define ARCH_CPU_LITTLE_ENDIAN 1
-#elif defined(_M_IX86) || defined(__i386__)
-#define ARCH_CPU_X86_FAMILY 1
-#define ARCH_CPU_X86 1
-#define ARCH_CPU_32_BITS 1
-#define ARCH_CPU_LITTLE_ENDIAN 1
-#elif defined(__s390x__)
-#define ARCH_CPU_S390_FAMILY 1
-#define ARCH_CPU_S390X 1
-#define ARCH_CPU_64_BITS 1
-#define ARCH_CPU_BIG_ENDIAN 1
-#elif defined(__s390__)
-#define ARCH_CPU_S390_FAMILY 1
-#define ARCH_CPU_S390 1
-#define ARCH_CPU_31_BITS 1
-#define ARCH_CPU_BIG_ENDIAN 1
-#elif (defined(__PPC64__) || defined(__PPC__)) && defined(__BIG_ENDIAN__)
-#define ARCH_CPU_PPC64_FAMILY 1
-#define ARCH_CPU_PPC64 1
-#define ARCH_CPU_64_BITS 1
-#define ARCH_CPU_BIG_ENDIAN 1
-#elif defined(__PPC64__)
-#define ARCH_CPU_PPC64_FAMILY 1
-#define ARCH_CPU_PPC64 1
-#define ARCH_CPU_64_BITS 1
-#define ARCH_CPU_LITTLE_ENDIAN 1
-#elif defined(__ARMEL__)
-#define ARCH_CPU_ARM_FAMILY 1
-#define ARCH_CPU_ARMEL 1
-#define ARCH_CPU_32_BITS 1
-#define ARCH_CPU_LITTLE_ENDIAN 1
-#elif defined(__aarch64__)
-#define ARCH_CPU_ARM_FAMILY 1
-#define ARCH_CPU_ARM64 1
-#define ARCH_CPU_64_BITS 1
-#define ARCH_CPU_LITTLE_ENDIAN 1
-#elif defined(__pnacl__) || defined(__asmjs__)
-#define ARCH_CPU_32_BITS 1
-#define ARCH_CPU_LITTLE_ENDIAN 1
-#elif defined(__MIPSEL__)
-#if defined(__LP64__)
-#define ARCH_CPU_MIPS_FAMILY 1
-#define ARCH_CPU_MIPS64EL 1
-#define ARCH_CPU_64_BITS 1
-#define ARCH_CPU_LITTLE_ENDIAN 1
-#else
-#define ARCH_CPU_MIPS_FAMILY 1
-#define ARCH_CPU_MIPSEL 1
-#define ARCH_CPU_32_BITS 1
-#define ARCH_CPU_LITTLE_ENDIAN 1
-#endif
-#elif defined(__MIPSEB__)
-#if defined(__LP64__)
-#define ARCH_CPU_MIPS_FAMILY 1
-#define ARCH_CPU_MIPS64 1
-#define ARCH_CPU_64_BITS 1
-#define ARCH_CPU_BIG_ENDIAN 1
-#else
-#define ARCH_CPU_MIPS_FAMILY 1
-#define ARCH_CPU_MIPS 1
-#define ARCH_CPU_32_BITS 1
-#define ARCH_CPU_BIG_ENDIAN 1
-#endif
-#else
-#error Please add support for your architecture in build/build_config.h
-#endif
-
-// Type detection for wchar_t.
-#if defined(OS_WIN)
-#define WCHAR_T_IS_UTF16
-#elif defined(OS_FUCHSIA)
-#define WCHAR_T_IS_UTF32
-#elif defined(OS_POSIX) && defined(COMPILER_GCC) && defined(__WCHAR_MAX__) && \
-    (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff)
-#define WCHAR_T_IS_UTF32
-#elif defined(OS_POSIX) && defined(COMPILER_GCC) && defined(__WCHAR_MAX__) && \
-    (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff)
-// On Posix, we'll detect short wchar_t, but projects aren't guaranteed to
-// compile in this mode (in particular, Chrome doesn't). This is intended for
-// other projects using base who manage their own dependencies and make sure
-// short wchar works for them.
-#define WCHAR_T_IS_UTF16
-#else
-#error Please add support for your compiler in build/build_config.h
-#endif
-
-#if defined(OS_ANDROID)
-// The compiler thinks std::string::const_iterator and "const char*" are
-// equivalent types.
-#define STD_STRING_ITERATOR_IS_CHAR_POINTER
-// The compiler thinks base::string16::const_iterator and "char16*" are
-// equivalent types.
-#define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER
-#endif
-
-#endif  // BUILD_BUILD_CONFIG_H_
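
The deleted build_config.h centralized platform, compiler, and architecture detection behind the OS_*, COMPILER_*, and ARCH_CPU_* macros listed in its header comment. As a rough sketch of how consuming code branches on those macros (the function and messages below are hypothetical, and the sketch assumes the header is included as "build_config.h", matching the include updates above):

  #include <cstdio>

  #include "build_config.h"

  // Reports which platform and word size this translation unit was built for.
  void PrintBuildTarget() {
  #if defined(OS_WIN)
    std::printf("built for Windows\n");
  #elif defined(OS_MACOSX)
    std::printf("built for macOS\n");
  #elif defined(OS_POSIX)
    std::printf("built for a POSIX platform\n");
  #endif

  #if defined(ARCH_CPU_64_BITS)
    std::printf("64-bit target\n");
  #else
    std::printf("32-bit target\n");
  #endif
  }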
diff --git a/tools/gn/bootstrap/build_mac.ninja.template b/build/build_mac.ninja.template
similarity index 100%
rename from tools/gn/bootstrap/build_mac.ninja.template
rename to build/build_mac.ninja.template
diff --git a/tools/gn/bootstrap/build_vs.ninja.template b/build/build_vs.ninja.template
similarity index 100%
rename from tools/gn/bootstrap/build_vs.ninja.template
rename to build/build_vs.ninja.template
diff --git a/build/buildflag.h b/build/buildflag.h
deleted file mode 100644
index 5776a75..0000000
--- a/build/buildflag.h
+++ /dev/null
@@ -1,47 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef BUILD_BUILDFLAG_H_
-#define BUILD_BUILDFLAG_H_
-
-// These macros un-mangle the names of the build flags in a way that looks
-// natural, and give errors if the flag is not defined. Normally in the
-// preprocessor it's easy to make mistakes that interpret "you haven't done
-// the setup to know what the flag is" as "flag is off". Normally you would
-// include the generated header rather than include this file directly.
-//
-// This is for use with generated headers. See build/buildflag_header.gni.
-
-// This dance of two macros does a concatenation of two preprocessor args using
-// ## doubly indirectly because using ## directly prevents macros in that
-// parameter from being expanded.
-#define BUILDFLAG_CAT_INDIRECT(a, b) a ## b
-#define BUILDFLAG_CAT(a, b) BUILDFLAG_CAT_INDIRECT(a, b)
-
-// Accessor for build flags.
-//
-// To test for a value, if the build file specifies:
-//
-//   ENABLE_FOO=true
-//
-// Then you would check at build-time in source code with:
-//
-//   #include "foo_flags.h"  // The header the build file specified.
-//
-//   #if BUILDFLAG(ENABLE_FOO)
-//     ...
-//   #endif
-//
-// There will be no #define called ENABLE_FOO so if you accidentally test for
-// whether that is defined, it will always be negative. You can also use
-// the value in expressions:
-//
-//   const char kSpamServerName[] = BUILDFLAG(SPAM_SERVER_NAME);
-//
-// Because the flag is accessed as a preprocessor macro with (), an error
-// will be thrown if the proper header defining the internal flag value has
-// not been included.
-#define BUILDFLAG(flag) (BUILDFLAG_CAT(BUILDFLAG_INTERNAL_, flag)())
-
-#endif  // BUILD_BUILDFLAG_H_
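
The deleted buildflag.h relied on double-indirect token pasting so that testing a flag whose generated header was never included is a compile error rather than a silent false. A minimal, self-contained sketch of that pattern; the BUILDFLAG_INTERNAL_ENABLE_FOO definition stands in for a generated header and is illustrative only:

  #define BUILDFLAG_CAT_INDIRECT(a, b) a##b
  #define BUILDFLAG_CAT(a, b) BUILDFLAG_CAT_INDIRECT(a, b)
  #define BUILDFLAG(flag) (BUILDFLAG_CAT(BUILDFLAG_INTERNAL_, flag)())

  // What a header generated for ENABLE_FOO=true would provide.
  #define BUILDFLAG_INTERNAL_ENABLE_FOO() (1)

  #if BUILDFLAG(ENABLE_FOO)
  // Compiled because ENABLE_FOO expands to (1). If the generated header were
  // missing, BUILDFLAG(ENABLE_FOO) would not expand to a constant and this #if
  // would fail to preprocess instead of silently evaluating to false.
  static const bool kFooEnabled = true;
  #endif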
diff --git a/build/buildflag_header.gni b/build/buildflag_header.gni
deleted file mode 100644
index 281c164..0000000
--- a/build/buildflag_header.gni
+++ /dev/null
@@ -1,141 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Generates a header with preprocessor defines specified by the build file.
-#
-# The flags are converted to function-style defines with mangled names and
-# code uses an accessor macro to access the values. This is to try to
-# minimize bugs where code checks whether something is defined or not, and
-# the proper header isn't included, meaning the answer will always be silently
-# false or might vary across the code base.
-#
-# In the GN template, specify build flags in the template as a list
-# of strings that encode key/value pairs like this:
-#
-#   flags = [ "ENABLE_FOO=1", "ENABLE_BAR=$enable_bar" ]
-#
-# The GN values "true" and "false" will be mapped to 0 and 1 for boolean
-# #if flags to be expressed naturally. This means you can't directly make a
-# define that generates a C++ value of true or false for use in code. If you
-# REALLY need this, you can also use the string "(true)" and "(false)" to
-# prevent the rewriting.
-
-# To check the value of the flag in C code:
-#
-#   #include "path/to/here/header_file.h"
-#
-#   #if BUILDFLAG(ENABLE_FOO)
-#   ...
-#   #endif
-#
-#   const char kSpamServerUrl[] = BUILDFLAG(SPAM_SERVER_URL);
-#
-# There will be no #define called ENABLE_FOO so if you accidentally test for that
-# in an ifdef it will always be negative.
-#
-#
-# Template parameters
-#
-#   flags [required, list of strings]
-#       Flag values as described above.
-#
-#   header [required, string]
-#       File name for generated header. By default, this will go in the
-#       generated file directory for this target, and you would include it
-#       with:
-#         #include "<path_to_this_BUILD_file>/<header>"
-#
-#   header_dir [optional, string]
-#       Override the default location of the generated header. The string will
-#       be treated as a subdirectory of the root_gen_dir. For example:
-#         header_dir = "foo/bar"
-#       Then you can include the header as:
-#         #include "foo/bar/baz.h"
-#
-#   deps, public_deps, testonly, visibility
-#       Normal meaning.
-#
-#
-# Grit defines
-#
-# If one .grd file uses a flag, just add to the grit target:
-#
-#   defines = [
-#     "enable_doom_melon=$enable_doom_melon",
-#   ]
-#
-# If multiple .grd files use it, you'll want to put the defines in a .gni file
-# so it can be shared. Generally this .gni file should include all grit defines
-# for a given module (for some definition of "module"). Then do:
-#
-#   defines = ui_grit_defines
-#
-# If you forget to do this, the flag will be implicitly false in the .grd file
-# and those resources won't be compiled. You'll know because the resource
-# #define won't be generated and any code that uses it won't compile. If you
-# see a missing IDS_* string, this is probably the reason.
-#
-#
-# Example
-#
-#   buildflag_header("foo_buildflags") {
-#     header = "foo_buildflags.h"
-#
-#     flags = [
-#       # This uses the GN build flag enable_doom_melon as the definition.
-#       "ENABLE_DOOM_MELON=$enable_doom_melon",
-#
-#       # This force-enables the flag.
-#       "ENABLE_SPACE_LASER=true",
-#
-#       # This will expand to the quoted C string when used in source code.
-#       "SPAM_SERVER_URL=\"http://www.example.com/\"",
-#     ]
-#   }
-template("buildflag_header") {
-  action(target_name) {
-    script = "//build/write_buildflag_header.py"
-
-    if (defined(invoker.header_dir)) {
-      header_file = "${invoker.header_dir}/${invoker.header}"
-    } else {
-      # Compute the path from the root to this file.
-      header_file = rebase_path(".", "//") + "/${invoker.header}"
-    }
-
-    outputs = [
-      "$root_gen_dir/$header_file",
-    ]
-
-    # Always write --flags to the file so it's not empty. Empty will confuse GN
-    # into thinking the response file isn't used.
-    response_file_contents = [ "--flags" ]
-    if (defined(invoker.flags)) {
-      response_file_contents += invoker.flags
-    }
-
-    args = [
-      "--output",
-      header_file,  # Not rebased, Python script puts it inside gen-dir.
-      "--rulename",
-      get_label_info(":$target_name", "label_no_toolchain"),
-      "--gen-dir",
-      rebase_path(root_gen_dir, root_build_dir),
-      "--definitions",
-      "{{response_file_name}}",
-    ]
-
-    forward_variables_from(invoker,
-                           [
-                             "deps",
-                             "public_deps",
-                             "testonly",
-                             "visibility",
-                           ])
-
-    public_deps = [
-      "//build:buildflag_header_h",
-    ]
-  }
-}
diff --git a/build/check_gn_headers.py b/build/check_gn_headers.py
deleted file mode 100755
index f6ae8f5..0000000
--- a/build/check_gn_headers.py
+++ /dev/null
@@ -1,304 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Find header files missing in GN.
-
-This script gets all the header files from ninja_deps, which is from the true
-dependency generated by the compiler, and reports if they don't exist in GN.
-"""
-
-import argparse
-import json
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-from multiprocessing import Process, Queue
-
-SRC_DIR = os.path.abspath(
-    os.path.join(os.path.abspath(os.path.dirname(__file__)), os.path.pardir))
-DEPOT_TOOLS_DIR = os.path.join(SRC_DIR, 'third_party', 'depot_tools')
-
-
-def GetHeadersFromNinja(out_dir, skip_obj, q):
-  """Return all the header files from ninja_deps"""
-
-  def NinjaSource():
-    cmd = [os.path.join(DEPOT_TOOLS_DIR, 'ninja'), '-C', out_dir, '-t', 'deps']
-    # A negative bufsize means to use the system default, which usually
-    # means fully buffered.
-    popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=-1)
-    for line in iter(popen.stdout.readline, ''):
-      yield line.rstrip()
-
-    popen.stdout.close()
-    return_code = popen.wait()
-    if return_code:
-      raise subprocess.CalledProcessError(return_code, cmd)
-
-  ans, err = set(), None
-  try:
-    ans = ParseNinjaDepsOutput(NinjaSource(), out_dir, skip_obj)
-  except Exception as e:
-    err = str(e)
-  q.put((ans, err))
-
-
-def ParseNinjaDepsOutput(ninja_out, out_dir, skip_obj):
-  """Parse ninja output and get the header files"""
-  all_headers = {}
-
-  # Ninja always uses "/", even on Windows.
-  prefix = '../../'
-
-  is_valid = False
-  obj_file = ''
-  for line in ninja_out:
-    if line.startswith('    '):
-      if not is_valid:
-        continue
-      if line.endswith('.h') or line.endswith('.hh'):
-        f = line.strip()
-        if f.startswith(prefix):
-          f = f[6:]  # Remove the '../../' prefix
-          # build/ only contains build-specific files like build_config.h
-          # and buildflag.h, and system header files, so they should be
-          # skipped.
-          if f.startswith(out_dir) or f.startswith('out'):
-            continue
-          if not f.startswith('build'):
-            all_headers.setdefault(f, [])
-            if not skip_obj:
-              all_headers[f].append(obj_file)
-    else:
-      is_valid = line.endswith('(VALID)')
-      obj_file = line.split(':')[0]
-
-  return all_headers
-
-
-def GetHeadersFromGN(out_dir, q):
-  """Return all the header files from GN"""
-
-  tmp = None
-  ans, err = set(), None
-  try:
-    # Argument |dir| is needed to make sure it's on the same drive on Windows.
-    # dir='' means dir='.', but doesn't introduce an unneeded prefix.
-    tmp = tempfile.mkdtemp(dir='')
-    shutil.copy2(os.path.join(out_dir, 'args.gn'),
-                 os.path.join(tmp, 'args.gn'))
-    # Do "gn gen" in a temp dir to prevent dirtying |out_dir|.
-    gn_exe = 'gn.bat' if sys.platform == 'win32' else 'gn'
-    subprocess.check_call([
-        os.path.join(DEPOT_TOOLS_DIR, gn_exe), 'gen', tmp, '--ide=json', '-q'])
-    gn_json = json.load(open(os.path.join(tmp, 'project.json')))
-    ans = ParseGNProjectJSON(gn_json, out_dir, tmp)
-  except Exception as e:
-    err = str(e)
-  finally:
-    if tmp:
-      shutil.rmtree(tmp)
-  q.put((ans, err))
-
-
-def ParseGNProjectJSON(gn, out_dir, tmp_out):
-  """Parse GN output and get the header files"""
-  all_headers = set()
-
-  for _target, properties in gn['targets'].iteritems():
-    sources = properties.get('sources', [])
-    public = properties.get('public', [])
-    # Exclude '"public": "*"'.
-    if type(public) is list:
-      sources += public
-    for f in sources:
-      if f.endswith('.h') or f.endswith('.hh'):
-        if f.startswith('//'):
-          f = f[2:]  # Strip the '//' prefix.
-          if f.startswith(tmp_out):
-            f = out_dir + f[len(tmp_out):]
-          all_headers.add(f)
-
-  return all_headers
-
-
-def GetDepsPrefixes(q):
-  """Return all the folders controlled by DEPS file"""
-  prefixes, err = set(), None
-  try:
-    gclient_exe = 'gclient.bat' if sys.platform == 'win32' else 'gclient'
-    gclient_out = subprocess.check_output([
-        os.path.join(DEPOT_TOOLS_DIR, gclient_exe),
-        'recurse', '--no-progress', '-j1',
-        'python', '-c', 'import os;print os.environ["GCLIENT_DEP_PATH"]'],
-        universal_newlines=True)
-    for i in gclient_out.split('\n'):
-      if i.startswith('src/'):
-        i = i[4:]
-        prefixes.add(i)
-  except Exception as e:
-    err = str(e)
-  q.put((prefixes, err))
-
-
-def IsBuildClean(out_dir):
-  cmd = [os.path.join(DEPOT_TOOLS_DIR, 'ninja'), '-C', out_dir, '-n']
-  try:
-    out = subprocess.check_output(cmd)
-    return 'no work to do.' in out
-  except Exception as e:
-    print e
-    return False
-
-def ParseWhiteList(whitelist):
-  out = set()
-  for line in whitelist.split('\n'):
-    line = re.sub(r'#.*', '', line).strip()
-    if line:
-      out.add(line)
-  return out
-
-
-def FilterOutDepsedRepo(files, deps):
-  return {f for f in files if not any(f.startswith(d) for d in deps)}
-
-
-def GetNonExistingFiles(lst):
-  out = set()
-  for f in lst:
-    if not os.path.isfile(f):
-      out.add(f)
-  return out
-
-
-def main():
-
-  def DumpJson(data):
-    if args.json:
-      with open(args.json, 'w') as f:
-        json.dump(data, f)
-
-  def PrintError(msg):
-    DumpJson([])
-    parser.error(msg)
-
-  parser = argparse.ArgumentParser(description='''
-      NOTE: Use ninja to build all targets in OUT_DIR before running
-      this script.''')
-  parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release',
-                      help='output directory of the build')
-  parser.add_argument('--json',
-                      help='JSON output filename for missing headers')
-  parser.add_argument('--whitelist', help='file containing whitelist')
-  parser.add_argument('--skip-dirty-check', action='store_true',
-                      help='skip checking whether the build is dirty')
-  parser.add_argument('--verbose', action='store_true',
-                      help='print more diagnostic info')
-
-  args, _extras = parser.parse_known_args()
-
-  if not os.path.isdir(args.out_dir):
-    parser.error('OUT_DIR "%s" does not exist.' % args.out_dir)
-
-  if not args.skip_dirty_check and not IsBuildClean(args.out_dir):
-    dirty_msg = 'OUT_DIR looks dirty. You need to build all there.'
-    if args.json:
-      # Assume running on the bots. Silently skip this step.
-      # This is possible because "analyze" step can be wrong due to
-      # underspecified header files. See crbug.com/725877
-      print dirty_msg
-      DumpJson([])
-      return 0
-    else:
-      # Assume running interactively.
-      parser.error(dirty_msg)
-
-  d_q = Queue()
-  d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, True, d_q,))
-  d_p.start()
-
-  gn_q = Queue()
-  gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,))
-  gn_p.start()
-
-  deps_q = Queue()
-  deps_p = Process(target=GetDepsPrefixes, args=(deps_q,))
-  deps_p.start()
-
-  d, d_err = d_q.get()
-  gn, gn_err = gn_q.get()
-  missing = set(d.keys()) - gn
-  nonexisting = GetNonExistingFiles(gn)
-
-  deps, deps_err = deps_q.get()
-  missing = FilterOutDepsedRepo(missing, deps)
-  nonexisting = FilterOutDepsedRepo(nonexisting, deps)
-
-  d_p.join()
-  gn_p.join()
-  deps_p.join()
-
-  if d_err:
-    PrintError(d_err)
-  if gn_err:
-    PrintError(gn_err)
-  if deps_err:
-    PrintError(deps_err)
-  if len(GetNonExistingFiles(d)) > 0:
-    print 'Non-existing files in ninja deps:', GetNonExistingFiles(d)
-    PrintError('Found non-existing files in ninja deps. You should ' +
-               'build all in OUT_DIR.')
-  if len(d) == 0:
-    PrintError('OUT_DIR looks empty. You should build all there.')
-  if any((('/gen/' in i) for i in nonexisting)):
-    PrintError('OUT_DIR looks wrong. You should build all there.')
-
-  if args.whitelist:
-    whitelist = ParseWhiteList(open(args.whitelist).read())
-    missing -= whitelist
-    nonexisting -= whitelist
-
-  missing = sorted(missing)
-  nonexisting = sorted(nonexisting)
-
-  DumpJson(sorted(missing + nonexisting))
-
-  if len(missing) == 0 and len(nonexisting) == 0:
-    return 0
-
-  if len(missing) > 0:
-    print '\nThe following files should be included in gn files:'
-    for i in missing:
-      print i
-
-  if len(nonexisting) > 0:
-    print '\nThe following non-existing files should be removed from gn files:'
-    for i in nonexisting:
-      print i
-
-  if args.verbose:
-    # Only get detailed obj dependency here since it is slower.
-    GetHeadersFromNinja(args.out_dir, False, d_q)
-    d, d_err = d_q.get()
-    print '\nDetailed dependency info:'
-    for f in missing:
-      print f
-      for cc in d[f]:
-        print '  ', cc
-
-    print '\nMissing headers sorted by number of affected object files:'
-    count = {k: len(v) for (k, v) in d.iteritems()}
-    for f in sorted(count, key=count.get, reverse=True):
-      if f in missing:
-        print count[f], f
-
-  return 1
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/check_gn_headers_unittest.py b/build/check_gn_headers_unittest.py
deleted file mode 100755
index 20c3b13..0000000
--- a/build/check_gn_headers_unittest.py
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import json
-import unittest
-import check_gn_headers
-
-
-ninja_input = r'''
-obj/a.o: #deps 1, deps mtime 123 (VALID)
-    ../../a.cc
-    ../../dir/path/b.h
-    ../../c.hh
-
-obj/b.o: #deps 1, deps mtime 123 (STALE)
-    ../../b.cc
-    ../../dir2/path/b.h
-    ../../c2.hh
-
-obj/c.o: #deps 1, deps mtime 123 (VALID)
-    ../../c.cc
-    ../../build/a.h
-    gen/b.h
-    ../../out/Release/gen/no.h
-    ../../dir3/path/b.h
-    ../../c3.hh
-'''
-
-
-gn_input = json.loads(r'''
-{
-   "others": [],
-   "targets": {
-      "//:All": {
-      },
-      "//:base": {
-         "public": [ "//base/p.h" ],
-         "sources": [ "//base/a.cc", "//base/a.h", "//base/b.hh" ],
-         "visibility": [ "*" ]
-      },
-      "//:star_public": {
-         "public": "*",
-         "sources": [ "//base/c.h", "//tmp/gen/a.h" ],
-         "visibility": [ "*" ]
-      }
-    }
-}
-''')
-
-
-whitelist = r'''
-   white-front.c
-a/b/c/white-end.c # comment
- dir/white-both.c  #more comment
-
-# empty line above
-a/b/c
-'''
-
-
-class CheckGnHeadersTest(unittest.TestCase):
-  def testNinja(self):
-    headers = check_gn_headers.ParseNinjaDepsOutput(
-        ninja_input.split('\n'), 'out/Release', False)
-    expected = {
-        'dir/path/b.h': ['obj/a.o'],
-        'c.hh': ['obj/a.o'],
-        'dir3/path/b.h': ['obj/c.o'],
-        'c3.hh': ['obj/c.o'],
-    }
-    self.assertEquals(headers, expected)
-
-  def testGn(self):
-    headers = check_gn_headers.ParseGNProjectJSON(gn_input,
-                                                  'out/Release', 'tmp')
-    expected = set([
-        'base/a.h',
-        'base/b.hh',
-        'base/c.h',
-        'base/p.h',
-        'out/Release/gen/a.h',
-    ])
-    self.assertEquals(headers, expected)
-
-  def testWhitelist(self):
-    output = check_gn_headers.ParseWhiteList(whitelist)
-    expected = set([
-        'white-front.c',
-        'a/b/c/white-end.c',
-        'dir/white-both.c',
-        'a/b/c',
-    ])
-    self.assertEquals(output, expected)
-
-
-if __name__ == '__main__':
-  logging.getLogger().setLevel(logging.DEBUG)
-  unittest.main(verbosity=2)
diff --git a/build/check_gn_headers_whitelist.txt b/build/check_gn_headers_whitelist.txt
deleted file mode 100644
index 5755df1..0000000
--- a/build/check_gn_headers_whitelist.txt
+++ /dev/null
@@ -1,369 +0,0 @@
-# Do not add files to this whitelist unless you are adding a new OS or
-# changing the GN arguments on bots.
-
-ash/accelerators/accelerator_controller_delegate.h
-ash/accelerators/accelerator_controller_delegate_aura.h
-ash/accelerators/accelerator_table.h
-ash/ash_export.h
-ash/frame/frame_header.h
-ash/metrics/task_switch_metrics_recorder.h
-ash/metrics/task_switch_source.h
-ash/metrics/user_metrics_action.h
-ash/metrics/user_metrics_recorder.h
-ash/public/cpp/ash_public_export.h
-ash/public/cpp/ash_switches.h
-ash/public/cpp/config.h
-ash/public/cpp/shelf_types.h
-ash/session/session_observer.h
-ash/shell.h
-ash/system/devicetype_utils.h
-ash/wm/system_modal_container_event_filter_delegate.h
-cc/base/ring_buffer.h
-cc/cc_export.h
-cc/input/browser_controls_state.h
-cc/input/event_listener_properties.h
-cc/input/scrollbar.h
-cc/input/scroller_size_metrics.h
-cc/layers/performance_properties.h
-cc/layers/scrollbar_theme_painter.h
-cc/output/bsp_compare_result.h
-cc/resources/release_callback_impl.h
-cc/resources/return_callback.h
-cc/surfaces/surface_observer.h
-chrome/browser/android/android_theme_resources.h
-chrome/browser/android/resource_id.h
-chrome/browser/chromeos/certificate_provider/certificate_info.h
-chrome/browser/chromeos/certificate_provider/certificate_provider.h
-chrome/browser/chromeos/certificate_provider/certificate_provider_service.h
-chrome/browser/chromeos/certificate_provider/certificate_provider_service_factory.h
-chrome/browser/chromeos/certificate_provider/certificate_requests.h
-chrome/browser/chromeos/certificate_provider/pin_dialog_manager.h
-chrome/browser/chromeos/certificate_provider/sign_requests.h
-chrome/browser/chromeos/certificate_provider/thread_safe_certificate_map.h
-chrome/browser/chromeos/login/easy_unlock/easy_unlock_service_observer.h
-chrome/browser/chromeos/login/signin/oauth2_login_manager.h
-chrome/browser/chromeos/login/signin/oauth2_login_verifier.h
-chrome/browser/chromeos/login/signin/oauth2_token_fetcher.h
-chrome/browser/chromeos/profiles/profile_helper.h
-chrome/browser/chromeos/settings/cros_settings.h
-chrome/browser/chromeos/ui/request_pin_view.h
-chrome/browser/component_updater/component_installer_errors.h
-chrome/browser/download/download_file_icon_extractor.h
-chrome/browser/extensions/api/networking_cast_private/chrome_networking_cast_private_delegate.h
-chrome/browser/extensions/api/omnibox/omnibox_api_testbase.h
-chrome/browser/extensions/api/socket/mock_tcp_client_socket.h
-chrome/browser/mac/bluetooth_utility.h
-chrome/browser/media/router/mojo/media_route_provider_util_win.h
-chrome/browser/media/webrtc/desktop_media_list_ash.h
-chrome/browser/media/webrtc/desktop_media_list_observer.h
-chrome/browser/media/webrtc/rtp_dump_type.h
-chrome/browser/media_galleries/media_file_system_context.h
-chrome/browser/notifications/displayed_notifications_dispatch_callback.h
-chrome/browser/permissions/permission_queue_controller.h
-chrome/browser/prefs/active_profile_pref_service.h
-chrome/browser/rlz/chrome_rlz_tracker_delegate.h
-chrome/browser/ui/android/content_settings/subresource_filter_infobar_delegate.h
-chrome/browser/ui/app_icon_loader_delegate.h
-chrome/browser/ui/app_list/app_list_syncable_service_factory.h
-chrome/browser/ui/ash/ash_util.h
-chrome/browser/ui/ash/multi_user/multi_user_util.h
-chrome/browser/ui/network_profile_bubble.h
-chrome/browser/ui/passwords/manage_passwords_icon.h
-chrome/browser/ui/views/frame/browser_frame_header_ash.h
-chrome/browser/ui/webui/large_icon_source.h
-chrome/common/mac/app_shim_launch.h
-chrome/common/mac/app_shim_messages.h
-chrome/install_static/chromium_install_modes.h
-chrome/install_static/install_constants.h
-chrome/install_static/install_details.h
-chrome/install_static/install_modes.h
-chrome/install_static/install_util.h
-chrome/install_static/test/scoped_install_details.h
-chrome/installer/util/browser_distribution.h
-chrome/installer/util/google_update_constants.h
-chrome/installer/util/google_update_settings.h
-chrome/installer/util/util_constants.h
-chromeos/chromeos_export.h
-chromeos/login/login_state.h
-chromeos/login/scoped_test_public_session_login_state.h
-chromeos/settings/cros_settings_names.h
-chromeos/settings/cros_settings_provider.h
-components/browser_watcher/features.h
-components/browser_watcher/stability_paths.h
-components/cast_certificate/cast_crl_root_ca_cert_der-inc.h
-components/cdm/browser/cdm_message_filter_android.h
-components/contextual_search/browser/contextual_search_js_api_handler.h
-components/cryptauth/connection_finder.h
-components/cryptauth/connection_observer.h
-components/data_reduction_proxy/core/browser/data_use_group.h
-components/data_reduction_proxy/core/browser/data_use_group_provider.h
-components/data_use_measurement/core/url_request_classifier.h
-components/device_event_log/device_event_log_export.h
-components/dom_distiller/core/font_family_list.h
-components/dom_distiller/core/theme_list.h
-components/login/login_export.h
-components/nacl/browser/nacl_browser_delegate.h
-components/nacl/renderer/ppb_nacl_private.h
-components/omnibox/browser/autocomplete_i18n.h
-components/omnibox/browser/autocomplete_provider_client.h
-components/omnibox/browser/autocomplete_provider_listener.h
-components/password_manager/core/browser/keychain_migration_status_mac.h
-components/policy/core/browser/configuration_policy_handler_parameters.h
-components/policy/proto/policy_proto_export.h
-components/rlz/rlz_tracker_delegate.h
-components/session_manager/session_manager_types.h
-components/sessions/core/sessions_export.h
-components/sync/engine/connection_status.h
-components/sync/engine/net/network_time_update_callback.h
-components/translate/core/browser/translate_infobar_delegate.h
-components/user_manager/user.h
-components/user_manager/user_image/user_image.h
-components/user_manager/user_manager.h
-components/viz/display_compositor/display_provider.h
-components/viz/viz_export.h
-components/wifi/wifi_export.h
-components/wifi/wifi_service.h
-content/browser/background_fetch/background_fetch_constants.h
-content/browser/service_worker/service_worker_response_type.h
-content/common/gpu_stream_constants.h
-content/common/mac/attributed_string_coder.h
-content/public/browser/context_factory.h
-content/public/browser/media_observer.h
-content/renderer/external_popup_menu.h
-content/shell/android/shell_descriptors.h
-device/media_transfer_protocol/media_transfer_protocol_manager.h
-extensions/browser/api/clipboard/clipboard_api.h
-extensions/browser/api/networking_config/networking_config_service_factory.h
-extensions/browser/api/webcam_private/webcam.h
-extensions/browser/api/webcam_private/webcam_private_api.h
-extensions/browser/entry_info.h
-extensions/browser/extension_event_histogram_value.h
-extensions/browser/extension_function_histogram_value.h
-google_apis/gcm/base/encryptor.h
-google_apis/gcm/base/gcm_export.h
-gpu/GLES2/gl2chromium.h
-gpu/GLES2/gl2chromium_autogen.h
-gpu/GLES2/gl2extchromium.h
-gpu/command_buffer/client/context_support.h
-gpu/command_buffer/client/gles2_implementation_unittest_autogen.h
-gpu/command_buffer/client/gles2_interface_autogen.h
-gpu/command_buffer/client/gles2_interface_stub_autogen.h
-gpu/command_buffer/client/gles2_interface_stub_impl_autogen.h
-gpu/command_buffer/client/gpu_control_client.h
-gpu/command_buffer/client/ref_counted.h
-gpu/command_buffer/client/shared_memory_limits.h
-gpu/command_buffer/common/command_buffer_shared.h
-gpu/command_buffer/common/gles2_cmd_utils_autogen.h
-gpu/command_buffer/common/gles2_cmd_utils_implementation_autogen.h
-gpu/command_buffer/common/gpu_memory_allocation.h
-gpu/command_buffer/service/gl_stream_texture_image.h
-gpu/command_buffer/service/gles2_cmd_decoder_unittest_extensions_autogen.h
-gpu/command_buffer/service/memory_tracking.h
-gpu/command_buffer/service/progress_reporter.h
-gpu/config/gpu_lists_version.h
-gpu/gles2_conform_support/gtf/gtf_stubs.h
-gpu/gpu_export.h
-headless/lib/headless_macros.h
-headless/public/headless_tab_socket.h
-ipc/ipc_channel_proxy_unittest_messages.h
-ipc/ipc_message_null_macros.h
-ipc/param_traits_size_macros.h
-media/audio/audio_logging.h
-media/audio/sounds/test_data.h
-media/base/routing_token_callback.h
-media/base/video_renderer_sink.h
-media/cast/common/mod_util.h
-media/cast/net/rtcp/rtcp_session.h
-media/filters/ffmpeg_aac_bitstream_converter.h
-media/filters/ffmpeg_h264_to_annex_b_bitstream_converter.h
-media/filters/h264_to_annex_b_bitstream_converter.h
-media/formats/mp4/avc.h
-media/formats/mp4/bitstream_converter.h
-media/formats/mp4/fourccs.h
-media/formats/mp4/rcheck.h
-media/formats/mpeg/adts_stream_parser.h
-media/formats/mpeg/mpeg1_audio_stream_parser.h
-media/formats/mpeg/mpeg_audio_stream_parser_base.h
-media/gpu/media_gpu_export.h
-mojo/edk/system/broker_messages.h
-mojo/edk/system/system_impl_export.h
-mojo/public/cpp/bindings/strong_associated_binding_set.h
-mojo/public/cpp/bindings/tests/mojo_test_blink_export.h
-mojo/public/cpp/test_support/test_support.h
-net/base/winsock_init.h
-net/cert/cert_type.h
-net/cert/cert_verify_proc_android.h
-net/cert/scoped_nss_types.h
-net/dns/notify_watcher_mac.h
-net/http/http_status_code_list.h
-net/http/transport_security_state_static.h
-net/quic/core/session_notifier_interface.h
-ppapi/cpp/pass_ref.h
-ppapi/lib/gl/include/GLES2/gl2.h
-ppapi/lib/gl/include/GLES2/gl2ext.h
-ppapi/lib/gl/include/GLES2/gl2platform.h
-ppapi/lib/gl/include/KHR/khrplatform.h
-ppapi/nacl_irt/irt_manifest.h
-ppapi/nacl_irt/public/irt_ppapi.h
-ppapi/native_client/src/shared/ppapi_proxy/ppruntime.h
-ppapi/native_client/src/untrusted/pnacl_irt_shim/irt_shim_ppapi.h
-ppapi/native_client/src/untrusted/pnacl_irt_shim/pnacl_shim.h
-ppapi/native_client/src/untrusted/pnacl_irt_shim/shim_ppapi.h
-ppapi/proxy/dispatch_reply_message.h
-ppapi/proxy/plugin_proxy_delegate.h
-ppapi/proxy/plugin_resource_callback.h
-ppapi/proxy/ppapi_proxy_export.h
-ppapi/proxy/resource_message_filter.h
-ppapi/proxy/video_decoder_constants.h
-ppapi/shared_impl/api_id.h
-ppapi/shared_impl/dir_contents.h
-ppapi/shared_impl/ppapi_shared_export.h
-ppapi/shared_impl/singleton_resource_id.h
-remoting/base/chromoting_event_log_writer.h
-remoting/base/logging.h
-remoting/client/display/gl_renderer_delegate.h
-remoting/client/display/gl_texture_ids.h
-remoting/codec/webrtc_video_encoder.h
-remoting/host/linux/x11_keyboard.h
-remoting/host/worker_process_ipc_delegate.h
-remoting/protocol/audio_source.h
-remoting/protocol/audio_stream.h
-remoting/protocol/cursor_shape_stub.h
-remoting/protocol/message_channel_factory.h
-remoting/protocol/test_event_matchers.h
-remoting/protocol/video_feedback_stub.h
-remoting/protocol/video_stream.h
-sandbox/linux/system_headers/capability.h
-services/service_manager/public/c/main.h
-services/ui/ws/ids.h
-skia/ext/convolver_mips_dspr2.h
-skia/ext/skia_commit_hash.h
-testing/gmock_mutant.h
-third_party/blink/renderer/bindings/modules/v8/serialization/WebCryptoSubTags.h
-third_party/blink/renderer/core/animation/CSSInterpolationEnvironment.h
-third_party/blink/renderer/core/animation/SVGInterpolationEnvironment.h
-third_party/blink/renderer/core/css/resolver/StyleBuilder.h
-third_party/blink/renderer/core/css/threaded/MultiThreadedTestUtil.h
-third_party/blink/renderer/core/css/zoomAdjustedPixelValue.h
-third_party/blink/renderer/core/dom/ArrayBufferViewHelpers.h
-third_party/blink/renderer/core/editing/finder/FindOptions.h
-third_party/blink/renderer/core/paint/FindPaintOffsetAndVisualRectNeedingUpdate.h
-third_party/blink/renderer/core/style/ShapeValue.h
-third_party/blink/renderer/core/style/TransformOrigin.h
-third_party/blink/renderer/platform/EncryptedMediaRequest.h
-third_party/blink/renderer/platform/fonts/FontSelector.h
-third_party/blink/renderer/platform/fonts/Glyph.h
-third_party/blink/renderer/platform/graphics/cpu/arm/WebGLImageConversionNEON.h
-third_party/blink/renderer/platform/graphics/cpu/mips/WebGLImageConversionMSA.h
-third_party/blink/renderer/platform/graphics/paint/PaintImage.h
-third_party/blink/renderer/platform/scheduler/base/task_queue.h
-third_party/blink/renderer/platform/text/TabSize.h
-third_party/blink/renderer/platform/text/TextDirection.h
-third_party/blink/renderer/platform/transforms/TransformOperation.h
-third_party/blink/public/platform/WebSourceLocation.h
-third_party/blink/public/platform/WebTouchInfo.h
-third_party/blink/public/platform/modules/media_capabilities/WebMediaCapabilitiesInfo.h
-third_party/cacheinvalidation/src/google/cacheinvalidation/impl/build_constants.h
-third_party/expat/files/lib/ascii.h
-third_party/expat/files/lib/asciitab.h
-third_party/expat/files/lib/expat_config.h
-third_party/expat/files/lib/expat_external.h
-third_party/expat/files/lib/iasciitab.h
-third_party/expat/files/lib/internal.h
-third_party/expat/files/lib/latin1tab.h
-third_party/expat/files/lib/nametab.h
-third_party/expat/files/lib/utf8tab.h
-third_party/expat/files/lib/xmlrole.h
-third_party/expat/files/lib/xmltok.h
-third_party/expat/files/lib/xmltok_impl.h
-third_party/harfbuzz-ng/src/hb-ot-cbdt-table.hh
-third_party/harfbuzz-ng/src/hb-ot-cmap-table.hh
-third_party/harfbuzz-ng/src/hb-ot-glyf-table.hh
-third_party/harfbuzz-ng/src/hb-ot-layout-jstf-table.hh
-third_party/harfbuzz-ng/src/hb-ot-os2-table.hh
-third_party/hunspell/src/hunspell/hunvisapi.h
-third_party/khronos/EGL/egl.h
-third_party/khronos/EGL/eglext.h
-third_party/khronos/EGL/eglplatform.h
-third_party/khronos/GLES2/gl2.h
-third_party/khronos/GLES2/gl2ext.h
-third_party/khronos/GLES2/gl2platform.h
-third_party/khronos/GLES3/gl3.h
-third_party/khronos/GLES3/gl3platform.h
-third_party/khronos/KHR/khrplatform.h
-third_party/leveldatabase/chromium_logger.h
-third_party/libaddressinput/chromium/addressinput_util.h
-third_party/libphonenumber/phonenumber_api.h
-third_party/libudev/libudev0.h
-third_party/libudev/libudev1.h
-third_party/libvpx/source/config/linux/x64/vp8_rtcd.h
-third_party/libvpx/source/config/linux/x64/vp9_rtcd.h
-third_party/libvpx/source/config/linux/x64/vpx_config.h
-third_party/libvpx/source/config/linux/x64/vpx_dsp_rtcd.h
-third_party/libvpx/source/config/linux/x64/vpx_scale_rtcd.h
-third_party/libvpx/source/config/nacl/vp8_rtcd.h
-third_party/libvpx/source/config/nacl/vp9_rtcd.h
-third_party/libvpx/source/config/nacl/vpx_config.h
-third_party/libvpx/source/config/nacl/vpx_dsp_rtcd.h
-third_party/libvpx/source/config/nacl/vpx_scale_rtcd.h
-third_party/libvpx/source/config/vpx_version.h
-third_party/libxslt/src/libxslt/xsltwin32config.h
-third_party/opus/src/src/opus_private.h
-third_party/opus/src/tests/test_opus_common.h
-third_party/protobuf/src/google/protobuf/compiler/csharp/csharp_names.h
-third_party/protobuf/src/google/protobuf/compiler/javanano/javanano_params.h
-third_party/qcms/src/halffloat.h
-third_party/qcms/src/tests/qcms_test_util.h
-third_party/qcms/src/tests/timing.h
-third_party/snappy/linux/config.h
-third_party/speech-dispatcher/libspeechd.h
-third_party/sqlite/sqlite3.h
-third_party/tcmalloc/chromium/src/addressmap-inl.h
-third_party/tcmalloc/chromium/src/base/basictypes.h
-third_party/tcmalloc/chromium/src/base/dynamic_annotations.h
-third_party/tcmalloc/chromium/src/base/googleinit.h
-third_party/tcmalloc/chromium/src/base/linux_syscall_support.h
-third_party/tcmalloc/chromium/src/base/spinlock_linux-inl.h
-third_party/tcmalloc/chromium/src/base/stl_allocator.h
-third_party/tcmalloc/chromium/src/base/thread_annotations.h
-third_party/tcmalloc/chromium/src/base/thread_lister.h
-third_party/tcmalloc/chromium/src/gperftools/malloc_extension_c.h
-third_party/tcmalloc/chromium/src/gperftools/malloc_hook_c.h
-third_party/tcmalloc/chromium/src/gperftools/tcmalloc.h
-third_party/tcmalloc/chromium/src/heap-profile-stats.h
-third_party/tcmalloc/chromium/src/libc_override.h
-third_party/tcmalloc/chromium/src/malloc_hook_mmap_linux.h
-third_party/tcmalloc/chromium/src/packed-cache-inl.h
-third_party/tcmalloc/chromium/src/page_heap_allocator.h
-third_party/tcmalloc/chromium/src/pagemap.h
-third_party/tcmalloc/chromium/src/stacktrace_config.h
-third_party/tcmalloc/chromium/src/stacktrace_x86-inl.h
-third_party/tcmalloc/chromium/src/system-alloc.h
-third_party/tcmalloc/chromium/src/tcmalloc_guard.h
-third_party/wayland/include/config.h
-third_party/wayland/include/src/wayland-version.h
-third_party/woff2/src/port.h
-third_party/yasm/source/config/linux/config.h
-third_party/yasm/source/config/linux/libyasm-stdint.h
-third_party/zlib/contrib/minizip/crypt.h
-tools/battor_agent/battor_protocol_types.h
-tools/gn/ordered_set.h
-tools/ipc_fuzzer/message_lib/all_message_null_macros.h
-ui/app_list/app_list_export.h
-ui/app_list/app_list_item.h
-ui/app_list/app_list_switches.h
-ui/base/clipboard/clipboard_test_template.h
-ui/events/keycodes/keyboard_codes_posix.h
-ui/gfx/overlay_transform.h
-ui/gfx/scoped_ns_graphics_context_save_gstate_mac.h
-ui/gfx/swap_result.h
-ui/gfx/sys_color_change_listener.h
-ui/gl/GL/glextchromium.h
-ui/gl/gl_bindings_api_autogen_egl.h
-ui/gl/gl_bindings_api_autogen_gl.h
-ui/gl/gl_bindings_api_autogen_glx.h
-ui/gl/gl_bindings_api_autogen_osmesa.h
-ui/gl/gpu_preference.h
-ui/gl/gpu_switching_observer.h
-ui/ozone/ozone_base_export.h
-ui/ozone/public/ozone_switches.h
diff --git a/build/check_return_value.py b/build/check_return_value.py
deleted file mode 100755
index c659d1e..0000000
--- a/build/check_return_value.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This program wraps an arbitrary command and prints "1" if the command ran
-successfully."""
-
-import os
-import subprocess
-import sys
-
-devnull = open(os.devnull, 'wb')
-if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull):
-  print 1
-else:
-  print 0
diff --git a/build/chromeos/create_vm_test_script.py b/build/chromeos/create_vm_test_script.py
deleted file mode 100755
index f14b818..0000000
--- a/build/chromeos/create_vm_test_script.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Creates a script that runs a CrOS VM test by delegating to
-build/chromeos/run_vm_test.py.
-"""
-
-import argparse
-import os
-import re
-import sys
-
-
-SCRIPT_TEMPLATE = """\
-#!/usr/bin/env python
-#
-# This file was generated by build/chromeos/create_vm_test_script.py
-
-import os
-import sys
-
-def main():
-  script_directory = os.path.dirname(__file__)
-  def ResolvePath(path):
-    return os.path.abspath(os.path.join(script_directory, path))
-
-  vm_test_script = os.path.abspath(
-      os.path.join(script_directory, '{vm_test_script}'))
-
-  vm_args = {vm_test_args}
-  path_args = {vm_test_path_args}
-  for arg, path in path_args:
-    vm_args.extend([arg, ResolvePath(path)])
-
-  os.execv(vm_test_script,
-           [vm_test_script] + vm_args + sys.argv[1:])
-
-if __name__ == '__main__':
-  sys.exit(main())
-"""
-
-def main(args):
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--script-output-path')
-  parser.add_argument('--output-directory')
-  parser.add_argument('--test-exe')
-  parser.add_argument('--runtime-deps-path')
-  parser.add_argument('--cros-cache')
-  parser.add_argument('--board')
-  args = parser.parse_args(args)
-
-
-  def RelativizePathToScript(path):
-    return os.path.relpath(path, os.path.dirname(args.script_output_path))
-
-  run_test_path = RelativizePathToScript(
-      os.path.join(os.path.dirname(__file__), 'run_vm_test.py'))
-
-  vm_test_args = [
-      '--board', args.board,
-      '-v',
-  ]
-  if args.test_exe:
-    vm_test_args.extend([
-        'vm-test',
-        '--test-exe',
-        args.test_exe,
-    ])
-  else:
-    vm_test_args.append('host-cmd')
-
-  vm_test_path_args = [
-      ('--cros-cache', RelativizePathToScript(args.cros_cache)),
-  ]
-  if args.runtime_deps_path:
-    vm_test_path_args.append(
-        ('--runtime-deps-path', RelativizePathToScript(args.runtime_deps_path)))
-  if args.output_directory:
-    vm_test_path_args.append(
-        ('--path-to-outdir', RelativizePathToScript(args.output_directory)))
-
-  with open(args.script_output_path, 'w') as script:
-    script.write(SCRIPT_TEMPLATE.format(
-        vm_test_script=run_test_path,
-        vm_test_args=str(vm_test_args),
-        vm_test_path_args=str(vm_test_path_args)))
-
-  os.chmod(args.script_output_path, 0750)
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
diff --git a/build/chromeos/run_vm_test.py b/build/chromeos/run_vm_test.py
deleted file mode 100755
index 00a7249..0000000
--- a/build/chromeos/run_vm_test.py
+++ /dev/null
@@ -1,242 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import contextlib
-import json
-import logging
-import os
-import re
-import stat
-import subprocess
-import sys
-
-
-CHROMIUM_SRC_PATH = os.path.abspath(os.path.join(
-    os.path.dirname(__file__), '..', '..'))
-
-# Use the android test-runner's gtest results support library for generating
-# output json ourselves.
-sys.path.insert(0, os.path.join(CHROMIUM_SRC_PATH, 'build', 'android'))
-from pylib.base import base_test_result
-from pylib.results import json_results
-
-CHROMITE_PATH = os.path.abspath(os.path.join(
-    CHROMIUM_SRC_PATH, 'third_party', 'chromite'))
-CROS_RUN_VM_TEST_PATH = os.path.abspath(os.path.join(
-    CHROMITE_PATH, 'bin', 'cros_run_vm_test'))
-
-
-_FILE_BLACKLIST = [
-  re.compile(r'.*build/chromeos.*'),
-  re.compile(r'.*build/cros_cache.*'),
-  re.compile(r'.*third_party/chromite.*'),
-]
-
-
-def read_runtime_files(runtime_deps_path, outdir):
-  if not runtime_deps_path:
-    return []
-
-  abs_runtime_deps_path = os.path.abspath(
-      os.path.join(outdir, runtime_deps_path))
-  with open(abs_runtime_deps_path) as runtime_deps_file:
-    files = [l.strip() for l in runtime_deps_file if l]
-  rel_file_paths = []
-  for f in files:
-    rel_file_path = os.path.relpath(
-        os.path.abspath(os.path.join(outdir, f)),
-        os.getcwd())
-    if not any(regex.match(rel_file_path) for regex in _FILE_BLACKLIST):
-      rel_file_paths.append(rel_file_path)
-
-  return rel_file_paths
-
-
-def host_cmd(args):
-  if not args.cmd:
-    logging.error('Must specify command to run on the host.')
-    return 1
-
-  cros_run_vm_test_cmd = [
-      CROS_RUN_VM_TEST_PATH,
-      '--start',
-      '--board', args.board,
-      '--cache-dir', args.cros_cache,
-  ]
-  if args.verbose:
-    cros_run_vm_test_cmd.append('--debug')
-
-  cros_run_vm_test_cmd += [
-      '--host-cmd',
-      '--',
-  ] + args.cmd
-
-  logging.info('Running the following command:')
-  logging.info(' '.join(cros_run_vm_test_cmd))
-
-  return subprocess.call(
-      cros_run_vm_test_cmd, stdout=sys.stdout, stderr=sys.stderr)
-
-
-def vm_test(args):
-  is_sanity_test = args.test_exe == 'cros_vm_sanity_test'
-
-  cros_run_vm_test_cmd = [
-      CROS_RUN_VM_TEST_PATH,
-      '--start',
-      '--board', args.board,
-      '--cache-dir', args.cros_cache,
-  ]
-
-  # cros_run_vm_test has trouble with relative paths that go up directories, so
-  # cd to src/, which should be the root of all data deps.
-  os.chdir(CHROMIUM_SRC_PATH)
-
-  runtime_files = read_runtime_files(
-      args.runtime_deps_path, args.path_to_outdir)
-  # If we're pushing files, we need to set the cwd.
-  if runtime_files:
-      cros_run_vm_test_cmd.extend(
-          ['--cwd', os.path.relpath(args.path_to_outdir, CHROMIUM_SRC_PATH)])
-  for f in runtime_files:
-    cros_run_vm_test_cmd.extend(['--files', f])
-
-  if args.test_launcher_summary_output and not is_sanity_test:
-    result_dir, result_file = os.path.split(args.test_launcher_summary_output)
-    # If args.test_launcher_summary_output is a file in cwd, result_dir will be
-    # an empty string, so replace it with '.' when this is the case so
-    # cros_run_vm_test can correctly handle it.
-    if not result_dir:
-      result_dir = '.'
-    vm_result_file = '/tmp/%s' % result_file
-    cros_run_vm_test_cmd += [
-      '--results-src', vm_result_file,
-      '--results-dest-dir', result_dir,
-    ]
-
-  if is_sanity_test:
-    # cros_run_vm_test's default behavior when no cmd is specified is the sanity
-    # test that's baked into the VM image. This test smoke-checks the system
-    # browser, so deploy our locally-built chrome to the VM before testing.
-    cros_run_vm_test_cmd += [
-        '--deploy',
-        '--build-dir', os.path.relpath(args.path_to_outdir, CHROMIUM_SRC_PATH),
-    ]
-  else:
-    cros_run_vm_test_cmd += [
-        '--cmd',
-        '--',
-        './' + args.test_exe,
-        '--test-launcher-shard-index=%d' % args.test_launcher_shard_index,
-        '--test-launcher-total-shards=%d' % args.test_launcher_total_shards,
-    ]
-
-  if args.test_launcher_summary_output and not is_sanity_test:
-    cros_run_vm_test_cmd += [
-      '--test-launcher-summary-output=%s' % vm_result_file,
-    ]
-
-  logging.info('Running the following command:')
-  logging.info(' '.join(cros_run_vm_test_cmd))
-
-  # deploy_chrome needs a set of GN args used to build chrome to determine if
-  # certain libraries need to be pushed to the VM. It looks for the args via an
-  # env var. To trigger the default deploying behavior, give it a dummy set of
-  # args.
-  # TODO(crbug.com/823996): Make the GN-dependent deps controllable via cmd-line
-  # args.
-  env_copy = os.environ.copy()
-  if not env_copy.get('GN_ARGS'):
-    env_copy['GN_ARGS'] = 'is_chromeos = true'
-  env_copy['PATH'] = env_copy['PATH'] + ':' + os.path.join(CHROMITE_PATH, 'bin')
-  rc = subprocess.call(
-      cros_run_vm_test_cmd, stdout=sys.stdout, stderr=sys.stderr, env=env_copy)
-
-  # Create a simple json results file for the sanity test if needed. The results
-  # will contain only one test ('cros_vm_sanity_test'), and will either be a
-  # PASS or FAIL depending on the return code of cros_run_vm_test above.
-  if args.test_launcher_summary_output and is_sanity_test:
-    result = (base_test_result.ResultType.FAIL if rc else
-                  base_test_result.ResultType.PASS)
-    sanity_test_result = base_test_result.BaseTestResult(
-        'cros_vm_sanity_test', result)
-    run_results = base_test_result.TestRunResults()
-    run_results.AddResult(sanity_test_result)
-    with open(args.test_launcher_summary_output, 'w') as f:
-      json.dump(json_results.GenerateResultsDict([run_results]), f)
-
-  return rc
-
-
-def main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--verbose', '-v', action='store_true')
-  # Required args.
-  parser.add_argument(
-      '--board', type=str, required=True, help='Type of CrOS device.')
-  subparsers = parser.add_subparsers(dest='test_type')
-  # Host-side test args.
-  host_cmd_parser = subparsers.add_parser(
-      'host-cmd',
-      help='Runs a host-side test. Pass the host-side command to run after '
-           '"--". Hostname and port for the VM will be 127.0.0.1:9222.')
-  host_cmd_parser.set_defaults(func=host_cmd)
-  host_cmd_parser.add_argument(
-      '--cros-cache', type=str, required=True, help='Path to cros cache.')
-  host_cmd_parser.add_argument('cmd', nargs=argparse.REMAINDER)
-  # VM-side test args.
-  vm_test_parser = subparsers.add_parser(
-      'vm-test',
-      help='Runs a vm-side gtest.')
-  vm_test_parser.set_defaults(func=vm_test)
-  vm_test_parser.add_argument(
-      '--cros-cache', type=str, required=True, help='Path to cros cache.')
-  vm_test_parser.add_argument(
-      '--test-exe', type=str, required=True,
-      help='Path to test executable to run inside VM. If the value is '
-           '"cros_vm_sanity_test", the sanity test that ships with the VM '
-           'image runs instead. This test smoke-checks the system browser '
-           '(eg: loads a simple webpage, executes some javascript), so a '
-           'fully-built Chrome binary that can get deployed to the VM is '
-           'expected to be available in the out-dir.')
-  vm_test_parser.add_argument(
-      '--path-to-outdir', type=str, required=True,
-      help='Path to output directory, all of whose contents will be deployed '
-           'to the device.')
-  vm_test_parser.add_argument(
-      '--runtime-deps-path', type=str,
-      help='Runtime data dependency file from GN.')
-  vm_test_parser.add_argument(
-      '--test-launcher-summary-output', type=str,
-      help='When set, will pass the same option down to the test and retrieve '
-           'its result file at the specified location.')
-  vm_test_parser.add_argument(
-      '--test-launcher-shard-index',
-      type=int, default=os.environ.get('GTEST_SHARD_INDEX', 0),
-      help='Index of the external shard to run.')
-  vm_test_parser.add_argument(
-      '--test-launcher-total-shards',
-      type=int, default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
-      help='Total number of external shards.')
-  args = parser.parse_args()
-
-  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN)
-
-  if not os.path.exists('/dev/kvm'):
-    logging.error('/dev/kvm is missing. Is KVM installed on this machine?')
-    return 1
-  elif not os.access('/dev/kvm', os.W_OK):
-    logging.error(
-        '/dev/kvm is not writable as current user. Perhaps you should be root?')
-    return 1
-
-  args.cros_cache = os.path.abspath(args.cros_cache)
-  return args.func(args)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/ciopfs b/build/ciopfs
deleted file mode 100755
index 23f87e8..0000000
--- a/build/ciopfs
+++ /dev/null
Binary files differ
diff --git a/build/ciopfs.sha1 b/build/ciopfs.sha1
deleted file mode 100644
index c1855a3..0000000
--- a/build/ciopfs.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5454b3c4f1c9992047e7ae9d6d14d5b49b1b12f3
\ No newline at end of file
diff --git a/build/cipd/clobber_cipd_root.py b/build/cipd/clobber_cipd_root.py
deleted file mode 100755
index 5d36c72..0000000
--- a/build/cipd/clobber_cipd_root.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Clobbers a CIPD root."""
-
-import argparse
-import os
-import shutil
-import sys
-
-
-def main():
-  parser = argparse.ArgumentParser(
-      description='Clobbers the CIPD root in the given directory.')
-
-  parser.add_argument(
-      '--root',
-      required=True,
-      help='Root directory for dependency.')
-  args = parser.parse_args()
-
-  cipd_root_dir = os.path.join(args.root, '.cipd')
-  if os.path.exists(cipd_root_dir):
-    shutil.rmtree(cipd_root_dir)
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/clobber.py b/build/clobber.py
deleted file mode 100755
index 18791c2..0000000
--- a/build/clobber.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This script provides methods for clobbering build directories."""
-
-import argparse
-import os
-import shutil
-import subprocess
-import sys
-
-
-def extract_gn_build_commands(build_ninja_file):
-  """Extracts from a build.ninja the commands to run GN.
-
-  The commands to run GN are the gn rule and build.ninja build step at the
-  top of the build.ninja file. We want to keep these when deleting GN builds
-  since we want to preserve the command-line flags to GN.
-
-  On error, returns the empty string."""
-  result = ""
-  with open(build_ninja_file, 'r') as f:
-    # Read until the third blank line. GN first writes
-    # "ninja_required_version = x.y.z", then the "rule gn" section, and then
-    # the "build build.ninja" step, each separated by a blank line.
-    num_blank_lines = 0
-    while num_blank_lines < 3:
-      line = f.readline()
-      if len(line) == 0:
-        return ''  # Unexpected EOF.
-      result += line
-      if line[0] == '\n':
-        num_blank_lines = num_blank_lines + 1
-  return result
-
-
-def delete_dir(build_dir):
-  if os.path.islink(build_dir):
-    return
-  # For unknown reasons (anti-virus?) rmtree of Chromium build directories
-  # often fails on Windows.
-  if sys.platform.startswith('win'):
-    subprocess.check_call(['rmdir', '/s', '/q', build_dir], shell=True)
-  else:
-    shutil.rmtree(build_dir)
-
-
-def delete_build_dir(build_dir):
-  # GN writes a build.ninja.d file. Note that not all GN builds have args.gn.
-  build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d')
-  if not os.path.exists(build_ninja_d_file):
-    delete_dir(build_dir)
-    return
-
-  # GN builds aren't automatically regenerated when you sync. To avoid
-  # messing with the GN workflow, erase everything but the args file, and
-  # write a dummy build.ninja file that will automatically rerun GN the next
-  # time Ninja is run.
-  build_ninja_file = os.path.join(build_dir, 'build.ninja')
-  build_commands = extract_gn_build_commands(build_ninja_file)
-
-  try:
-    gn_args_file = os.path.join(build_dir, 'args.gn')
-    with open(gn_args_file, 'r') as f:
-      args_contents = f.read()
-  except IOError:
-    args_contents = ''
-
-  e = None
-  try:
-    # delete_dir and os.mkdir() may fail, such as when chrome.exe is running,
-    # and we still want to restore args.gn/build.ninja/build.ninja.d, so catch
-    # the exception and rethrow it later.
-    delete_dir(build_dir)
-    os.mkdir(build_dir)
-  except Exception as e:
-    pass
-
-  # Put back the args file (if any).
-  if args_contents != '':
-    with open(gn_args_file, 'w') as f:
-      f.write(args_contents)
-
-  # Write the build.ninja file sufficiently to regenerate itself.
-  with open(os.path.join(build_dir, 'build.ninja'), 'w') as f:
-    if build_commands != '':
-      f.write(build_commands)
-    else:
-      # Couldn't parse the build.ninja file, write a default thing.
-      f.write('''rule gn
-command = gn -q gen //out/%s/
-description = Regenerating ninja files
-
-build build.ninja: gn
-generator = 1
-depfile = build.ninja.d
-''' % (os.path.split(build_dir)[1]))
-
-  # Write a .d file for the build which references a nonexistent file. This
-  # will make Ninja always mark the build as dirty.
-  with open(build_ninja_d_file, 'w') as f:
-    f.write('build.ninja: nonexistant_file.gn\n')
-
-  if e:
-    # Rethrow the exception we caught earlier.
-    raise e
-
-def clobber(out_dir):
-  """Clobber contents of build directory.
-
-  Don't delete the directory itself: some checkouts have the build directory
-  mounted."""
-  for f in os.listdir(out_dir):
-    path = os.path.join(out_dir, f)
-    if os.path.isfile(path):
-      os.unlink(path)
-    elif os.path.isdir(path):
-      delete_build_dir(path)
-
-
-def main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument('out_dir', help='The output directory to clobber')
-  args = parser.parse_args()
-  clobber(args.out_dir)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/common.croc b/build/common.croc
deleted file mode 100644
index fde7a8b..0000000
--- a/build/common.croc
+++ /dev/null
@@ -1,127 +0,0 @@
-# -*- python -*-
-# Crocodile config file for Chromium - settings common to all platforms
-#
-# This should be specified before the platform-specific config, for example:
-#       croc -c chrome_common.croc -c linux/chrome_linux.croc
-
-{
-  # List of root directories, applied in order
-  'roots' : [
-    # Sub-paths we specifically care about and want to call out
-    {
-      'root' : '_/src',
-      'altname' : 'CHROMIUM',
-    },
-  ],
-
-  # List of rules, applied in order
-  # Note that any 'include':0 rules here will be overridden by the 'include':1
-  # rules in the platform-specific configs.
-  'rules' : [
-    # Don't scan for executable lines in uninstrumented C++ header files
-    {
-      'regexp' : '.*\\.(h|hpp)$',
-      'add_if_missing' : 0,
-    },
-
-    # Groups
-    {
-      'regexp' : '',
-      'group' : 'source',
-    },
-    {
-      'regexp' : '.*_(test|unittest|uitest|browsertest)\\.',
-      'group' : 'test',
-    },
-
-    # Languages
-    {
-      'regexp' : '.*\\.(c|h)$',
-      'language' : 'C',
-    },
-    {
-      'regexp' : '.*\\.(cc|cpp|hpp)$',
-      'language' : 'C++',
-    },
-
-    # Files/paths to include.  Specify these before the excludes, since rules
-    # are in order.
-    {
-      'regexp' : '^CHROMIUM/(base|media|net|printing|remoting|chrome|content|webkit/glue|native_client)/',
-      'include' : 1,
-    },
-    # Don't include subversion or mercurial SCM dirs
-    {
-      'regexp' : '.*/(\\.svn|\\.hg)/',
-      'include' : 0,
-    },
-    # Don't include output dirs
-    {
-      'regexp' : '.*/(Debug|Release|out|xcodebuild)/',
-      'include' : 0,
-    },
-    # Don't include third-party source
-    {
-      'regexp' : '.*/third_party/',
-      'include' : 0,
-    },
-    # We don't run the V8 test suite, so we don't care about V8 coverage.
-    {
-      'regexp' : '.*/v8/',
-      'include' : 0,
-    },
-  ],
-
-  # Paths to add source from
-  'add_files' : [
-    'CHROMIUM'
-  ],
-
-  # Statistics to print
-  'print_stats' : [
-    {
-      'stat' : 'files_executable',
-      'format' : '*RESULT FilesKnown: files_executable= %d files',
-    },
-    {
-      'stat' : 'files_instrumented',
-      'format' : '*RESULT FilesInstrumented: files_instrumented= %d files',
-    },
-    {
-      'stat' : '100.0 * files_instrumented / files_executable',
-      'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g percent',
-    },
-    {
-      'stat' : 'lines_executable',
-      'format' : '*RESULT LinesKnown: lines_known= %d lines',
-    },
-    {
-      'stat' : 'lines_instrumented',
-      'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines',
-    },
-    {
-      'stat' : 'lines_covered',
-      'format' : '*RESULT LinesCoveredSource: lines_covered_source= %d lines',
-      'group' : 'source',
-    },
-    {
-      'stat' : 'lines_covered',
-      'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines',
-      'group' : 'test',
-    },
-    {
-      'stat' : '100.0 * lines_covered / lines_executable',
-      'format' : '*RESULT PercentCovered: percent_covered= %g percent',
-    },
-    {
-      'stat' : '100.0 * lines_covered / lines_executable',
-      'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g percent',
-      'group' : 'source',
-    },
-    {
-      'stat' : '100.0 * lines_covered / lines_executable',
-      'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g percent',
-      'group' : 'test',
-    },
-  ],
-}
diff --git a/build/common.gypi b/build/common.gypi
deleted file mode 100644
index 2341fd7..0000000
--- a/build/common.gypi
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# IMPORTANT:
-# Please don't directly include this file if you are building via gyp_chromium,
-# since gyp_chromium automatically forces its inclusion.
-{
-  'target_defaults': {
-    'default_configuration': 'Release',
-    'configurations': {
-      'Release': {},
-    },
-  },
-}
diff --git a/build/compiled_action.gni b/build/compiled_action.gni
deleted file mode 100644
index 02170af..0000000
--- a/build/compiled_action.gni
+++ /dev/null
@@ -1,165 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file introduces two related templates that act like action and
-# action_foreach, but instead of running a Python script they compile a
-# given tool in the host toolchain and run it (either once or over the list
-# of inputs, depending on the variant).
-#
-# Parameters
-#
-#   tool (required)
-#       [label] Label of the tool to run. This should be an executable, and
-#       this label should not include a toolchain (anything in parens). The
-#       host compile of this tool will be used.
-#
-#   outputs (required)
-#       [list of files] Like the outputs of action (if using "compiled_action",
-#       this would be just the list of outputs), or action_foreach (if using
-#       "compiled_action_foreach", this would contain source expansions mapping
-#       input to output files).
-#
-#   args (required)
-#       [list of strings] Same meaning as action/action_foreach.
-#
-#   inputs (optional)
-#       Files the binary takes as input. The step will be re-run whenever any
-#       of these change. If inputs is empty, the step will run only when the
-#       binary itself changes.
-#
-#   visibility
-#   deps
-#   testonly   (all optional)
-#       Same meaning as action/action_foreach.
-#
-#
-# Example of usage:
-#
-#   compiled_action("run_my_tool") {
-#     tool = "//tools/something:mytool"
-#     outputs = [
-#       "$target_gen_dir/mysource.cc",
-#       "$target_gen_dir/mysource.h",
-#     ]
-#
-#     # The tool takes this input.
-#     inputs = [ "my_input_file.idl" ]
-#
-#     # In this case, the tool takes as arguments the input file and the output
-#     # build dir (both relative to the "cd" that the script will be run in)
-#     # and will produce the output files listed above.
-#     args = [
-#       rebase_path("my_input_file.idl", root_build_dir),
-#       "--output-dir", rebase_path(target_gen_dir, root_build_dir),
-#     ]
-#   }
-#
-# You would typically declare your tool like this:
-#   if (host_toolchain == current_toolchain) {
-#     executable("mytool") {
-#       ...
-#     }
-#   }
-# The if statement around the executable is optional. That says "I only care
-# about this target in the host toolchain". Usually this is what you want, and
-# it avoids unnecessarily compiling your tool for the target platform. But if
-# you need a target build of your tool as well, just leave off the if statement.
-
-if (host_os == "win") {
-  _host_executable_suffix = ".exe"
-} else {
-  _host_executable_suffix = ""
-}
-
-template("compiled_action") {
-  assert(defined(invoker.tool), "tool must be defined for $target_name")
-  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
-  assert(defined(invoker.args), "args must be defined for $target_name")
-
-  assert(!defined(invoker.sources),
-         "compiled_action doesn't take a sources arg. Use inputs instead.")
-
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "data_deps",
-                             "deps",
-                             "inputs",
-                             "outputs",
-                             "testonly",
-                             "visibility",
-                           ])
-    if (!defined(deps)) {
-      deps = []
-    }
-    if (!defined(inputs)) {
-      inputs = []
-    }
-
-    script = "//build/gn_run_binary.py"
-
-    # Construct the host toolchain version of the tool.
-    host_tool = invoker.tool + "($host_toolchain)"
-
-    # Get the path to the executable. Currently, this assumes that the tool
-    # does not specify output_name so that the target name is the name to use.
-    # If that's not the case, we'll need another argument to the script to
-    # specify this, since we can't know what the output name is (it might be in
-    # another file not processed yet).
-    host_executable =
-        get_label_info(host_tool, "root_out_dir") + "/" +
-        get_label_info(host_tool, "name") + _host_executable_suffix
-
-    deps += [ host_tool ]
-
-    # The script takes as arguments the binary to run, and then the arguments
-    # to pass it.
-    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
-  }
-}
-
-template("compiled_action_foreach") {
-  assert(defined(invoker.sources), "sources must be defined for $target_name")
-  assert(defined(invoker.tool), "tool must be defined for $target_name")
-  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
-  assert(defined(invoker.args), "args must be defined for $target_name")
-
-  action_foreach(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "deps",
-                             "inputs",
-                             "outputs",
-                             "sources",
-                             "testonly",
-                             "visibility",
-                           ])
-    if (!defined(deps)) {
-      deps = []
-    }
-    if (!defined(inputs)) {
-      inputs = []
-    }
-
-    script = "//build/gn_run_binary.py"
-
-    # Construct the host toolchain version of the tool.
-    host_tool = invoker.tool + "($host_toolchain)"
-
-    # Get the path to the executable. Currently, this assumes that the tool
-    # does not specify output_name so that the target name is the name to use.
-    # If that's not the case, we'll need another argument to the script to
-    # specify this, since we can't know what the output name is (it might be in
-    # another file not processed yet).
-    host_executable =
-        get_label_info(host_tool, "root_out_dir") + "/" +
-        get_label_info(host_tool, "name") + _host_executable_suffix
-
-    deps += [ host_tool ]
-
-    # The script takes as arguments the binary to run, and then the arguments
-    # to pass it.
-    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
-  }
-}
diff --git a/build/config/BUILD.gn b/build/config/BUILD.gn
deleted file mode 100644
index 4ba4d33..0000000
--- a/build/config/BUILD.gn
+++ /dev/null
@@ -1,397 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/allocator.gni")
-import("//build/config/c++/c++.gni")
-import("//build/config/chrome_build.gni")
-import("//build/config/chromecast_build.gni")
-import("//build/config/coverage/coverage.gni")
-import("//build/config/crypto.gni")
-import("//build/config/dcheck_always_on.gni")
-import("//build/config/features.gni")
-
-# Subprojects need to override arguments in {mac,ios}_sdk_overrides.gni in their
-# .gn config, but those arguments are only used on macOS. Including
-# mac_sdk_overrides.gni ensures that this doesn't trigger an unused argument
-# warning.
-import("//build/config/mac/mac_sdk_overrides.gni")
-import("//build/config/ios/ios_sdk_overrides.gni")
-
-import("//build/config/pch.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/config/ui.gni")
-import("//build/toolchain/goma.gni")
-
-declare_args() {
-  # When set (the default) enables C++ iterator debugging in debug builds.
-  # Iterator debugging is always off in release builds (technically, this flag
-  # affects the "debug" config, which is always available but applied by
-  # default only in debug builds).
-  #
-  # Iterator debugging is generally useful for catching bugs. But it can
-  # introduce extra locking to check the state of an iterator against the state
-  # of the current object. For iterator- and thread-heavy code, this can
-  # significantly slow execution.
-  enable_iterator_debugging = true
-}
-
-# ==============================================
-#   PLEASE DO NOT ADD MORE THINGS TO THIS LIST
-# ==============================================
-#
-# Legacy feature defines applied to all targets.
-#
-# These are applied to every single compile in the build and most of them are
-# only relevant to a few files. This bloats command lines and causes
-# unnecessary recompiles when flags are flipped.
-#
-# To pass defines to source code from the build, use the buildflag system which
-# will write headers containing the defines you need. This isolates the define
-# and means its definition can participate in the build graph, only recompiling
-# things when it actually changes.
-#
-# See //build/buildflag_header.gni for instructions on generating headers.
-#
-# This will also allow you to scope your build flag to a BUILD.gn file (or a
-# .gni file if you need it from more than one place) rather than making global
-# flags. See //build/config/BUILDCONFIG.gn for advice on where to define
-# build flags.
-config("feature_flags") {
-  # Don't use deprecated V8 APIs anywhere.
-  defines = [ "V8_DEPRECATION_WARNINGS" ]
-  if (dcheck_always_on) {
-    defines += [ "DCHECK_ALWAYS_ON=1" ]
-    if (dcheck_is_configurable) {
-      defines += [ "DCHECK_IS_CONFIGURABLE=1" ]
-    }
-  }
-  if (use_udev) {
-    # TODO(brettw) should probably be "=1".
-    defines += [ "USE_UDEV" ]
-  }
-  if (use_aura) {
-    defines += [ "USE_AURA=1" ]
-  }
-  if (use_glib) {
-    defines += [ "USE_GLIB=1" ]
-  }
-  if (use_nss_certs) {
-    defines += [ "USE_NSS_CERTS=1" ]
-  }
-  if (use_ozone && !is_android) {
-    # Note that some Chrome OS builds unconditionally set |use_ozone| to true,
-    # but they also build some targets with the Android toolchain. This ensures
-    # that Android targets still build with USE_OZONE=0 in such cases.
-    #
-    # TODO(crbug.com/837032): Maybe this can be cleaned up if we can avoid
-    # setting use_ozone globally.
-    defines += [ "USE_OZONE=1" ]
-  }
-  if (use_x11) {
-    defines += [ "USE_X11=1" ]
-  }
-  if (use_allocator != "tcmalloc") {
-    defines += [ "NO_TCMALLOC" ]
-  }
-  if (is_asan || is_lsan || is_tsan || is_msan) {
-    defines += [
-      "MEMORY_TOOL_REPLACES_ALLOCATOR",
-      "MEMORY_SANITIZER_INITIAL_SIZE",
-    ]
-  }
-  if (is_asan) {
-    defines += [ "ADDRESS_SANITIZER" ]
-  }
-  if (is_lsan) {
-    defines += [ "LEAK_SANITIZER" ]
-  }
-  if (is_tsan) {
-    defines += [
-      "THREAD_SANITIZER",
-      "DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1",
-      "WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1",
-    ]
-  }
-  if (is_msan) {
-    defines += [ "MEMORY_SANITIZER" ]
-  }
-  if (is_ubsan || is_ubsan_null || is_ubsan_vptr || is_ubsan_security) {
-    defines += [ "UNDEFINED_SANITIZER" ]
-  }
-  if (use_clang_coverage) {
-    defines += [ "CLANG_COVERAGE" ]
-  }
-  if (safe_browsing_mode == 1) {
-    defines += [ "FULL_SAFE_BROWSING" ]
-    defines += [ "SAFE_BROWSING_CSD" ]
-    defines += [ "SAFE_BROWSING_DB_LOCAL" ]
-  } else if (safe_browsing_mode == 2) {
-    defines += [ "SAFE_BROWSING_DB_REMOTE" ]
-  }
-  if (is_official_build) {
-    defines += [ "OFFICIAL_BUILD" ]
-  }
-  if (is_chrome_branded) {
-    defines += [ "GOOGLE_CHROME_BUILD" ]
-  } else {
-    defines += [ "CHROMIUM_BUILD" ]
-  }
-  if (!fieldtrial_testing_like_official_build && !is_chrome_branded) {
-    defines += [ "FIELDTRIAL_TESTING_ENABLED" ]
-  }
-
-  # ==============================================
-  #   PLEASE DO NOT ADD MORE THINGS TO THIS LIST
-  # ==============================================
-  #
-  # See the comment at the top.
-}
-
-# Debug/release ----------------------------------------------------------------
-
-config("debug") {
-  defines = [
-    "_DEBUG",
-    "DYNAMIC_ANNOTATIONS_ENABLED=1",
-    "WTF_USE_DYNAMIC_ANNOTATIONS=1",
-  ]
-
-  if (is_nacl) {
-    defines += [ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_" ]
-  }
-
-  if (is_win) {
-    if (!enable_iterator_debugging) {
-      # Iterator debugging is enabled by default by the compiler on debug
-      # builds, and we have to tell it to turn it off.
-      defines += [ "_HAS_ITERATOR_DEBUGGING=0" ]
-    }
-  } else if (is_linux && current_cpu == "x64" && enable_iterator_debugging) {
-    # Enable libstdc++ debugging facilities to help catch problems early, see
-    # http://crbug.com/65151 .
-    # TODO(phajdan.jr): Should we enable this for all of POSIX?
-    defines += [ "_GLIBCXX_DEBUG=1" ]
-  }
-}
-
-config("release") {
-  defines = [ "NDEBUG" ]
-
-  # Sanitizers.
-  if (is_tsan) {
-    defines += [
-      "DYNAMIC_ANNOTATIONS_ENABLED=1",
-      "WTF_USE_DYNAMIC_ANNOTATIONS=1",
-    ]
-  } else {
-    defines += [ "NVALGRIND" ]
-    if (!is_nacl) {
-      # NaCl always enables dynamic annotations. Currently this value is set to
-      # 1 for all .nexes.
-      defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ]
-    }
-  }
-
-  if (is_ios) {
-    # Disable NSAssert and GTMDevAssert (from Google Toolbox for Mac). This
-    # follows Xcode's default behavior for Release builds.
-    defines += [ "NS_BLOCK_ASSERTIONS=1" ]
-  }
-}
-
-# Default libraries ------------------------------------------------------------
-
-# This config defines the default libraries applied to all targets.
-config("default_libs") {
-  if (is_win) {
-    # TODO(brettw) this list of defaults should probably be smaller, and
-    # instead the targets that use the less common ones (e.g. wininet or
-    # winspool) should include those explicitly.
-    libs = [
-      "advapi32.lib",
-      "comdlg32.lib",
-      "dbghelp.lib",
-      "dnsapi.lib",
-      "gdi32.lib",
-      "msimg32.lib",
-      "odbc32.lib",
-      "odbccp32.lib",
-      "oleaut32.lib",
-      "psapi.lib",
-      "shell32.lib",
-      "shlwapi.lib",
-      "user32.lib",
-      "usp10.lib",
-      "uuid.lib",
-      "version.lib",
-      "wininet.lib",
-      "winmm.lib",
-      "winspool.lib",
-      "ws2_32.lib",
-
-      # Please don't add more stuff here. We should actually be making this
-      # list smaller, since all common things should be covered. If you need
-      # some extra libraries, please just add a libs = [ "foo.lib" ] to your
-      # target that needs it.
-    ]
-    if (current_os == "winuwp") {
-      # These libraries are needed for Windows UWP (i.e. store apps).
-      libs += [
-        "dloadhelper.lib",
-        "WindowsApp.lib",
-      ]
-    } else {
-      # These libraries are not compatible with Windows UWP (i.e. store apps).
-      libs += [
-        "delayimp.lib",
-        "kernel32.lib",
-        "ole32.lib",
-      ]
-    }
-  } else if (is_android) {
-    libs = [
-      "dl",
-      "m",
-    ]
-  } else if (is_mac) {
-    # Targets should choose to explicitly link frameworks they require. Since
-    # linking can have run-time side effects, nothing should be listed here.
-    libs = []
-  } else if (is_ios) {
-    # The libraries listed here will be specified for both the target and the
-    # host. Only the common ones should be listed here.
-    libs = [
-      "CoreFoundation.framework",
-      "CoreGraphics.framework",
-      "CoreText.framework",
-      "Foundation.framework",
-    ]
-  } else if (is_linux) {
-    libs = [
-      "dl",
-      "pthread",
-      "rt",
-    ]
-  }
-}
-
-# Dependencies that all executables and shared libraries should have.
-group("exe_and_shlib_deps") {
-  public_deps = []
-  if (using_sanitizer) {
-    public_deps += [ "//build/config/sanitizers:deps" ]
-  }
-  if (use_custom_libcxx) {
-    public_deps += [ "//buildtools/third_party/libc++" ]
-  }
-  if (use_afl) {
-    public_deps += [ "//third_party/afl" ]
-  }
-
-  if (is_win && generate_order_files && !is_nacl) {
-    public_deps += [ "//tools/cygprofile_win" ]
-  }
-}
-
-# Executable configs -----------------------------------------------------------
-
-# Windows linker setup for EXEs and DLLs.
-if (is_win) {
-  _windows_linker_configs = [
-    "//build/config/win:sdk_link",
-    "//build/config/win:common_linker_setup",
-  ]
-}
-
-# This config defines the configs applied to all executables.
-config("executable_config") {
-  configs = []
-
-  if (is_win) {
-    configs += _windows_linker_configs
-
-    # Currently only turn on linker CFI for executables.
-    configs += [ "//build/config/win:cfi_linker" ]
-  } else if (is_mac) {
-    configs += [ "//build/config/mac:mac_dynamic_flags" ]
-  } else if (is_ios) {
-    configs += [
-      "//build/config/ios:ios_dynamic_flags",
-      "//build/config/ios:ios_executable_flags",
-    ]
-  } else if (is_linux || is_android || current_os == "aix") {
-    configs += [ "//build/config/gcc:executable_ldconfig" ]
-    if (is_android) {
-      configs += [ "//build/config/android:executable_config" ]
-    } else if (is_chromecast) {
-      configs += [ "//build/config/chromecast:executable_config" ]
-    }
-  }
-
-  # If we're using the prebuilt instrumented libraries with the sanitizers, we
-  # need to add ldflags to every binary to make sure they are picked up.
-  if (prebuilt_instrumented_libraries_available) {
-    configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
-  }
-  if (use_locally_built_instrumented_libraries) {
-    configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
-  }
-  configs += [ "//build/config/sanitizers:link_executable" ]
-}
-
-# Shared library configs -------------------------------------------------------
-
-# This config defines the configs applied to all shared libraries.
-config("shared_library_config") {
-  configs = []
-
-  if (is_win) {
-    configs += _windows_linker_configs
-  } else if (is_mac) {
-    configs += [ "//build/config/mac:mac_dynamic_flags" ]
-  } else if (is_ios) {
-    configs += [ "//build/config/ios:ios_dynamic_flags" ]
-  } else if (is_chromecast) {
-    configs += [ "//build/config/chromecast:shared_library_config" ]
-  }
-
-  # If we're using the prebuilt instrumented libraries with the sanitizers, we
-  # need to add ldflags to every binary to make sure they are picked up.
-  if (prebuilt_instrumented_libraries_available) {
-    configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
-  }
-  if (use_locally_built_instrumented_libraries) {
-    configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
-  }
-  configs += [ "//build/config/sanitizers:link_shared_library" ]
-}
-
-# Add this config to your target to enable precompiled headers.
-#
-# Precompiled headers are done on a per-target basis. If you have just a couple
-# of files, the time it takes to precompile (~2 seconds) can actually be longer
-# than the time saved. On a Z620, a 100 file target compiles about 2 seconds
-# faster with precompiled headers, with greater savings for larger targets.
-#
-# Recommend precompiled headers for targets with more than 50 .cc files.
-config("precompiled_headers") {
-  if (enable_precompiled_headers) {
-    if (is_win) {
-      # This is a string rather than a file GN knows about. It has to match
-      # exactly what's in the /FI flag below, and what might appear in the
-      # source code in quotes for an #include directive.
-      precompiled_header = "build/precompile.h"
-
-      # This is a file that GN will compile with the above header. It will be
-      # implicitly added to the sources (potentially multiple times, with one
-      # variant for each language used in the target).
-      precompiled_source = "//build/precompile.cc"
-
-      # Force include the header.
-      cflags = [ "/FI$precompiled_header" ]
-    } else if (is_mac) {
-      precompiled_source = "//build/precompile.h"
-    }
-  }
-}
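
The precompiled_headers config removed above was opt-in per target rather than part of the default config lists. A minimal GN sketch of how a target would have pulled it in; the target and file names here are hypothetical, for illustration only:

  # Hypothetical target opting in to precompiled headers; the config is a
  # no-op unless enable_precompiled_headers is set for the build.
  executable("my_tool") {
    sources = [ "my_tool.cc" ]
    configs += [ "//build/config:precompiled_headers" ]
  }
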
diff --git a/build/config/BUILDCONFIG.gn b/build/config/BUILDCONFIG.gn
deleted file mode 100644
index a0b62e5..0000000
--- a/build/config/BUILDCONFIG.gn
+++ /dev/null
@@ -1,696 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# =============================================================================
-# WHAT IS THIS FILE?
-# =============================================================================
-#
-# This is the master GN build configuration. This file is loaded after the
-# build args (args.gn) for the build directory and after the toplevel ".gn"
-# file (which points to this file as the build configuration).
-#
-# This file will be executed and the resulting context will be used to execute
-# every other file in the build. So variables declared here (that don't start
-# with an underscore) will be implicitly global.
-
-# =============================================================================
-# PLATFORM SELECTION
-# =============================================================================
-#
-# There are two main things to set: "os" and "cpu". The "toolchain" is the name
-# of the GN thing that encodes combinations of these things.
-#
-# Users typically only set the variables "target_os" and "target_cpu" in "gn
-# args"; the rest are set up by our build and are internal to GN.
-#
-# There are three different types of each of these things: The "host"
-# represents the computer doing the compile and never changes. The "target"
-# represents the main thing we're trying to build. The "current" represents
-# which configuration is currently being defined, which can be either the
-# host, the target, or something completely different (like nacl). GN will
-# run the same build file multiple times for the different required
-# configuration in the same build.
-#
-# This gives the following variables:
-#  - host_os, host_cpu, host_toolchain
-#  - target_os, target_cpu, default_toolchain
-#  - current_os, current_cpu, current_toolchain.
-#
-# Note the default_toolchain isn't symmetrical (you would expect
-# target_toolchain). This is because the "default" toolchain is a GN built-in
-# concept, and "target" is something our build sets up that's symmetrical with
-# its GYP counterpart. Potentially the built-in default_toolchain variable
-# could be renamed in the future.
-#
-# When writing build files, to do something only for the host:
-#   if (current_toolchain == host_toolchain) { ...
-
-if (target_os == "") {
-  target_os = host_os
-}
-
-if (target_cpu == "") {
-  if (target_os == "android") {
-    # If we're building for Android, we should assume that we want to
-    # build for ARM by default, not the host_cpu (which is likely x64).
-    # This allows us to not have to specify both target_os and target_cpu
-    # on the command line.
-    target_cpu = "arm"
-  } else {
-    target_cpu = host_cpu
-  }
-}
-
-if (current_cpu == "") {
-  current_cpu = target_cpu
-}
-if (current_os == "") {
-  current_os = target_os
-}
-
-# =============================================================================
-# BUILD FLAGS
-# =============================================================================
-#
-# This block lists input arguments to the build, along with their default
-# values.
-#
-# If a value is specified on the command line, it will overwrite the defaults
-# given in a declare_args block, otherwise the default will be used.
-#
-# YOU SHOULD ALMOST NEVER NEED TO ADD FLAGS TO THIS FILE. GN allows any file in
-# the build to declare build flags. If you need a flag for a single component,
-# you can just declare it in the corresponding BUILD.gn file.
-#
-# - If your feature is a single target, say //components/foo, you can put
-#   a declare_args() block in //components/foo/BUILD.gn and use it there.
-#   Nobody else in the build needs to see the flag.
-#
-# - Defines based on build variables should be implemented via the generated
-#   build flag header system. See //build/buildflag_header.gni. You can put
-#   the buildflag_header target in the same file as the build flag itself. You
-#   should almost never set "defines" directly.
-#
-# - If your flag toggles a target on and off or toggles between different
-#   versions of similar things, write a "group" target that forwards to the
-#   right target (or no target) depending on the value of the build flag. This
-#   group can be in the same BUILD.gn file as the build flag, and targets can
-#   depend unconditionally on the group rather than duplicating flag checks
-#   across many targets.
-#
-# - If a semi-random set of build files REALLY needs to know about a define and
-#   the above pattern for isolating the build logic in a forwarding group
-#   doesn't work, you can put the argument in a .gni file. This should be put
-#   in the lowest level of the build that knows about this feature (which should
-#   almost always be outside of the //build directory!).
-#
-# Other flag advice:
-#
-# - Use boolean values when possible. If you need a default value that expands
-#   to some complex thing in the default case (like the location of the
-#   compiler which would be computed by a script), use a default value of -1 or
-#   the empty string. Outside of the declare_args block, conditionally expand
-#   the default value as necessary.
-#
-# - Use a name like "use_foo" or "is_foo" (whatever is more appropriate for
-#   your feature) rather than just "foo".
-#
-# - Write good comments directly above the declaration with no blank line.
-#   These comments will appear as documentation in "gn args --list".
-#
-# - Don't call exec_script inside declare_args. This will execute the script
-#   even if the value is overridden, which is wasteful. See first bullet.
-
-declare_args() {
-  # Set to enable the official build level of optimization. This has nothing
-  # to do with branding, but enables an additional level of optimization above
-  # release (!is_debug). This might be better expressed as a tri-state
-  # (debug, release, official) but for historical reasons there are two
-  # separate flags.
-  is_official_build = false
-
-  # Whether we're a traditional desktop unix.
-  is_desktop_linux = current_os == "linux"
-
-  # Set to true when compiling with the Clang compiler.
-  is_clang = current_os != "linux" ||
-             (current_cpu != "s390x" && current_cpu != "s390" &&
-              current_cpu != "ppc64" && current_cpu != "ppc" &&
-              current_cpu != "mips" && current_cpu != "mips64")
-
-  # Allows the path to a custom target toolchain to be injected as a single
-  # argument, and set as the default toolchain.
-  custom_toolchain = ""
-
-  # This should not normally be set as a build argument.  It's here so that
-  # every toolchain can pass through the "global" value via toolchain_args().
-  host_toolchain = ""
-
-  # DON'T ADD MORE FLAGS HERE. Read the comment above.
-}
-
-declare_args() {
-  # Debug build. Enabling official builds automatically sets is_debug to false.
-  is_debug = !is_official_build
-}
-
-declare_args() {
-  # Component build. Setting to true compiles targets declared as "components"
-  # as shared libraries loaded dynamically. This speeds up development time.
-  # When false, components will be linked statically.
-  #
-  # For more information see
-  # https://chromium.googlesource.com/chromium/src/+/master/docs/component_build.md
-  is_component_build = is_debug && current_os != "ios"
-}
-
-assert(!(is_debug && is_official_build), "Can't do official debug builds")
-
-# ==============================================================================
-# TOOLCHAIN SETUP
-# ==============================================================================
-#
-# Here we set the default toolchain, as well as the variable host_toolchain
-# which will identify the toolchain corresponding to the local system when
-# doing cross-compiles. When not cross-compiling, this will be the same as the
-# default toolchain.
-#
-# We do this before anything else to make sure we complain about any
-# unsupported os/cpu combinations as early as possible.
-
-if (host_toolchain == "") {
-  # This should only happen in the top-level context.
-  # In a specific toolchain context, the toolchain_args()
-  # block should have propagated a value down.
-  # TODO(dpranke): Add some sort of assert here that verifies that
-  # no toolchain omitted host_toolchain from its toolchain_args().
-
-  if (host_os == "linux") {
-    if (target_os != "linux") {
-      # TODO(dpranke) - is_clang normally applies only to the target
-      # build, and there is no way to indicate that you want to override
-      # it for both the target build *and* the host build. Do we need to
-      # support this?
-      host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
-    } else if (is_clang) {
-      host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
-    } else {
-      host_toolchain = "//build/toolchain/linux:$host_cpu"
-    }
-  } else if (host_os == "mac") {
-    host_toolchain = "//build/toolchain/mac:clang_$host_cpu"
-  } else if (host_os == "win") {
-    # On Windows always use the target CPU for host builds for x86/x64. On the
-    # configurations we support this will always work and it saves build steps.
-    if (target_cpu == "x86" || target_cpu == "x64") {
-      if (is_clang) {
-        host_toolchain = "//build/toolchain/win:win_clang_$target_cpu"
-      } else {
-        host_toolchain = "//build/toolchain/win:$target_cpu"
-      }
-    } else if (is_clang) {
-      host_toolchain = "//build/toolchain/win:win_clang_$host_cpu"
-    } else {
-      host_toolchain = "//build/toolchain/win:$host_cpu"
-    }
-  } else if (host_os == "aix") {
-    host_toolchain = "//build/toolchain/aix:$host_cpu"
-  } else {
-    assert(false, "Unsupported host_os: $host_os")
-  }
-}
-
-_default_toolchain = ""
-
-if (target_os == "android") {
-  assert(host_os == "linux" || host_os == "mac",
-         "Android builds are only supported on Linux and Mac hosts.")
-  _default_toolchain = "//build/toolchain/android:android_clang_$target_cpu"
-} else if (target_os == "chromeos" || target_os == "linux") {
-  # See comments in build/toolchain/cros/BUILD.gn about board compiles.
-  if (is_clang) {
-    _default_toolchain = "//build/toolchain/linux:clang_$target_cpu"
-  } else {
-    _default_toolchain = "//build/toolchain/linux:$target_cpu"
-  }
-} else if (target_os == "fuchsia") {
-  _default_toolchain = "//build/toolchain/fuchsia:$target_cpu"
-} else if (target_os == "ios") {
-  _default_toolchain = "//build/toolchain/mac:ios_clang_$target_cpu"
-} else if (target_os == "mac") {
-  assert(host_os == "mac", "Mac cross-compiles are unsupported.")
-  _default_toolchain = host_toolchain
-} else if (target_os == "win") {
-  # On Windows we use the same toolchain for host and target by default.
-  # Beware, win cross builds have some caveats, see docs/win_cross.md
-  # TODO(thakis): See if we want to make 32-bit builds on mac hosts work.
-  assert(host_os != "mac" || target_cpu == "x64",
-         "Mac hosts can only build 64-bit chrome/win, https://crbug.com/794838")
-  if (is_clang) {
-    _default_toolchain = "//build/toolchain/win:win_clang_$target_cpu"
-  } else {
-    _default_toolchain = "//build/toolchain/win:$target_cpu"
-  }
-} else if (target_os == "winuwp") {
-  # Only target WinUWP for a Windows Store application; only x86, x64, arm
-  # and arm64 are supported target CPUs.
-  assert(target_cpu == "x86" || target_cpu == "x64" || target_cpu == "arm" ||
-         target_cpu == "arm64")
-  _default_toolchain = "//build/toolchain/win:uwp_$target_cpu"
-} else if (target_os == "aix") {
-  _default_toolchain = "//build/toolchain/aix:$target_cpu"
-} else {
-  assert(false, "Unsupported target_os: $target_os")
-}
-
-# If a custom toolchain has been set in the args, set it as default. Otherwise,
-# set the default toolchain for the platform (if any).
-if (custom_toolchain != "") {
-  set_default_toolchain(custom_toolchain)
-} else if (_default_toolchain != "") {
-  set_default_toolchain(_default_toolchain)
-}
-
-# =============================================================================
-# OS DEFINITIONS
-# =============================================================================
-#
-# We set these various is_FOO booleans for convenience in writing OS-based
-# conditions.
-#
-# - is_android, is_chromeos, is_ios, and is_win should be obvious.
-# - is_mac is set only for desktop Mac. It is not set on iOS.
-# - is_posix is true for mac and any Unix-like system (basically everything
-#   except Windows).
-# - is_linux is true for desktop Linux and ChromeOS, but not Android (which is
-#   generally too different despite being based on the Linux kernel).
-#
-# Do not add more is_* variants here for random lesser-used Unix systems like
-# aix or one of the BSDs. If you need to check these, just check the
-# current_os value directly.
-
-if (current_os == "win" || current_os == "winuwp") {
-  is_android = false
-  is_chromeos = false
-  is_fuchsia = false
-  is_ios = false
-  is_linux = false
-  is_mac = false
-  is_nacl = false
-  is_posix = false
-  is_win = true
-} else if (current_os == "mac") {
-  is_android = false
-  is_chromeos = false
-  is_fuchsia = false
-  is_ios = false
-  is_linux = false
-  is_mac = true
-  is_nacl = false
-  is_posix = true
-  is_win = false
-} else if (current_os == "android") {
-  is_android = true
-  is_chromeos = false
-  is_fuchsia = false
-  is_ios = false
-  is_linux = false
-  is_mac = false
-  is_nacl = false
-  is_posix = true
-  is_win = false
-} else if (current_os == "chromeos") {
-  is_android = false
-  is_chromeos = true
-  is_fuchsia = false
-  is_ios = false
-  is_linux = true
-  is_mac = false
-  is_nacl = false
-  is_posix = true
-  is_win = false
-} else if (current_os == "nacl") {
-  # current_os == "nacl" will be passed by the nacl toolchain definition.
-  # It is not set by default or on the command line. We treat it as a
-  # Posix variant.
-  is_android = false
-  is_chromeos = false
-  is_fuchsia = false
-  is_ios = false
-  is_linux = false
-  is_mac = false
-  is_nacl = true
-  is_posix = true
-  is_win = false
-} else if (current_os == "fuchsia") {
-  is_android = false
-  is_chromeos = false
-  is_fuchsia = true
-  is_ios = false
-  is_linux = false
-  is_mac = false
-  is_nacl = false
-  is_posix = false
-  is_win = false
-} else if (current_os == "ios") {
-  is_android = false
-  is_chromeos = false
-  is_fuchsia = false
-  is_ios = true
-  is_linux = false
-  is_mac = false
-  is_nacl = false
-  is_posix = true
-  is_win = false
-} else if (current_os == "linux") {
-  is_android = false
-  is_chromeos = false
-  is_fuchsia = false
-  is_ios = false
-  is_linux = true
-  is_mac = false
-  is_nacl = false
-  is_posix = true
-  is_win = false
-} else if (current_os == "aix") {
-  is_android = false
-  is_chromeos = false
-  is_fuchsia = false
-  is_ios = false
-  is_linux = false
-  is_mac = false
-  is_nacl = false
-  is_posix = true
-  is_win = false
-}
-
-# =============================================================================
-# SOURCES FILTERS
-# =============================================================================
-#
-# These patterns filter out platform-specific files when assigning to the
-# sources variable. The magic variable |sources_assignment_filter| is applied
-# to each assignment or appending to the sources variable and matches are
-# automatically removed.
-#
-# Note that the patterns are NOT regular expressions. Only "*" and "\b" (path
-# boundary = end of string or slash) are supported, and the entire string
-# must match the pattern (so you need "*.cc" to match all .cc files, for
-# example).
-
-# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
-# below.
-sources_assignment_filter = []
-
-if (!is_win) {
-  sources_assignment_filter += [
-    "*_win.cc",
-    "*_win.h",
-    "*_win_unittest.cc",
-    "*\bwin/*",
-    "*.def",
-    "*.rc",
-  ]
-}
-if (!is_mac) {
-  sources_assignment_filter += [
-    "*_mac.h",
-    "*_mac.cc",
-    "*_mac.mm",
-    "*_mac_unittest.h",
-    "*_mac_unittest.cc",
-    "*_mac_unittest.mm",
-    "*\bmac/*",
-    "*_cocoa.h",
-    "*_cocoa.cc",
-    "*_cocoa.mm",
-    "*_cocoa_unittest.h",
-    "*_cocoa_unittest.cc",
-    "*_cocoa_unittest.mm",
-    "*\bcocoa/*",
-  ]
-}
-if (!is_ios) {
-  sources_assignment_filter += [
-    "*_ios.h",
-    "*_ios.cc",
-    "*_ios.mm",
-    "*_ios_unittest.h",
-    "*_ios_unittest.cc",
-    "*_ios_unittest.mm",
-    "*\bios/*",
-  ]
-}
-if (!is_mac && !is_ios) {
-  sources_assignment_filter += [ "*.mm" ]
-}
-if (!is_linux) {
-  sources_assignment_filter += [
-    "*_linux.h",
-    "*_linux.cc",
-    "*_linux_unittest.h",
-    "*_linux_unittest.cc",
-    "*\blinux/*",
-  ]
-}
-if (!is_android) {
-  sources_assignment_filter += [
-    "*_android.h",
-    "*_android.cc",
-    "*_android_unittest.h",
-    "*_android_unittest.cc",
-    "*\bandroid/*",
-  ]
-}
-if (!is_chromeos) {
-  sources_assignment_filter += [
-    "*_chromeos.h",
-    "*_chromeos.cc",
-    "*_chromeos_unittest.h",
-    "*_chromeos_unittest.cc",
-    "*\bchromeos/*",
-  ]
-}
-
-# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
-# below.
-
-# Actually save this list.
-#
-# These patterns are executed for every file in the source tree of every run.
-# Therefore, adding more patterns slows down the build for everybody. We should
-# only add automatic patterns for configurations affecting hundreds of files
-# across many projects in the tree.
-#
-# Therefore, we only add rules to this list corresponding to platforms on the
-# Chromium waterfall.  This is not for non-officially-supported platforms
-# (FreeBSD, etc.), toolkits (X11, GTK, etc.), or features. For these cases,
-# write a conditional in the target to remove the file(s) from the list when
-# your platform/toolkit/feature doesn't apply.
-set_sources_assignment_filter(sources_assignment_filter)
-
-# =============================================================================
-# TARGET DEFAULTS
-# =============================================================================
-#
-# Set up the default configuration for every build target of the given type.
-# The values configured here will be automatically set on the scope of the
-# corresponding target. Target definitions can add or remove to the settings
-# here as needed.
-#
-# WHAT GOES HERE?
-#
-# Other than the main compiler and linker configs, the only reason for a config
-# to be in this list is if some targets need to explicitly override that config
-# by removing it. This is how targets opt-out of flags. If you don't have that
-# requirement and just need to add a config everywhere, reference it as a
-# sub-config of an existing one, most commonly the main "compiler" one.
-
-# Holds all configs used for running the compiler.
-default_compiler_configs = [
-  "//build/config:feature_flags",
-  "//build/config/compiler:afdo",
-  "//build/config/compiler:afdo_optimize_size",
-  "//build/config/compiler:compiler",
-  "//build/config/compiler:clang_stackrealign",
-  "//build/config/compiler:compiler_arm_fpu",
-  "//build/config/compiler:compiler_arm_thumb",
-  "//build/config/compiler:chromium_code",
-  "//build/config/compiler:default_include_dirs",
-  "//build/config/compiler:default_optimization",
-  "//build/config/compiler:default_stack_frames",
-  "//build/config/compiler:default_symbols",
-  "//build/config/compiler:no_exceptions",
-  "//build/config/compiler:no_rtti",
-  "//build/config/compiler:runtime_library",
-  "//build/config/compiler:thin_archive",
-  "//build/config/coverage:default_coverage",
-  "//build/config/sanitizers:default_sanitizer_flags",
-]
-
-if (is_win) {
-  default_compiler_configs += [
-    "//build/config/win:default_crt",
-    "//build/config/win:lean_and_mean",
-    "//build/config/win:nominmax",
-    "//build/config/win:unicode",
-    "//build/config/win:winver",
-    "//build/config/win:vs_code_analysis",
-  ]
-}
-
-if (is_posix) {
-  if (current_os != "aix") {
-    default_compiler_configs +=
-        [ "//build/config/gcc:symbol_visibility_hidden" ]
-  }
-}
-
-if (is_fuchsia) {
-  default_compiler_configs += [ "//build/config/gcc:symbol_visibility_hidden" ]
-}
-
-if (is_android) {
-  default_compiler_configs +=
-      [ "//build/config/android:default_cygprofile_instrumentation" ]
-}
-
-if (is_win) {
-  default_compiler_configs +=
-      [ "//build/config/win:default_cygprofile_instrumentation" ]
-}
-
-if (is_clang && !is_nacl) {
-  default_compiler_configs += [
-    "//build/config/clang:find_bad_constructs",
-    "//build/config/clang:extra_warnings",
-  ]
-}
-
-# Debug/release-related defines.
-if (is_debug) {
-  default_compiler_configs += [ "//build/config:debug" ]
-} else {
-  default_compiler_configs += [ "//build/config:release" ]
-}
-
-# Static libraries and source sets use only the compiler ones.
-set_defaults("static_library") {
-  configs = default_compiler_configs
-}
-set_defaults("source_set") {
-  configs = default_compiler_configs
-}
-
-# Compute the set of configs common to all linked targets (shared libraries,
-# loadable modules, executables) to avoid duplication below.
-if (is_win) {
-  # Many targets remove these configs, so they are not contained within
-  # //build/config:executable_config for easy removal.
-  _linker_configs = [
-    "//build/config/win:default_incremental_linking",
-
-    # Default to console-mode apps. Most of our targets are tests and such
-    # that shouldn't use the windows subsystem.
-    "//build/config/win:console",
-  ]
-} else if (is_mac) {
-  _linker_configs = [ "//build/config/mac:strip_all" ]
-} else {
-  _linker_configs = []
-}
-
-# Executable defaults.
-default_executable_configs = default_compiler_configs + [
-                               "//build/config:default_libs",
-                               "//build/config:executable_config",
-                             ] + _linker_configs
-set_defaults("executable") {
-  configs = default_executable_configs
-}
-
-# Shared library and loadable module defaults (also for components in component
-# mode).
-default_shared_library_configs = default_compiler_configs + [
-                                   "//build/config:default_libs",
-                                   "//build/config:shared_library_config",
-                                 ] + _linker_configs
-if (is_android) {
-  # Strip native JNI exports from shared libraries by default. Binaries that
-  # want this can remove this config.
-  default_shared_library_configs +=
-      [ "//build/config/android:hide_all_but_jni_onload" ]
-}
-set_defaults("shared_library") {
-  configs = default_shared_library_configs
-}
-set_defaults("loadable_module") {
-  configs = default_shared_library_configs
-
-  # loadable_modules are generally used by other libs, not just via JNI.
-  if (is_android) {
-    configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
-  }
-}
-
-# ==============================================================================
-# COMPONENT SETUP
-# ==============================================================================
-
-# Defines a component, which equates to a shared_library when
-# is_component_build == true and a static_library otherwise.
-#
-# Use static libraries for the static build rather than source sets because
-# many of our test binaries link many large dependencies but often don't
-# use large portions of them. The static libraries are much more efficient to
-# link in this situation since only the necessary object files are linked.
-#
-# The invoker can override the type of the target in the non-component-build
-# case by setting static_component_type to either "source_set" or
-# "static_library". If unset, the default will be used.
-template("component") {
-  if (is_component_build) {
-    _component_mode = "shared_library"
-  } else if (defined(invoker.static_component_type)) {
-    assert(invoker.static_component_type == "static_library" ||
-           invoker.static_component_type == "source_set")
-    _component_mode = invoker.static_component_type
-  } else if (!defined(invoker.sources)) {
-    # When there are no sources defined, use a source set to avoid creating
-    # an empty static library (which generally doesn't work).
-    _component_mode = "source_set"
-  } else {
-    _component_mode = "static_library"
-  }
-  target(_component_mode, target_name) {
-    # Explicitly forward visibility, implicitly forward everything else.
-    # Forwarding "*" doesn't recurse into nested scopes (to avoid copying all
-    # globals into each template invocation), so won't pick up file-scoped
-    # variables. Normally this isn't too bad, but visibility is commonly
-    # defined at the file scope. Explicitly forwarding visibility and then
-    # excluding it from the "*" set works around this problem.
-    # See http://crbug.com/594610
-    forward_variables_from(invoker, [ "visibility" ])
-    forward_variables_from(invoker, "*", [ "visibility" ])
-
-    # All shared libraries must have the sanitizer deps to properly link in
-    # asan mode (this target will be empty in other cases).
-    if (!defined(deps)) {
-      deps = []
-    }
-    deps += [ "//build/config:exe_and_shlib_deps" ]
-  }
-}
-
-# Component defaults
-set_defaults("component") {
-  if (is_component_build) {
-    configs = default_shared_library_configs
-    if (is_android) {
-      configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
-    }
-  } else {
-    configs = default_compiler_configs
-  }
-}
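
For reference, the component() template deleted above was used like any other target type. A small sketch, with hypothetical target and dependency names, of a target that builds as a shared_library when is_component_build is true and as a static_library otherwise:

  component("my_feature") {
    # Compiled as a shared library in component builds, a static library
    # otherwise (or a source_set if no sources were listed).
    sources = [
      "my_feature.cc",
      "my_feature.h",
    ]
    deps = [ "//base" ]
  }
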
diff --git a/build/config/OWNERS b/build/config/OWNERS
deleted file mode 100644
index f1592d3..0000000
--- a/build/config/OWNERS
+++ /dev/null
@@ -1,5 +0,0 @@
-dpranke@chromium.org
-scottmg@chromium.org
-
-per-file BUILDCONFIG.gn=dpranke@chromium.org
-per-file BUILDCONFIG.gn=set noparent
diff --git a/build/config/aix/BUILD.gn b/build/config/aix/BUILD.gn
deleted file mode 100644
index 7ab1e7c..0000000
--- a/build/config/aix/BUILD.gn
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/toolchain/toolchain.gni")
-
-# This is included by reference in the //build/config/compiler config that
-# is applied to all targets. It is here to separate out the logic.
-
-config("compiler") {
-  # These flags are shared between the C compiler and linker.
-  defines = [
-    "_LINUX_SOURCE_COMPAT=1",
-    "__STDC_FORMAT_MACROS",
-    "_ALL_SOURCE=1",
-  ]
-
-  cflags = [
-    "-Wall",
-    "-Wno-unused-parameter",
-    "-pthread",
-    "-Wmissing-field-initializers",
-    "-Wno-uninitialized",
-    "-mcpu=power5+",
-    "-mfprnd",
-    "-mno-popcntb",
-    "-maix64",
-    "-fdata-sections",
-    "-ffunction-sections",
-    "-O3",
-
-    # "-Werror"
-    # We need to find a way to fix the TOC warnings if we want to enable this.
-  ]
-
-  cflags_cc = [
-    "-std=gnu++11",
-    "-fno-rtti",
-    "-fno-exceptions",
-    "-Wno-narrowing",
-    "-Wnon-virtual-dtor",
-  ]
-
-  ldflags = [
-    "-pthread",
-    "-maix64",
-    "-Wl,-bbigtoc",
-  ]
-}
diff --git a/build/config/allocator.gni b/build/config/allocator.gni
deleted file mode 100644
index 709f80f..0000000
--- a/build/config/allocator.gni
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/sanitizers/sanitizers.gni")
-
-# Temporarily disable tcmalloc on arm64 linux to get rid of compilation errors.
-if (is_android || is_mac || is_ios || is_asan || is_lsan || is_tsan ||
-    is_msan || is_win || is_fuchsia || (is_linux && target_cpu == "arm64")) {
-  _default_allocator = "none"
-} else {
-  _default_allocator = "tcmalloc"
-}
-
-# The debug CRT on Windows has some debug features that are incompatible with
-# the shim. NaCl in particular does seem to link some binaries statically
-# against the debug CRT with "is_nacl=false".
-if ((is_linux || is_android || is_mac ||
-     (is_win && !is_component_build && !is_debug)) && !is_asan && !is_lsan &&
-    !is_tsan && !is_msan) {
-  _default_use_allocator_shim = true
-} else {
-  _default_use_allocator_shim = false
-}
-
-declare_args() {
-  # Memory allocator to use. Set to "none" to use default allocator.
-  use_allocator = _default_allocator
-
-  # Causes all the allocations to be routed via allocator_shim.cc.
-  use_allocator_shim = _default_use_allocator_shim
-
-  # Partition alloc is included by default except on iOS.
-  use_partition_alloc = !is_ios
-}
-
-if (is_nacl) {
-  # Turn off the build flag for NaCl builds to minimize confusion, as NaCl
-  # doesn't support the heap shim.
-  use_allocator_shim = false
-}
-
-assert(use_allocator == "none" || use_allocator == "tcmalloc")
-
-assert(!is_win || use_allocator == "none", "Tcmalloc doesn't work on Windows.")
-assert(!is_mac || use_allocator == "none", "Tcmalloc doesn't work on macOS.")
-
-assert(
-    !use_allocator_shim || is_linux || is_android || is_win || is_mac,
-    "use_allocator_shim is supported only on Linux, Android, Windows and macOS targets")
-
-if (is_win && use_allocator_shim) {
-  assert(!is_component_build,
-         "The allocator shim doesn't work for the component build on Windows.")
-}
diff --git a/build/config/android/BUILD.gn b/build/config/android/BUILD.gn
deleted file mode 100644
index bf74673..0000000
--- a/build/config/android/BUILD.gn
+++ /dev/null
@@ -1,215 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/android/config.gni")
-import("//build/config/c++/c++.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-
-assert(is_android)
-
-# This is included by reference in the //build/config/compiler config that
-# is applied to all targets. It is here to separate out the logic that is
-# Android-only.
-config("compiler") {
-  cflags = [
-    "-ffunction-sections",
-    "-fno-short-enums",
-  ]
-  defines = [
-    "ANDROID",
-
-    # The NDK has these things, but doesn't define the constants to say that it
-    # does. Define them here instead.
-    "HAVE_SYS_UIO_H",
-
-    # Forces full rebuilds on NDK rolls. To rebuild everything when NDK version
-    # stays the same, increment the suffix number.
-    "ANDROID_NDK_VERSION_ROLL=${android_ndk_version}_1",
-  ]
-
-  if (current_cpu == "mips64el") {
-    cflags += [
-      # Have to force IAS for mips64.
-      "-fintegrated-as",
-    ]
-  }
-
-  ldflags = [
-    "-Wl,--no-undefined",
-
-    # Don't allow visible symbols from libgcc or libc++ to be
-    # re-exported.
-    "-Wl,--exclude-libs=libgcc.a",
-    "-Wl,--exclude-libs=libc++_static.a",
-
-    # Don't allow visible symbols from libraries that contain
-    # assembly code with symbols that aren't hidden properly.
-    # http://crbug.com/448386
-    "-Wl,--exclude-libs=libvpx_assembly_arm.a",
-  ]
-
-  # $compile_api_level corresponds to the API level used for the sysroot path
-  # calculation in //build/config/android/config.gni
-  if (current_cpu == "arm") {
-    abi_target = "arm-linux-androideabi"
-    compile_api_level = android32_ndk_api_level
-  } else if (current_cpu == "x86") {
-    abi_target = "i686-linux-android"
-    compile_api_level = android32_ndk_api_level
-  } else if (current_cpu == "arm64") {
-    abi_target = "aarch64-linux-android"
-    compile_api_level = android64_ndk_api_level
-  } else if (current_cpu == "x64") {
-    # Placeholder for x64 support, not tested.
-    # TODO: Enable clang support for Android x64. http://crbug.com/539781
-    abi_target = "x86_64-linux-android"
-    compile_api_level = android64_ndk_api_level
-  } else if (current_cpu == "mipsel") {
-    abi_target = "mipsel-linux-android"
-    compile_api_level = android32_ndk_api_level
-  } else if (current_cpu == "mips64el") {
-    # Placeholder for mips64 support, not tested.
-    abi_target = "mips64el-linux-android"
-    compile_api_level = android64_ndk_api_level
-  } else {
-    assert(false, "Architecture not supported")
-  }
-  cflags += [
-    "--target=$abi_target",
-    "-isystem" +
-        rebase_path("$android_ndk_root/sysroot/usr/include/$abi_target",
-                    root_build_dir),
-    "-D__ANDROID_API__=$compile_api_level",
-
-    # Temporary workaround for third party dependencies requiring this to be
-    # defined.
-    # TODO(crbug.com/771171): Remove this once the third party deps have been
-    # fixed to be compatible with newer NDK versions
-    "-D__NDK_FPABI__=",
-  ]
-  ldflags += [ "--target=$abi_target" ]
-
-  # TODO(crbug.com/771171): Remove this define once code that uses it has been
-  # updated to no longer need it. This is leftover from older Android NDK
-  # versions.
-  if (compile_api_level < 20) {
-    cflags += [ "-DHAVE_PTHREAD_COND_TIMEDWAIT_MONOTONIC=1" ]
-  }
-
-  # Assign any flags set for the C compiler to asmflags so that they are sent
-  # to the assembler.
-  asmflags = cflags
-}
-
-# This is included by reference in the //build/config/compiler:runtime_library
-# config that is applied to all targets. It is here to separate out the logic
-# that is Android-only. Please see that target for advice on what should go in
-# :runtime_library vs. :compiler.
-config("runtime_library") {
-  # NOTE: The libc++ header include paths below are specified in cflags_cc
-  # rather than include_dirs because they need to come after include_dirs.
-  # Think of them like system headers, but don't use '-isystem' because the
-  # arm-linux-androideabi-4.4.3 toolchain (circa Gingerbread) will exhibit
-  # strange errors. The include ordering here is important; change with
-  # caution.
-  cflags_cc = []
-  if (!use_custom_libcxx) {
-    cflags_cc = []
-    if (android_ndk_major_version >= 13) {
-      libcxx_include_path =
-          rebase_path("$android_libcpp_root/include", root_build_dir)
-      libcxxabi_include_path = rebase_path(
-              "$android_ndk_root/sources/cxx-stl/llvm-libc++abi/include",
-              root_build_dir)
-    } else {
-      libcxx_include_path =
-          rebase_path("$android_libcpp_root/libcxx/include", root_build_dir)
-      libcxxabi_include_path = rebase_path(
-              "$android_ndk_root/sources/cxx-stl/llvm-libc++abi/libcxxabi/include",
-              root_build_dir)
-    }
-    cflags_cc += [
-      "-isystem" + libcxx_include_path,
-      "-isystem" + libcxxabi_include_path,
-    ]
-  }
-  cflags_cc += [ "-isystem" + rebase_path(
-                     "$android_ndk_root/sources/android/support/include",
-                     root_build_dir) ]
-
-  defines = [
-    "__GNU_SOURCE=1",  # Necessary for clone().
-    "CHROMIUM_CXX_TWEAK_INLINES",  # Saves binary size.
-  ]
-  ldflags = [ "-nostdlib" ]
-  lib_dirs = [ android_libcpp_lib_dir ]
-
-  libs = []
-  if (!use_custom_libcxx) {
-    # The libc++ runtime library (must come first).
-    # ASan needs to dynamically link to libc++ even in static builds so
-    # that it can interpose operator new.
-    if (is_component_build || is_asan) {
-      libs += [ "c++_shared" ]
-    } else {
-      libs += [ "c++_static" ]
-    }
-    libs += [ "c++abi" ]
-  }
-  libs += [ "android_support" ]
-
-  # arm builds of libc++ starting in NDK r12 depend on unwind.
-  if (current_cpu == "arm") {
-    libs += [ "unwind" ]
-  }
-
-  # Manually link the libgcc.a that the cross compiler uses. This is
-  # absolute because the linker will look inside the sysroot if it's not.
-  libs += [
-    rebase_path(android_libgcc_file),
-    "c",
-  ]
-
-  if (current_cpu == "arm" && arm_version == 6) {
-    libs += [ "atomic" ]
-  }
-
-  if (current_cpu == "mipsel") {
-    libs += [ "atomic" ]
-  }
-
-  # TODO(jdduke) Re-enable on mips after resolving linking
-  # issues with libc++ (crbug.com/456380).
-  if (current_cpu != "mipsel" && current_cpu != "mips64el") {
-    ldflags += [ "-Wl,--warn-shared-textrel" ]
-  }
-}
-
-config("executable_config") {
-  cflags = [ "-fPIE" ]
-  asmflags = [ "-fPIE" ]
-  ldflags = [ "-pie" ]
-}
-
-config("hide_all_but_jni_onload") {
-  ldflags = [ "-Wl,--version-script=" + rebase_path(
-                  "//build/android/android_only_explicit_jni_exports.lst") ]
-}
-
-config("hide_all_but_jni") {
-  ldflags = [ "-Wl,--version-script=" +
-              rebase_path("//build/android/android_only_jni_exports.lst") ]
-}
-
-config("lld_pack_relocations") {
-  ldflags = [ "-Wl,--pack-dyn-relocs=android" ]
-}
-
-# Used for instrumented build to generate the orderfile.
-config("default_cygprofile_instrumentation") {
-  if (use_order_profiling) {
-    defines = [ "CYGPROFILE_INSTRUMENTATION=1" ]
-    cflags = [ "-finstrument-function-entry-bare" ]
-  }
-}
diff --git a/build/config/android/OWNERS b/build/config/android/OWNERS
deleted file mode 100644
index 74dca6f..0000000
--- a/build/config/android/OWNERS
+++ /dev/null
@@ -1,6 +0,0 @@
-agrieve@chromium.org
-estevenson@chromium.org
-digit@chromium.org
-wnwen@chromium.org
-
-# COMPONENT: Build
diff --git a/build/config/android/abi.gni b/build/config/android/abi.gni
deleted file mode 100644
index dc25b49..0000000
--- a/build/config/android/abi.gni
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Logic separated out from config.gni so that it can be used by compiler.gni
-# without introducing a circular dependency.
-
-# NOTE: Because Chrome OS builds may depend on targets built with the Android
-# toolchain, this GNI file may be read and processed from within Chrome OS
-# toolchains. Checking |is_android| here would therefore be too restrictive.
-assert(is_android || is_chromeos)
-
-declare_args() {
-  # Adds instrumentation to each function. Writes a file with the order that
-  # functions are called at startup.
-  use_order_profiling = false
-
-  # Builds a secondary ABI for APKs, supporting a 32-bit arch as the secondary
-  # ABI in 64-bit Monochrome and WebView.
-  build_apk_secondary_abi = true
-}
-
-if (current_cpu == "x86") {
-  android_app_abi = "x86"
-} else if (current_cpu == "arm") {
-  import("//build/config/arm.gni")
-  if (arm_version < 7) {
-    android_app_abi = "armeabi"
-  } else {
-    android_app_abi = "armeabi-v7a"
-  }
-} else if (current_cpu == "mipsel") {
-  android_app_abi = "mips"
-} else if (current_cpu == "x64") {
-  android_app_abi = "x86_64"
-} else if (current_cpu == "arm64") {
-  android_app_abi = "arm64-v8a"
-} else if (current_cpu == "mips64el") {
-  android_app_abi = "mips64"
-} else {
-  assert(false, "Unknown Android ABI: " + current_cpu)
-}
-
-if (target_cpu == "arm64" || target_cpu == "x64" || target_cpu == "mips64el") {
-  android_64bit_target_cpu = true
-} else if (target_cpu == "arm" || target_cpu == "x86" ||
-           target_cpu == "mipsel") {
-  android_64bit_target_cpu = false
-} else {
-  assert(false, "Unknown target CPU: $target_cpu")
-}
-
-# Intentionally do not define android_app_secondary_abi_cpu and
-# android_app_secondary_abi for 32-bit target_cpu, since they are not used.
-if (target_cpu == "arm64") {
-  android_secondary_abi_cpu = "arm"
-  android_app_secondary_abi = "armeabi-v7a"
-} else if (target_cpu == "x64") {
-  android_secondary_abi_cpu = "x86"
-  android_app_secondary_abi = "x86"
-} else if (target_cpu == "mips64el") {
-  android_secondary_abi_cpu = "mipsel"
-  android_app_secondary_abi = "mips"
-}
-
-if (defined(android_secondary_abi_cpu)) {
-  android_secondary_abi_toolchain =
-      "//build/toolchain/android:android_clang_${android_secondary_abi_cpu}"
-}
diff --git a/build/config/android/config.gni b/build/config/android/config.gni
deleted file mode 100644
index dcd694c..0000000
--- a/build/config/android/config.gni
+++ /dev/null
@@ -1,374 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file contains common system config stuff for the Android build.
-
-# NOTE: Because Chrome OS builds may depend on targets built with the Android
-# toolchain, this GNI file may be read and processed from within Chrome OS
-# toolchains. Checking |is_android| here would therefore be too restrictive.
-if (is_android || is_chromeos) {
-  import("//build_overrides/build.gni")
-  import("abi.gni")
-
-  if (build_with_chromium) {
-    # Some non-chromium projects (e.g. WebRTC) use our build configs
-    # heavily but don't write gclient args files.
-
-    import("//build/config/gclient_args.gni")
-    if (defined(checkout_android_native_support)) {
-      n = "$0x0A"  # Newline
-      assert(checkout_android_native_support,
-             "Missing native Android toolchain support. |target_os| in your " +
-                 ".gclient configuration file (in the parent directory of " +
-                 "src) must include \"android\" and/or \"chromeos\". For " +
-                 "example:${n}${n}solutions = [${n}...${n}]${n}" +
-                 "target_os=[\"chromeos\"]${n}")
-    }
-  }
-
-  has_chrome_android_internal =
-      exec_script("//build/dir_exists.py",
-                  [ rebase_path("//clank", root_build_dir) ],
-                  "string") == "True"
-
-  # We are using a separate declare_args block for only this argument so that
-  # we can decide if we have to pull in definitions from the internal config
-  # early.
-  declare_args() {
-    # Enables using the internal Chrome for Android repository. The default
-    # value depends on whether the repository is available, and if it's not but
-    # this argument is manually set to True, the generation will fail.
-    # The main purpose of this argument is to avoid having to maintain 2
-    # repositories to support both public only and internal builds.
-    enable_chrome_android_internal = has_chrome_android_internal
-
-    # Android API level for 32-bit platforms.
-    android32_ndk_api_level = 16
-
-    # Android API level for 64-bit platforms.
-    android64_ndk_api_level = 21
-  }
-
-  if (enable_chrome_android_internal) {
-    import("//clank/config.gni")
-  } else {
-    import("//build/config/android/sdk.gni")
-    declare_args() {
-      # Android SDK release. Currently, only "o_mr1" is publicly supported.
-      android_sdk_release = default_android_sdk_release
-    }
-  }
-
-  if (!defined(extra_chrome_shared_library_configs)) {
-    extra_chrome_shared_library_configs = []
-  }
-  if (!defined(extra_chrome_shared_library_deps)) {
-    extra_chrome_shared_library_deps = []
-  }
-
-  if (!defined(default_android_ndk_root)) {
-    default_android_ndk_root = "//third_party/android_ndk"
-    default_android_ndk_version = "r16"
-    default_android_ndk_major_version = 16
-  } else {
-    assert(defined(default_android_ndk_version))
-    assert(defined(default_android_ndk_major_version))
-  }
-
-  if (android_sdk_release == "o_mr1") {
-    default_android_sdk_root = "//third_party/android_tools/sdk"
-    default_android_sdk_version = "27"
-    default_android_sdk_build_tools_version = "27.0.3"
-    default_android_sdk_tools_version_suffix = "-26.0.0-dev"
-    public_android_sdk = true
-  }
-
-  if (!defined(default_lint_android_sdk_root)) {
-    # Purposefully repeated so that downstream can change
-    # default_android_sdk_root without changing lint version.
-    default_lint_android_sdk_root = "//third_party/android_tools/sdk"
-    default_lint_android_sdk_version = "26"
-  }
-
-  if (!defined(default_extras_android_sdk_root)) {
-    # Purposefully repeated so that downstream can change
-    # default_android_sdk_root without changing where we load the SDK extras
-    # from. (Google Play services, etc.)
-    default_extras_android_sdk_root = "//third_party/android_tools/sdk"
-  }
-
-  if (!defined(default_android_keystore_path)) {
-    default_android_keystore_path = "//build/android/chromium-debug.keystore"
-    default_android_keystore_name = "chromiumdebugkey"
-    default_android_keystore_password = "chromium"
-  }
-
-  # google_play_services_package contains the path where individual client
-  # targets (e.g. google_play_services_base_java) are located.
-  if (!defined(google_play_services_package)) {
-    google_play_services_package = "//third_party/android_tools"
-  }
-
-  if (!defined(android_support_library_package)) {
-    android_support_library_package = "//third_party/android_tools/support"
-  }
-
-  if (!defined(system_webview_apk_target)) {
-    system_webview_apk_target = "//android_webview:system_webview_apk"
-  }
-
-  webview_public_framework_jar =
-      "//third_party/android_system_sdk/android_system.jar"
-  if (!defined(webview_framework_jar)) {
-    webview_framework_jar = webview_public_framework_jar
-  }
-
-  # TODO(crbug.com/807768): Remove this extra dependency.
-  if (!defined(android_extra_test_deps)) {
-    android_extra_test_deps = []
-  }
-
-  assert(defined(default_android_sdk_root),
-         "SDK release " + android_sdk_release + " not recognized.")
-
-  declare_args() {
-    android_ndk_root = default_android_ndk_root
-    android_ndk_version = default_android_ndk_version
-    android_ndk_major_version = default_android_ndk_major_version
-
-    android_sdk_root = default_android_sdk_root
-    android_sdk_version = default_android_sdk_version
-    android_sdk_build_tools_version = default_android_sdk_build_tools_version
-    android_sdk_tools_version_suffix = default_android_sdk_tools_version_suffix
-
-    lint_android_sdk_root = default_lint_android_sdk_root
-    lint_android_sdk_version = default_lint_android_sdk_version
-
-    # Libc++ library directory. Override to use a custom libc++ binary.
-    android_libcpp_lib_dir = ""
-
-    # Android versionCode for android_apk()s that don't explicitly set one.
-    android_default_version_code = "1"
-
-    # Android versionName for android_apk()s that don't explicitly set one.
-    android_default_version_name = "Developer Build"
-
-    # The path to the keystore to use for signing builds.
-    android_keystore_path = default_android_keystore_path
-
-    # The name of the keystore to use for signing builds.
-    android_keystore_name = default_android_keystore_name
-
-    # The password for the keystore to use for signing builds.
-    android_keystore_password = default_android_keystore_password
-
-    # Enables verbose proguard output (summaries and unfiltered output).
-    proguard_verbose = false
-
-    # Java debug on Android. Having this on enables multidexing, and turning it
-    # off will enable proguard.
-    is_java_debug = is_debug
-
-    # Report Java assert failure on Android. Turning it on will report Java
-    # assert failure without crash.
-    report_java_assert = false
-
-    # Mark APKs as android:debuggable="true".
-    debuggable_apks = !is_official_build
-
-    # Set to false to disable the Errorprone compiler
-    use_errorprone_java_compiler = true
-
-    # Enables EMMA Java code coverage. Instruments classes during build to
-    # produce .ec files during runtime
-    emma_coverage = false
-
-    # EMMA filter string consisting of a list of inclusion/exclusion patterns
-    # separated with whitespace and/or comma. Only has effect if
-    # emma_coverage==true
-    emma_filter = ""
-
-    # Disables process isolation when building _incremental targets.
-    # Required for Android M+ due to SELinux policies (stronger sandboxing).
-    disable_incremental_isolated_processes = false
-
-    # Speeds up incremental compiles by compiling only changed files.
-    enable_incremental_javac = false
-
-    # Build incremental targets whenever possible.
-    # Ex. with this arg set to true, the chrome_public_apk target results in
-    # chrome_public_apk_incremental being built.
-    incremental_apk_by_default = false
-
-    # When true, updates all android_aar_prebuilt() .info files during gn gen.
-    # Refer to android_aar_prebuilt() for more details.
-    update_android_aar_prebuilts = false
-
-    # When true, uses the third-party libraries from //third_party/android_deps
-    # over the ones in other places (for example, instead of the support
-    # library from android_tools).
-    enable_android_deps_repository = true
-  }
-
-  # We need a second declare_args block to make sure we are using the overridden
-  # value of the arguments set above.
-  declare_args() {
-    if (defined(default_android_sdk_platform_version)) {
-      android_sdk_platform_version = default_android_sdk_platform_version
-    } else {
-      android_sdk_platform_version = android_sdk_version
-    }
-
-    # Speed up dexing using dx --incremental.
-    enable_incremental_dx = is_java_debug
-  }
-
-  # Neither of these should ever be used for release builds since they are
-  # somewhat experimental and dx --incremental is known to not produce
-  # byte-for-byte identical output.
-  assert(!(enable_incremental_dx && !is_java_debug))
-  assert(!(enable_incremental_javac && !is_java_debug))
-
-  # Path to where selected build variables are written to.
-  android_build_vars = "$root_build_dir/build_vars.txt"
-
-  # Host stuff -----------------------------------------------------------------
-
-  # Defines the name the Android build gives to the current host CPU
-  # architecture, which is different than the names GN uses.
-  if (host_cpu == "x64") {
-    android_host_arch = "x86_64"
-  } else if (host_cpu == "x86") {
-    android_host_arch = "x86"
-  } else {
-    assert(false, "Need Android toolchain support for your build CPU arch.")
-  }
-
-  # Defines the name the Android build gives to the current host operating
-  # system, which is different than the names GN uses.
-  if (host_os == "linux") {
-    android_host_os = "linux"
-  } else if (host_os == "mac") {
-    android_host_os = "darwin"
-  } else {
-    assert(false, "Need Android toolchain support for your build OS.")
-  }
-
-  # Directories and files ------------------------------------------------------
-  #
-  # We define many of the dir strings here for each output architecture (rather
-  # than just the current one) since these are needed by the Android toolchain
-  # file to define toolchains for all possible targets in one pass.
-
-  android_sdk =
-      "${android_sdk_root}/platforms/android-${android_sdk_platform_version}"
-
-  # Path to the Android NDK and SDK.
-  android_ndk_include_dir = "$android_ndk_root/usr/include"
-
-  android_sdk_tools = "${android_sdk_root}/tools"
-  android_sdk_build_tools =
-      "${android_sdk_root}/build-tools/$android_sdk_build_tools_version"
-
-  # Path to the SDK's android.jar
-  android_sdk_jar = "$android_sdk/android.jar"
-
-  # Subdirectories inside android_ndk_root that contain the sysroot for the
-  # associated platform.
-  x86_android_sysroot_subdir =
-      "platforms/android-${android32_ndk_api_level}/arch-x86"
-  arm_android_sysroot_subdir =
-      "platforms/android-${android32_ndk_api_level}/arch-arm"
-  mips_android_sysroot_subdir =
-      "platforms/android-${android32_ndk_api_level}/arch-mips"
-  x86_64_android_sysroot_subdir =
-      "platforms/android-${android64_ndk_api_level}/arch-x86_64"
-  arm64_android_sysroot_subdir =
-      "platforms/android-${android64_ndk_api_level}/arch-arm64"
-  mips64_android_sysroot_subdir =
-      "platforms/android-${android64_ndk_api_level}/arch-mips64"
-
-  # Toolchain root directory for each build. The actual binaries are inside
-  # a "bin" directory inside of these.
-  _android_toolchain_version = "4.9"
-  _android_toolchain_detailed_version = "4.9.x"
-  x86_android_toolchain_root = "$android_ndk_root/toolchains/x86-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
-  arm_android_toolchain_root = "$android_ndk_root/toolchains/arm-linux-androideabi-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
-  mips_android_toolchain_root = "$android_ndk_root/toolchains/mipsel-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
-  x86_64_android_toolchain_root = "$android_ndk_root/toolchains/x86_64-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
-  arm64_android_toolchain_root = "$android_ndk_root/toolchains/aarch64-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
-  mips64_android_toolchain_root = "$android_ndk_root/toolchains/mips64el-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
-
-  # Location of libgcc. This is only needed for the current GN toolchain, so we
-  # only need to define the current one, rather than one for every platform
-  # like the toolchain roots.
-  if (current_cpu == "x86") {
-    android_prebuilt_arch = "android-x86"
-    _binary_prefix = "i686-linux-android"
-    android_toolchain_root = "$x86_android_toolchain_root"
-    android_libgcc_file = "$android_toolchain_root/lib/gcc/i686-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
-  } else if (current_cpu == "arm") {
-    android_prebuilt_arch = "android-arm"
-    _binary_prefix = "arm-linux-androideabi"
-    android_toolchain_root = "$arm_android_toolchain_root"
-    android_libgcc_file = "$android_toolchain_root/lib/gcc/arm-linux-androideabi/${_android_toolchain_detailed_version}/libgcc.a"
-  } else if (current_cpu == "mipsel") {
-    android_prebuilt_arch = "android-mips"
-    _binary_prefix = "mipsel-linux-android"
-    android_toolchain_root = "$mips_android_toolchain_root"
-    android_libgcc_file = "$android_toolchain_root/lib/gcc/mipsel-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
-  } else if (current_cpu == "x64") {
-    android_prebuilt_arch = "android-x86_64"
-    _binary_prefix = "x86_64-linux-android"
-    android_toolchain_root = "$x86_64_android_toolchain_root"
-    android_libgcc_file = "$android_toolchain_root/lib/gcc/x86_64-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
-  } else if (current_cpu == "arm64") {
-    android_prebuilt_arch = "android-arm64"
-    _binary_prefix = "aarch64-linux-android"
-    android_toolchain_root = "$arm64_android_toolchain_root"
-    android_libgcc_file = "$android_toolchain_root/lib/gcc/aarch64-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
-  } else if (current_cpu == "mips64el") {
-    android_prebuilt_arch = "android-mips64"
-    _binary_prefix = "mips64el-linux-android"
-    android_toolchain_root = "$mips64_android_toolchain_root"
-    android_libgcc_file = "$android_toolchain_root/lib/gcc/mips64el-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
-  } else {
-    assert(false, "Need android libgcc support for your target arch.")
-  }
-
-  android_tool_prefix = "$android_toolchain_root/bin/$_binary_prefix-"
-  android_readelf = "${android_tool_prefix}readelf"
-  android_objcopy = "${android_tool_prefix}objcopy"
-  android_gdbserver =
-      "$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver"
-
-  # Toolchain stuff ------------------------------------------------------------
-
-  android_libcpp_root = "$android_ndk_root/sources/cxx-stl/llvm-libc++"
-
-  if (android_libcpp_lib_dir == "") {
-    android_libcpp_lib_dir = "${android_libcpp_root}/libs/${android_app_abi}"
-  }
-
-  # Dynamic app bundles -------------------------------------------------------
-
-  # TODO(digit): Remove this once we roll a version of the Android SDK that
-  # has the proper build-tools binaries to both public and internal.
-  declare_args() {
-    # To enable generation of application bundles, define
-    # android_sdk_app_bundle_build_tools to point to an Android SDK build-tools
-    # directory that has the relevant aapt2 and bundletool binaries.
-    #
-    android_sdk_app_bundle_build_tools = ""
-  }
-
-  # Whether building application bundles is supported.
-  android_enable_app_bundles = android_sdk_app_bundle_build_tools != ""
-}
-
-declare_args() {
-  # Enables used resource whitelist generation. Set for official builds only
-  # as a large amount of build output is generated.
-  enable_resource_whitelist_generation = is_android && is_official_build
-}
diff --git a/build/config/android/extract_unwind_tables.gni b/build/config/android/extract_unwind_tables.gni
deleted file mode 100644
index d0b0532..0000000
--- a/build/config/android/extract_unwind_tables.gni
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/android/rules.gni")
-
-template("unwind_table_asset") {
-  # Note: This file name is used in multiple monochrome build scripts.
-  _asset_path = "${target_gen_dir}/${target_name}/unwind_cfi_32"
-  _unwind_action = "${target_name}__extract"
-
-  action(_unwind_action) {
-    if (defined(invoker.testonly)) {
-      testonly = invoker.testonly
-    }
-
-    script = "//build/android/gyp/extract_unwind_tables.py"
-    outputs = [
-      _asset_path,
-    ]
-    inputs = [
-      "$root_out_dir/lib.unstripped/$shlib_prefix${invoker.library_target}$shlib_extension",
-    ]
-
-    args = [
-      "--input_path",
-      rebase_path(
-          "$root_out_dir/lib.unstripped/$shlib_prefix${invoker.library_target}$shlib_extension",
-          root_build_dir),
-      "--output_path",
-      rebase_path(_asset_path, root_build_dir),
-      "--dump_syms_path",
-      rebase_path("$root_out_dir/dump_syms", root_build_dir),
-    ]
-    deps = invoker.deps
-    deps += [ "//third_party/breakpad:dump_syms" ]
-  }
-  android_assets(target_name) {
-    if (defined(invoker.testonly)) {
-      testonly = invoker.testonly
-    }
-    sources = [
-      _asset_path,
-    ]
-    disable_compression = true
-    deps = [
-      ":$_unwind_action",
-    ]
-  }
-}
diff --git a/build/config/android/internal_rules.gni b/build/config/android/internal_rules.gni
deleted file mode 100644
index 02bfabf..0000000
--- a/build/config/android/internal_rules.gni
+++ /dev/null
@@ -1,3179 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Do not add any imports to non-//build directories here.
-# Some projects (e.g. V8) do not have non-build directories DEPS'ed in.
-import("//build_overrides/build.gni")
-import("//build/config/android/config.gni")
-import("//build/config/dcheck_always_on.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-
-assert(is_android)
-
-# These identify targets that have .build_config files (except for android_apk,
-# java_binary, resource_rewriter, since we never need to depend on these).
-_java_target_whitelist = [
-  "*:*_java",
-  "*:*_javalib",
-  "*:*_java_*",  # e.g. java_test_support
-  "*:java",
-  "*:junit",
-  "*:junit_*",
-  "*:*_junit_*",
-  "*:*javatests",
-  "*:*_assets",
-  "*android*:assets",
-  "*:*_apk_*resources",
-  "*android*:resources",
-  "*:*_resources",
-  "*:*_grd",
-  "*:*locale_paks",
-
-  # TODO(agrieve): Rename targets below to match above patterns.
-  "*android_webview/glue:glue",
-]
-
-# Targets that match the whitelist but are not actually java targets.
-_java_target_blacklist = [
-  "//chrome:packed_resources",
-  "*:*_unpack_aar",
-]
-
-_default_proguard_jar_path = "//third_party/proguard/lib/proguard.jar"
-
-# Write the target's .build_config file. This is a json file that contains a
-# dictionary of information about how to build this target (things that
-# require knowledge about this target's dependencies and cannot be calculated
-# at gn-time). There is a special syntax to add a value in that dictionary to
-# an action/action_foreach's args:
-#   --python-arg=@FileArg($rebased_build_config_path:key0:key1)
-# At runtime, such an arg will be replaced by the value in the build_config.
-# See build/android/gyp/write_build_config.py and
-# build/android/gyp/util/build_utils.py:ExpandFileArgs
-template("write_build_config") {
-  _type = invoker.type
-
-  # Don't need to enforce naming scheme for these targets since we never
-  # consider them in dependency chains.
-  if (_type != "android_apk" && _type != "java_binary" &&
-      _type != "resource_rewriter" && _type != "dist_jar" &&
-      _type != "java_annotation_processor" && _type != "dist_aar") {
-    set_sources_assignment_filter(_java_target_whitelist)
-    _parent_invoker = invoker.invoker
-    _target_label =
-        get_label_info(":${_parent_invoker.target_name}", "label_no_toolchain")
-    sources = [
-      _target_label,
-    ]
-    if (sources != []) {
-      set_sources_assignment_filter(_java_target_blacklist)
-      sources = []
-      sources = [
-        _target_label,
-      ]
-      if (sources != []) {
-        assert(false, "Invalid java target name: $_target_label")
-      }
-    }
-    sources = []
-  }
-
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "deps",
-                             "testonly",
-                           ])
-    if (!defined(deps)) {
-      deps = []
-    }
-    if (defined(invoker.android_manifest_dep)) {
-      deps += [ invoker.android_manifest_dep ]
-    }
-
-    script = "//build/android/gyp/write_build_config.py"
-    depfile = "$target_gen_dir/$target_name.d"
-    inputs = []
-    outputs = [
-      invoker.build_config,
-    ]
-
-    _deps_configs = []
-    if (defined(invoker.possible_config_deps)) {
-      foreach(_possible_dep, invoker.possible_config_deps) {
-        set_sources_assignment_filter(_java_target_whitelist)
-        _target_label = get_label_info(_possible_dep, "label_no_toolchain")
-        sources = [
-          _target_label,
-        ]
-        if (sources == []) {
-          set_sources_assignment_filter(_java_target_blacklist)
-          sources = []
-          sources = [
-            _target_label,
-          ]
-          if (sources != []) {
-            deps += [ "${_target_label}__build_config" ]
-            _dep_gen_dir = get_label_info(_possible_dep, "target_gen_dir")
-            _dep_name = get_label_info(_possible_dep, "name")
-            _deps_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ]
-          }
-        }
-        sources = []
-      }
-    }
-    _rebased_deps_configs = rebase_path(_deps_configs, root_build_dir)
-
-    args = [
-      "--type=$_type",
-      "--depfile",
-      rebase_path(depfile, root_build_dir),
-      "--deps-configs=$_rebased_deps_configs",
-      "--build-config",
-      rebase_path(invoker.build_config, root_build_dir),
-    ]
-
-    if (defined(invoker.jar_path)) {
-      args += [
-        "--jar-path",
-        rebase_path(invoker.jar_path, root_build_dir),
-      ]
-    }
-    if (defined(invoker.unprocessed_jar_path)) {
-      args += [
-        "--unprocessed-jar-path",
-        rebase_path(invoker.unprocessed_jar_path, root_build_dir),
-      ]
-    }
-    if (defined(invoker.ijar_path)) {
-      args += [
-        "--interface-jar-path",
-        rebase_path(invoker.ijar_path, root_build_dir),
-      ]
-    }
-    if (defined(invoker.java_resources_jar)) {
-      args += [
-        "--java-resources-jar-path",
-        rebase_path(invoker.java_resources_jar, root_build_dir),
-      ]
-    }
-    if (defined(invoker.annotation_processor_deps)) {
-      _processor_configs = []
-      foreach(_processor_dep, invoker.annotation_processor_deps) {
-        _target_label = get_label_info(_processor_dep, "label_no_toolchain")
-        _dep_gen_dir = get_label_info(_processor_dep, "target_gen_dir")
-        _dep_name = get_label_info(_processor_dep, "name")
-        deps += [ "${_target_label}__build_config" ]
-        _processor_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ]
-      }
-      _rebased_processor_configs =
-          rebase_path(_processor_configs, root_build_dir)
-      args += [ "--annotation-processor-configs=$_rebased_processor_configs" ]
-    }
-
-    if (defined(invoker.dex_path)) {
-      args += [
-        "--dex-path",
-        rebase_path(invoker.dex_path, root_build_dir),
-      ]
-    }
-    if (defined(invoker.supports_android) && invoker.supports_android) {
-      args += [ "--supports-android" ]
-    }
-    if (defined(invoker.requires_android) && invoker.requires_android) {
-      args += [ "--requires-android" ]
-    }
-    if (defined(invoker.is_prebuilt) && invoker.is_prebuilt) {
-      args += [ "--is-prebuilt" ]
-    }
-    if (defined(invoker.bypass_platform_checks) &&
-        invoker.bypass_platform_checks) {
-      args += [ "--bypass-platform-checks" ]
-    }
-
-    if (defined(invoker.apk_under_test)) {
-      deps += [ "${invoker.apk_under_test}__build_config" ]
-      apk_under_test_gen_dir =
-          get_label_info(invoker.apk_under_test, "target_gen_dir")
-      apk_under_test_name = get_label_info(invoker.apk_under_test, "name")
-      apk_under_test_config =
-          "$apk_under_test_gen_dir/$apk_under_test_name.build_config"
-      args += [
-        "--tested-apk-config",
-        rebase_path(apk_under_test_config, root_build_dir),
-      ]
-    }
-
-    if (defined(invoker.asset_sources)) {
-      _rebased_asset_sources =
-          rebase_path(invoker.asset_sources, root_build_dir)
-      args += [ "--asset-sources=$_rebased_asset_sources" ]
-    }
-    if (defined(invoker.asset_renaming_sources)) {
-      _rebased_asset_renaming_sources =
-          rebase_path(invoker.asset_renaming_sources, root_build_dir)
-      args += [ "--asset-renaming-sources=$_rebased_asset_renaming_sources" ]
-
-      # These are zip paths, so no need to rebase.
-      args += [
-        "--asset-renaming-destinations=${invoker.asset_renaming_destinations}",
-      ]
-    }
-    if (defined(invoker.disable_compression) && invoker.disable_compression) {
-      args += [ "--disable-asset-compression" ]
-    }
-    if (defined(invoker.treat_as_locale_paks) && invoker.treat_as_locale_paks) {
-      args += [ "--treat-as-locale-paks" ]
-    }
-
-    if (defined(invoker.android_manifest)) {
-      inputs += [ invoker.android_manifest ]
-      args += [
-        "--android-manifest",
-        rebase_path(invoker.android_manifest, root_build_dir),
-      ]
-    }
-    if (defined(invoker.resources_zip)) {
-      args += [
-        "--resources-zip",
-        rebase_path(invoker.resources_zip, root_build_dir),
-      ]
-    }
-    if (defined(invoker.custom_package)) {
-      args += [
-        "--package-name",
-        invoker.custom_package,
-      ]
-    }
-    if (defined(invoker.r_text)) {
-      args += [
-        "--r-text",
-        rebase_path(invoker.r_text, root_build_dir),
-      ]
-    }
-
-    if (defined(invoker.resource_dirs)) {
-      resource_dirs = rebase_path(invoker.resource_dirs, root_build_dir)
-      args += [ "--resource-dirs=$resource_dirs" ]
-    }
-
-    if (defined(invoker.proto_resources_path)) {
-      _rebased_proto_resources =
-          rebase_path(invoker.proto_resources_path, root_build_dir)
-      args += [ "--apk-proto-resources=$_rebased_proto_resources" ]
-    }
-
-    if (defined(invoker.shared_libraries_runtime_deps_file)) {
-      # Don't list shared_libraries_runtime_deps_file as an input in order to
-      # avoid having to depend on the runtime_deps target. See comment in
-      # rules.gni for why we do this.
-      args += [
-        "--shared-libraries-runtime-deps",
-        rebase_path(invoker.shared_libraries_runtime_deps_file, root_build_dir),
-      ]
-    }
-
-    if (defined(invoker.secondary_abi_shared_libraries_runtime_deps_file)) {
-      # Don't list secondary_abi_shared_libraries_runtime_deps_file as an
-      # input in order to avoid having to depend on the runtime_deps target.
-      # See comment in rules.gni for why we do this.
-      args += [
-        "--secondary-abi-shared-libraries-runtime-deps",
-        rebase_path(invoker.secondary_abi_shared_libraries_runtime_deps_file,
-                    root_build_dir),
-      ]
-    }
-
-    if (defined(invoker.apk_path)) {
-      _rebased_apk_path = rebase_path(invoker.apk_path, root_build_dir)
-      _rebased_incremental_apk_path =
-          rebase_path(invoker.incremental_apk_path, root_build_dir)
-      _rebased_incremental_install_json_path =
-          rebase_path(invoker.incremental_install_json_path, root_build_dir)
-      _incremental_allowed =
-          defined(invoker.incremental_allowed) && invoker.incremental_allowed
-      args += [ "--apk-path=$_rebased_apk_path" ]
-      args += [ "--incremental-install-json-path=$_rebased_incremental_install_json_path" ]
-
-      assert(_rebased_incremental_apk_path != "")  # Mark as used.
-      if (_incremental_allowed) {
-        args += [ "--incremental-apk-path=$_rebased_incremental_apk_path" ]
-      }
-    }
-
-    if (defined(invoker.non_native_packed_relocations) &&
-        invoker.non_native_packed_relocations) {
-      args += [ "--non-native-packed-relocations" ]
-    }
-    if (defined(invoker.java_sources_file)) {
-      args += [
-        "--java-sources-file",
-        rebase_path(invoker.java_sources_file, root_build_dir),
-      ]
-    }
-    if (defined(invoker.srcjar)) {
-      args += [
-        "--srcjar",
-        rebase_path(invoker.srcjar, root_build_dir),
-      ]
-    }
-    if (defined(invoker.bundled_srcjars)) {
-      _rebased_bundled_srcjars =
-          rebase_path(invoker.bundled_srcjars, root_build_dir)
-      args += [ "--bundled-srcjars=$_rebased_bundled_srcjars" ]
-    }
-    if (defined(invoker.classpath_deps)) {
-      _classpath_deps_configs = []
-      foreach(d, invoker.classpath_deps) {
-        _target_label = get_label_info(d, "label_no_toolchain")
-        deps += [ "${_target_label}__build_config" ]
-        _dep_gen_dir = get_label_info(d, "target_gen_dir")
-        _dep_name = get_label_info(d, "name")
-        _classpath_deps_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ]
-      }
-      _rebased_classpath_deps_configs =
-          rebase_path(_classpath_deps_configs, root_build_dir)
-      args += [ "--classpath-deps-configs=$_rebased_classpath_deps_configs" ]
-    }
-    if (defined(invoker.input_jars_paths)) {
-      _rebased_input_jars_paths =
-          rebase_path(invoker.input_jars_paths, root_build_dir)
-      args += [ "--extra-classpath-jars=$_rebased_input_jars_paths" ]
-    }
-    if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) {
-      args += [ "--proguard-enabled" ]
-    }
-    if (defined(invoker.proguard_configs)) {
-      _rebased_proguard_configs =
-          rebase_path(invoker.proguard_configs, root_build_dir)
-      args += [ "--proguard-configs=$_rebased_proguard_configs" ]
-    }
-    if (defined(invoker.gradle_treat_as_prebuilt) &&
-        invoker.gradle_treat_as_prebuilt) {
-      args += [ "--gradle-treat-as-prebuilt" ]
-    }
-    if (defined(invoker.main_class)) {
-      args += [
-        "--main-class",
-        invoker.main_class,
-      ]
-    }
-    if (defined(invoker.alternative_android_sdk_ijar)) {
-      args += [
-        "--bootclasspath",
-        rebase_path(invoker.alternative_android_sdk_ijar, root_build_dir),
-      ]
-    }
-    if (current_toolchain != default_toolchain) {
-      # This has to be a build-time error rather than a GN assert because many
-      # packages have a mix of java and non-java targets. For example, the
-      # following would fail even though nothing depends on :bar(//baz):
-      #
-      # shared_library("foo") {
-      # }
-      #
-      # android_library("bar") {
-      #   deps = [ ":foo(//baz)" ]
-      #   assert(current_toolchain == default_toolchain)
-      # }
-      _msg = [
-        "Tried to build an Android target in a non-default toolchain.",
-        "target: " + get_label_info(":$target_name", "label_with_toolchain"),
-        "default_toolchain: $default_toolchain",
-      ]
-      args += [ "--fail=$_msg" ]
-    }
-  }
-}
-
-# Copy a list of files into a destination directory, potentially renaming
-# files as they are copied. This also ensures that symlinks are followed
-# during the copy (i.e. the symlinks are never copied, only their content).
-#
-# Variables:
-#  dest: Destination directory path.
-#  sources: List of source files or directories to copy to dest.
-#  renaming_sources: Optional list of source file paths that will be renamed
-#    during the copy operation. If provided, renaming_destinations is required.
-#  renaming_destinations: Optional list of destination file paths, required
-#    when renaming_sources is provided. Both lists should have the same size
-#    and matching entries.
-#  args: Optional. Additional arguments to the copy_ex.py script.
-#
-#  The following variables have the usual GN meaning: data, deps, inputs,
-#  outputs, testonly, visibility.
-#
-template("copy_ex") {
-  set_sources_assignment_filter([])
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "data",
-                             "deps",
-                             "inputs",
-                             "outputs",
-                             "sources",
-                             "testonly",
-                             "visibility",
-                           ])
-    if (!defined(sources)) {
-      sources = []
-    }
-    script = "//build/android/gyp/copy_ex.py"
-
-    args = [
-      "--dest",
-      rebase_path(invoker.dest, root_build_dir),
-    ]
-    rebased_sources = rebase_path(sources, root_build_dir)
-    args += [ "--files=$rebased_sources" ]
-
-    if (defined(invoker.args)) {
-      args += invoker.args
-    }
-
-    if (defined(invoker.renaming_sources) &&
-        defined(invoker.renaming_destinations)) {
-      sources += invoker.renaming_sources
-      rebased_renaming_sources =
-          rebase_path(invoker.renaming_sources, root_build_dir)
-      args += [ "--renaming-sources=$rebased_renaming_sources" ]
-
-      renaming_destinations = invoker.renaming_destinations
-      args += [ "--renaming-destinations=$renaming_destinations" ]
-    }
-  }
-}
-
-# Generates a script in the build bin directory which runs the test
-# target using the test runner script in build/android/test_runner.py.
-template("test_runner_script") {
-  testonly = true
-  _test_name = invoker.test_name
-  _test_type = invoker.test_type
-  _incremental_install =
-      defined(invoker.incremental_install) && invoker.incremental_install
-
-  _runtime_deps =
-      !defined(invoker.ignore_all_data_deps) || !invoker.ignore_all_data_deps
-
-  if (_runtime_deps) {
-    # This runtime_deps file is used at runtime and thus cannot go in
-    # target_gen_dir.
-    _target_dir_name = get_label_info(":$target_name", "dir")
-    _runtime_deps_file =
-        "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.runtime_deps"
-    _runtime_deps_target = "${target_name}__write_deps"
-    group(_runtime_deps_target) {
-      forward_variables_from(invoker,
-                             [
-                               "data",
-                               "deps",
-                               "public_deps",
-                             ])
-      data_deps = []
-      if (defined(invoker.data_deps)) {
-        data_deps += invoker.data_deps
-      }
-      if (defined(invoker.additional_apks)) {
-        data_deps += invoker.additional_apks
-      }
-      write_runtime_deps = _runtime_deps_file
-    }
-  }
-
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "data_deps",
-                             "deps",
-                           ])
-    if (!defined(deps)) {
-      deps = []
-    }
-    if (!defined(data_deps)) {
-      data_deps = []
-    }
-
-    script = "//build/android/gyp/create_test_runner_script.py"
-    depfile = "$target_gen_dir/$target_name.d"
-
-    data_deps += [
-      "//build/android:test_runner_py",
-      "//build/android:logdog_wrapper_py",
-    ]
-
-    data = []
-
-    test_runner_args = [
-      _test_type,
-      "--output-directory",
-      rebase_path(root_build_dir, root_build_dir),
-    ]
-
-    if (_runtime_deps) {
-      deps += [ ":$_runtime_deps_target" ]
-      data += [ _runtime_deps_file ]
-      test_runner_args += [
-        "--runtime-deps-path",
-        rebase_path(_runtime_deps_file, root_build_dir),
-      ]
-    }
-
-    # apk_target is not used for native executable tests
-    # (e.g. breakpad_unittests).
-    if (defined(invoker.apk_target)) {
-      assert(!defined(invoker.executable_dist_dir))
-      deps += [ "${invoker.apk_target}__build_config" ]
-      _apk_build_config =
-          get_label_info(invoker.apk_target, "target_gen_dir") + "/" +
-          get_label_info(invoker.apk_target, "name") + ".build_config"
-      _rebased_apk_build_config = rebase_path(_apk_build_config, root_build_dir)
-      assert(_rebased_apk_build_config != "")  # Mark as used.
-    } else if (_test_type == "gtest") {
-      assert(
-          defined(invoker.executable_dist_dir),
-          "Must define either apk_target or executable_dist_dir for test_runner_script()")
-      test_runner_args += [
-        "--executable-dist-dir",
-        rebase_path(invoker.executable_dist_dir, root_build_dir),
-      ]
-    }
-
-    _device_test = true
-    if (_test_type == "gtest") {
-      assert(defined(invoker.test_suite))
-      test_runner_args += [
-        "--suite",
-        invoker.test_suite,
-      ]
-    } else if (_test_type == "instrumentation") {
-      _test_apk = "@FileArg($_rebased_apk_build_config:deps_info:apk_path)"
-      if (_incremental_install) {
-        _test_apk = "@FileArg($_rebased_apk_build_config:deps_info:incremental_apk_path)"
-      }
-      test_runner_args += [
-        "--test-apk=$_test_apk",
-        "--test-jar",
-        rebase_path(invoker.test_jar, root_build_dir),
-      ]
-      if (defined(invoker.apk_under_test)) {
-        deps += [ "${invoker.apk_under_test}__build_config" ]
-        _apk_under_test_build_config =
-            get_label_info(invoker.apk_under_test, "target_gen_dir") + "/" +
-            get_label_info(invoker.apk_under_test, "name") + ".build_config"
-        _rebased_apk_under_test_build_config =
-            rebase_path(_apk_under_test_build_config, root_build_dir)
-        _apk_under_test =
-            "@FileArg($_rebased_apk_under_test_build_config:deps_info:apk_path)"
-        if (_incremental_install) {
-          _apk_under_test = "@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_apk_path)"
-        }
-        test_runner_args += [ "--apk-under-test=$_apk_under_test" ]
-        test_runner_args += [
-          "--non-native-packed-relocations",
-          "@FileArg($_rebased_apk_under_test_build_config:deps_info:non_native_packed_relocations)",
-        ]
-      }
-      if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) {
-        test_runner_args += [ "--enable-java-deobfuscation" ]
-      }
-      if (emma_coverage) {
-        # Set a default coverage output directory (can be overridden by user
-        # passing the same flag).
-        test_runner_args += [
-          "--coverage-dir",
-          rebase_path("$root_out_dir/coverage", root_build_dir),
-        ]
-      }
-    } else if (_test_type == "junit") {
-      assert(defined(invoker.test_suite))
-      _device_test = false
-      test_runner_args += [
-        "--test-suite",
-        invoker.test_suite,
-      ]
-      if (defined(invoker.android_manifest_path)) {
-        test_runner_args += [
-          "--android-manifest-path",
-          rebase_path(invoker.android_manifest_path, root_build_dir),
-        ]
-      }
-
-      if (defined(invoker.package_name)) {
-        test_runner_args += [
-          "--package-name",
-          invoker.package_name,
-        ]
-
-        deps += [ ":${invoker.test_suite}__build_config" ]
-        _junit_binary_build_config =
-            "${target_gen_dir}/${invoker.test_suite}.build_config"
-        _rebased_build_config =
-            rebase_path("$_junit_binary_build_config", root_build_dir)
-        test_runner_args += [
-          "--resource-zips",
-          "@FileArg($_rebased_build_config:resources:dependency_zips)",
-        ]
-      }
-
-      test_runner_args += [
-        "--robolectric-runtime-deps-dir",
-        rebase_path("$root_build_dir/lib.java/third_party/robolectric",
-                    root_build_dir),
-      ]
-    } else if (_test_type == "linker") {
-      test_runner_args += [
-        "--test-apk",
-        "@FileArg($_rebased_apk_build_config:deps_info:apk_path)",
-      ]
-    } else {
-      assert(false, "Invalid test type: $_test_type.")
-    }
-
-    if (defined(invoker.additional_apks)) {
-      foreach(additional_apk, invoker.additional_apks) {
-        deps += [ "${additional_apk}__build_config" ]
-        _build_config = get_label_info(additional_apk, "target_gen_dir") + "/" +
-                        get_label_info(additional_apk, "name") + ".build_config"
-        _rebased_build_config = rebase_path(_build_config, root_build_dir)
-        test_runner_args += [
-          "--additional-apk",
-          "@FileArg($_rebased_build_config:deps_info:apk_path)",
-          "--additional-apk-incremental",
-          "@FileArg($_rebased_build_config:deps_info:incremental_apk_path)",
-        ]
-      }
-    }
-    if (defined(invoker.shard_timeout)) {
-      test_runner_args += [ "--shard-timeout=${invoker.shard_timeout}" ]
-    }
-    if (_incremental_install) {
-      test_runner_args += [
-        "--test-apk-incremental-install-json",
-        "@FileArg($_rebased_apk_build_config:deps_info:incremental_install_json_path)",
-      ]
-      if (defined(invoker.apk_under_test)) {
-        test_runner_args += [
-          "--apk-under-test-incremental-install-json",
-          "@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_install_json_path)",
-        ]
-      }
-      test_runner_args += [ "--fast-local-dev" ]
-    }
-    if (_device_test && is_asan) {
-      test_runner_args += [ "--tool=asan" ]
-    }
-
-    if (defined(invoker.generated_script)) {
-      assert(_test_name != "" || true)  # Mark _test_name as used.
-      generated_script = invoker.generated_script
-    } else {
-      generated_script = "$root_build_dir/bin/run_${_test_name}"
-    }
-    outputs = [
-      generated_script,
-    ]
-    data += [ generated_script ]
-
-    args = [
-      "--depfile",
-      rebase_path(depfile, root_build_dir),
-      "--script-output-path",
-      rebase_path(generated_script, root_build_dir),
-    ]
-    if (defined(android_test_runner_script)) {
-      args += [
-        "--test-runner-path",
-        android_test_runner_script,
-      ]
-    }
-
-    args += test_runner_args
-  }
-}
-
-template("stack_script") {
-  forward_variables_from(invoker, [ "testonly" ])
-
-  _stack_target_name = invoker.stack_target_name
-
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "data_deps",
-                             "deps",
-                           ])
-    if (!defined(deps)) {
-      deps = []
-    }
-    if (!defined(data_deps)) {
-      data_deps = []
-    }
-
-    data_deps +=
-        [ "//third_party/android_platform/development/scripts:stack_py" ]
-
-    script = "//build/android/gyp/create_stack_script.py"
-    depfile = "$target_gen_dir/$target_name.d"
-
-    _stack_script = "//third_party/android_platform/development/scripts/stack"
-
-    _generated_script = "$root_build_dir/bin/stack_${_stack_target_name}"
-
-    outputs = [
-      _generated_script,
-    ]
-    data = [
-      _generated_script,
-    ]
-
-    args = [
-      "--depfile",
-      rebase_path(depfile, root_build_dir),
-      "--output-directory",
-      rebase_path(root_build_dir, root_build_dir),
-      "--script-path",
-      rebase_path(_stack_script, root_build_dir),
-      "--script-output-path",
-      rebase_path(_generated_script, root_build_dir),
-      "--arch=$target_cpu",
-    ]
-    if (defined(invoker.packed_libraries)) {
-      args += [
-        "--packed-libs",
-        invoker.packed_libraries,
-      ]
-    }
-  }
-}
-
-if (enable_java_templates) {
-  import("//build/config/zip.gni")
-  import("//third_party/ijar/ijar.gni")
-  import("//third_party/android_platform/config.gni")
-
-  android_sdk_jar = "$android_sdk/android.jar"
-  android_default_aapt_path = "$android_sdk_build_tools/aapt"
-
-  template("android_lint") {
-    action(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "data_deps",
-                               "public_deps",
-                               "testonly",
-                             ])
-      if (!defined(deps)) {
-        deps = []
-      }
-
-      if (defined(invoker.lint_suppressions_file)) {
-        lint_suppressions_file = invoker.lint_suppressions_file
-      } else if (!defined(lint_suppressions_file)) {
-        lint_suppressions_file = "//build/android/lint/suppressions.xml"
-      }
-
-      _lint_path = "$lint_android_sdk_root/tools/bin/lint"
-      _cache_dir = "$root_build_dir/android_lint_cache"
-      _result_path = "$target_gen_dir/$target_name/result.xml"
-      _config_path = "$target_gen_dir/$target_name/config.xml"
-      _suppressions_file = lint_suppressions_file
-      _platform_xml_path =
-          "${android_sdk_root}/platform-tools/api/api-versions.xml"
-
-      script = "//build/android/gyp/lint.py"
-      depfile = "$target_gen_dir/$target_name.d"
-      inputs = [
-        _platform_xml_path,
-        _suppressions_file,
-      ]
-
-      outputs = [
-        _result_path,
-        _config_path,
-      ]
-
-      args = [
-        "--lint-path",
-        rebase_path(_lint_path, root_build_dir),
-        "--cache-dir",
-        rebase_path(_cache_dir, root_build_dir),
-        "--platform-xml-path",
-        rebase_path(_platform_xml_path, root_build_dir),
-        "--android-sdk-version=${lint_android_sdk_version}",
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--config-path",
-        rebase_path(_suppressions_file, root_build_dir),
-        "--product-dir=.",
-        "--processed-config-path",
-        rebase_path(_config_path, root_build_dir),
-        "--result-path",
-        rebase_path(_result_path, root_build_dir),
-        "--include-unexpected-failures",
-      ]
-      if (defined(invoker.android_manifest)) {
-        inputs += [ invoker.android_manifest ]
-        args += [
-          "--manifest-path",
-          rebase_path(invoker.android_manifest, root_build_dir),
-        ]
-      }
-
-      if (defined(invoker.disable)) {
-        args += [ "--disable=${invoker.disable}" ]
-      }
-
-      if (defined(invoker.create_cache) && invoker.create_cache) {
-        args += [
-          "--create-cache",
-          "--silent",
-        ]
-      } else {
-        inputs += invoker.java_files
-        inputs += [
-          invoker.jar_path,
-          invoker.build_config,
-        ]
-        if (invoker.java_files != []) {
-          inputs += [ invoker.java_sources_file ]
-          _rebased_java_sources_file =
-              rebase_path(invoker.java_sources_file, root_build_dir)
-          args += [ "--java-sources-file=$_rebased_java_sources_file" ]
-        }
-        deps += [ "//build/android:prepare_android_lint_cache" ]
-
-        _rebased_build_config =
-            rebase_path(invoker.build_config, root_build_dir)
-        args += [
-          "--jar-path",
-          rebase_path(invoker.jar_path, root_build_dir),
-          "--classpath=@FileArg($_rebased_build_config:javac:interface_classpath)",
-          "--srcjars=@FileArg($_rebased_build_config:gradle:bundled_srcjars)",
-          "--can-fail-build",
-        ]
-        if (invoker.requires_android) {
-          args += [
-            "--resource-sources=@FileArg($_rebased_build_config:deps_info:owned_resources_dirs)",
-            "--resource-sources=@FileArg($_rebased_build_config:deps_info:owned_resources_zips)",
-          ]
-        }
-      }
-    }
-  }
-
-  template("proguard") {
-    action(target_name) {
-      set_sources_assignment_filter([])
-      forward_variables_from(invoker,
-                             [
-                               "data",
-                               "data_deps",
-                               "deps",
-                               "public_deps",
-                               "testonly",
-                             ])
-      script = "//build/android/gyp/proguard.py"
-
-      # http://crbug.com/725224. Fix for bots running out of memory.
-      pool = "//build/toolchain:link_pool($default_toolchain)"
-
-      _output_jar_path = invoker.output_jar_path
-      _proguard_jar_path = _default_proguard_jar_path
-      if (defined(invoker.proguard_jar_path)) {
-        _proguard_jar_path = invoker.proguard_jar_path
-      }
-      _android_sdk_jar = android_sdk_jar
-      if (defined(invoker.alternative_android_sdk_jar)) {
-        _android_sdk_jar = invoker.alternative_android_sdk_jar
-      }
-
-      inputs = [
-        _android_sdk_jar,
-        _proguard_jar_path,
-      ]
-      if (defined(invoker.inputs)) {
-        inputs += invoker.inputs
-      }
-      depfile = "${target_gen_dir}/${target_name}.d"
-      outputs = [
-        _output_jar_path,
-        "$_output_jar_path.flags",
-        "$_output_jar_path.mapping",
-        "$_output_jar_path.seeds",
-        "$_output_jar_path.usage",
-      ]
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--proguard-path",
-        rebase_path(_proguard_jar_path, root_build_dir),
-        "--output-path",
-        rebase_path(_output_jar_path, root_build_dir),
-        "--classpath",
-        rebase_path(_android_sdk_jar, root_build_dir),
-      ]
-      if (proguard_verbose) {
-        args += [ "--verbose" ]
-      }
-      if (defined(invoker.args)) {
-        args += invoker.args
-      }
-      if (defined(invoker.proguard_jar_path)) {
-        # We assume that if we are using a different ProGuard, this new version
-      # can handle the 'dangerous' optimizations.
-        args += [ "--enable-dangerous-optimizations" ]
-      }
-    }
-  }
-
-  # Generates a script in the build bin directory to run a java binary.
-  #
-  # Variables
-  #   main_class: The class containing the program entry point.
-  #   build_config: Path to .build_config for the jar (contains classpath).
-  #   jar_path: Optional. First classpath entry to be inserted before
-  #     the classpath extracted from the build_config.
-  #   script_name: Name of the script to generate.
-  #   wrapper_script_args: List of extra arguments to pass to the executable.
-  #   bootclasspath: Optional. List of zip/jar file paths to add to the boot
-  #     class path when the script invokes javac.
-  #
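-  # Example invocation showing the typical variables (illustrative sketch
-  # only; the target name, paths and main class below are hypothetical):
-  #
-  #   java_binary_script("foo_java_binary_script") {
-  #     script_name = "foo"
-  #     main_class = "org.example.FooMain"
-  #     build_config = "$target_gen_dir/foo.build_config"
-  #     jar_path = "$root_build_dir/lib.java/foo.jar"
-  #     wrapper_script_args = [ "--verbose" ]
-  #     deps = [ ":foo_java" ]
-  #   }
-  #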
-  template("java_binary_script") {
-    action(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "testonly",
-                             ])
-
-      _main_class = invoker.main_class
-      _build_config = invoker.build_config
-      _script_name = invoker.script_name
-
-      script = "//build/android/gyp/create_java_binary_script.py"
-      depfile = "$target_gen_dir/$_script_name.d"
-      _java_script = "$root_build_dir/bin/$_script_name"
-      inputs = [
-        _build_config,
-      ]
-      outputs = [
-        _java_script,
-      ]
-      _rebased_build_config = rebase_path(_build_config, root_build_dir)
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--output",
-        rebase_path(_java_script, root_build_dir),
-        "--main-class",
-        _main_class,
-      ]
-      if (defined(invoker.jar_path)) {
-        _jar_path_list = [ rebase_path(invoker.jar_path, root_build_dir) ]
-        args += [ "--classpath=$_jar_path_list" ]
-      }
-      args += [ "--classpath=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath)" ]
-
-      if (emma_coverage) {
-        args += [
-          "--classpath",
-          rebase_path("//third_party/android_tools/sdk/tools/lib/emma.jar",
-                      root_build_dir),
-          "--noverify",
-        ]
-      }
-      if (defined(invoker.wrapper_script_args)) {
-        args += [ "--" ] + invoker.wrapper_script_args
-      }
-      if (defined(invoker.bootclasspath)) {
-        args += [
-          "--bootclasspath",
-          rebase_path(invoker.bootclasspath, root_build_dir),
-        ]
-      }
-    }
-  }
-
-  template("dex") {
-    _enable_multidex =
-        defined(invoker.enable_multidex) && invoker.enable_multidex
-
-    if (_enable_multidex) {
-      _main_dex_list_path = invoker.output + ".main_dex_list"
-      _main_dex_list_target_name = "${target_name}__main_dex_list"
-      action(_main_dex_list_target_name) {
-        forward_variables_from(invoker,
-                               [
-                                 "deps",
-                                 "testonly",
-                               ])
-
-        script = "//build/android/gyp/main_dex_list.py"
-        depfile = "$target_gen_dir/$target_name.d"
-
-        # http://crbug.com/725224. Fix for bots running out of memory.
-        pool = "//build/toolchain:link_pool($default_toolchain)"
-
-        main_dex_rules = "//build/android/main_dex_classes.flags"
-
-        if (defined(invoker.proguard_jar_path)) {
-          _proguard_jar_path = invoker.proguard_jar_path
-        } else {
-          _proguard_jar_path = _default_proguard_jar_path
-        }
-
-        inputs = [
-          main_dex_rules,
-          _proguard_jar_path,
-        ]
-
-        outputs = [
-          _main_dex_list_path,
-        ]
-
-        args = [
-          "--depfile",
-          rebase_path(depfile, root_build_dir),
-          "--android-sdk-tools",
-          rebase_path(android_sdk_build_tools, root_build_dir),
-          "--main-dex-list-path",
-          rebase_path(_main_dex_list_path, root_build_dir),
-          "--main-dex-rules-path",
-          rebase_path(main_dex_rules, root_build_dir),
-          "--proguard-path",
-          rebase_path(_proguard_jar_path, root_build_dir),
-        ]
-
-        if (defined(invoker.extra_main_dex_proguard_config)) {
-          inputs += [ invoker.extra_main_dex_proguard_config ]
-          args += [
-            "--main-dex-rules-path",
-            rebase_path(invoker.extra_main_dex_proguard_config, root_build_dir),
-          ]
-        }
-
-        if (defined(invoker.negative_main_dex_globs)) {
-          args +=
-              [ "--negative-main-dex-globs=${invoker.negative_main_dex_globs}" ]
-        }
-
-        if (defined(invoker.input_jars_file_arg)) {
-          inputs += [ invoker.build_config ]
-          args += [ "--inputs=${invoker.input_jars_file_arg}" ]
-        }
-
-        if (defined(invoker.input_jars)) {
-          inputs += invoker.input_jars
-          args += rebase_path(invoker.input_jars, root_build_dir)
-        }
-      }
-    }
-
-    assert(defined(invoker.output))
-    action(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "testonly",
-                             ])
-      script = "//build/android/gyp/dex.py"
-      depfile = "$target_gen_dir/$target_name.d"
-      inputs = []
-      outputs = [
-        invoker.output,
-      ]
-
-      if (defined(invoker.use_pool) && invoker.use_pool) {
-        pool = "//build/toolchain:link_pool($default_toolchain)"
-      }
-
-      _rebased_output = rebase_path(invoker.output, root_build_dir)
-
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--android-sdk-tools",
-        rebase_path(android_sdk_build_tools, root_build_dir),
-        "--dex-path",
-        _rebased_output,
-      ]
-
-      if (enable_incremental_dx) {
-        args += [ "--incremental" ]
-      }
-
-      # EMMA requires --no-locals.
-      if (emma_coverage) {
-        args += [ "--no-locals=1" ]
-      }
-
-      if (_enable_multidex) {
-        args += [
-          "--multi-dex",
-          "--main-dex-list-path",
-          rebase_path(_main_dex_list_path, root_build_dir),
-        ]
-        deps += [ ":${_main_dex_list_target_name}" ]
-        inputs += [ _main_dex_list_path ]
-      }
-
-      if (defined(invoker.input_jars_file_arg)) {
-        inputs += [ invoker.build_config ]
-        args += [ "--inputs=${invoker.input_jars_file_arg}" ]
-      }
-
-      if (defined(invoker.input_jars)) {
-        inputs += invoker.input_jars
-        args += rebase_path(invoker.input_jars, root_build_dir)
-      }
-    }
-  }
-
-  template("emma_instr") {
-    action(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "public_deps",
-                               "testonly",
-                             ])
-
-      _coverage_file = "$target_out_dir/${target_name}.em"
-      _source_dirs_listing_file = "$target_out_dir/${target_name}_sources.txt"
-      _emma_jar = "${android_sdk_root}/tools/lib/emma.jar"
-
-      script = "//build/android/gyp/emma_instr.py"
-      depfile = "${target_gen_dir}/${target_name}.d"
-      inputs = invoker.java_files + [
-                 _emma_jar,
-                 invoker.input_jar_path,
-               ]
-      outputs = [
-        _coverage_file,
-        _source_dirs_listing_file,
-        invoker.output_jar_path,
-      ]
-      args = [
-        "instrument_jar",
-        "--input-path",
-        rebase_path(invoker.input_jar_path, root_build_dir),
-        "--output-path",
-        rebase_path(invoker.output_jar_path, root_build_dir),
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--coverage-file",
-        rebase_path(_coverage_file, root_build_dir),
-        "--sources-list-file",
-        rebase_path(_source_dirs_listing_file, root_build_dir),
-        "--src-root",
-        rebase_path("//", root_build_dir),
-        "--emma-jar",
-        rebase_path(_emma_jar, root_build_dir),
-      ]
-      _rebased_java_sources_file =
-          rebase_path(invoker.java_sources_file, root_build_dir)
-      args += [ "--java-sources-file=$_rebased_java_sources_file" ]
-
-      if (emma_filter != "") {
-        args += [
-          "--filter-string",
-          emma_filter,
-        ]
-      }
-    }
-  }
-
-  # TODO(digit): Document this!
-  #
-  # Variables:
-  #  testonly:
-  #  build_config:
-  #  input_jar_path:
-  #  output_jar_path:
-  #  enable_build_hooks:
-  #  enable_build_hooks_android:
-  #  supports_android:
-  #  emma_instrument:
-  #  jar_excluded_patterns: Optional list of .class file patterns to exclude
-  #    from the final .jar file.
-  #  jar_included_patterns: Optional list of .class file patterns to include
-  #    in the final .jar file. jar_excluded_patterns takes precedence over this.
-  #  strip_resource_classes:
-  #  alternative_android_sdk_ijar:
-  #  alternative_android_sdk_ijar_dep:
-  #  alternative_android_sdk:
-  #  deps:
-  #  java_files:
-  #  java_sources_file:
-  #  inputs:
-  #  data_deps:
-  #  visibility:
-  #
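-  # Example invocation showing the typical variables (illustrative sketch
-  # only; the target name and paths below are hypothetical):
-  #
-  #   process_java_prebuilt("foo_java__process_prebuilt") {
-  #     build_config = "$target_gen_dir/foo_java.build_config"
-  #     input_jar_path = "$target_out_dir/foo_java.javac.jar"
-  #     output_jar_path = "$target_out_dir/foo_java.processed.jar"
-  #     supports_android = true
-  #     emma_instrument = false
-  #     jar_excluded_patterns = [ "*/R.class" ]
-  #     deps = [ ":foo_java__build_config" ]
-  #   }
-  #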
-  template("process_java_prebuilt") {
-    set_sources_assignment_filter([])
-    forward_variables_from(invoker, [ "testonly" ])
-
-    assert(invoker.build_config != "")
-    _build_config = invoker.build_config
-    _rebased_build_config = rebase_path(_build_config, root_build_dir)
-    assert(_rebased_build_config != "" || true)  # Mark used.
-
-    _input_jar_path = invoker.input_jar_path
-    _output_jar_path = invoker.output_jar_path
-
-    _enable_assert =
-        defined(invoker.enable_build_hooks) && invoker.enable_build_hooks &&
-        (is_java_debug || dcheck_always_on || report_java_assert)
-
-    _enable_custom_resources = defined(invoker.enable_build_hooks_android) &&
-                               invoker.enable_build_hooks_android
-
-    _desugar = defined(invoker.supports_android) && invoker.supports_android
-    _emma_instrument = invoker.emma_instrument
-
-    _jar_excluded_patterns = []
-    if (defined(invoker.jar_excluded_patterns)) {
-      _jar_excluded_patterns = invoker.jar_excluded_patterns
-    }
-    _jar_included_patterns = []
-    if (defined(invoker.jar_included_patterns)) {
-      _jar_included_patterns = invoker.jar_included_patterns
-    }
-    _strip_resource_classes = defined(invoker.strip_resource_classes) &&
-                              invoker.strip_resource_classes
-    _filter_jar = _jar_excluded_patterns != [] ||
-                  _jar_included_patterns != [] || _strip_resource_classes
-
-    _deps = []
-    _previous_output_jar = _input_jar_path
-
-    assert(!defined(invoker.alternative_android_sdk_ijar) ||
-           invoker.alternative_android_sdk_ijar != "")
-    assert(!defined(invoker.alternative_android_sdk_ijar_dep) ||
-           invoker.alternative_android_sdk_ijar_dep != "")
-    assert(!defined(invoker.alternative_android_sdk_jar) ||
-           invoker.alternative_android_sdk_jar != "")
-
-    if (_enable_assert || _enable_custom_resources) {
-      _java_bytecode_rewriter_target = "${target_name}__bytecode_rewrite"
-      _java_bytecode_rewriter_input_jar = _previous_output_jar
-      _java_bytecode_rewriter_output_jar =
-          "$target_out_dir/$target_name-bytecode-rewritten.jar"
-
-      action(_java_bytecode_rewriter_target) {
-        script = "//build/android/gyp/bytecode_processor.py"
-        depfile = "$target_gen_dir/$target_name.d"
-        _bytecode_rewriter_script =
-            "$root_build_dir/bin/helper/java_bytecode_rewriter"
-        deps = [
-          "//build/android/bytecode:java_bytecode_rewriter($default_toolchain)",
-        ]
-        deps += _deps
-        if (defined(invoker.deps)) {
-          deps += invoker.deps
-        }
-        _android_sdk_jar = android_sdk_jar
-        if (defined(invoker.alternative_android_sdk_jar)) {
-          _android_sdk_jar = invoker.alternative_android_sdk_jar
-        }
-        inputs = [
-          _android_sdk_jar,
-          _java_bytecode_rewriter_input_jar,
-          _build_config,
-        ]
-        outputs = [
-          _java_bytecode_rewriter_output_jar,
-        ]
-        args = [
-          "--depfile",
-          rebase_path(depfile, root_build_dir),
-          "--script",
-          rebase_path(_bytecode_rewriter_script, root_build_dir),
-          "--input-jar",
-          rebase_path(_java_bytecode_rewriter_input_jar, root_build_dir),
-          "--output-jar",
-          rebase_path(_java_bytecode_rewriter_output_jar, root_build_dir),
-        ]
-        if (_enable_assert) {
-          args += [ "--enable-assert" ]
-        }
-        if (_enable_custom_resources) {
-          args += [ "--enable-custom-resources" ]
-        }
-        args += [
-          "--extra-classpath-jar",
-          rebase_path(_android_sdk_jar, root_build_dir),
-          "--extra-classpath-jar",
-          "@FileArg($_rebased_build_config:deps_info:javac_full_classpath)",
-        ]
-      }
-
-      _deps = []
-      _deps = [ ":$_java_bytecode_rewriter_target" ]
-      _previous_output_jar = _java_bytecode_rewriter_output_jar
-    }
-
-    if (_desugar) {
-      _desugar_target = "${target_name}__desugar"
-      _desugar_input_jar = _previous_output_jar
-      _desugar_output_jar = "$target_out_dir/$target_name-desugar.jar"
-
-      action(_desugar_target) {
-        script = "//build/android/gyp/desugar.py"
-        depfile = "$target_gen_dir/$target_name.d"
-        deps = _deps
-        if (defined(invoker.deps)) {
-          deps += invoker.deps
-        }
-        inputs = [
-          _build_config,
-          _desugar_input_jar,
-        ]
-        outputs = [
-          _desugar_output_jar,
-        ]
-        if (defined(invoker.alternative_android_sdk_ijar)) {
-          deps += [ invoker.alternative_android_sdk_ijar_dep ]
-          _android_sdk_ijar = invoker.alternative_android_sdk_ijar
-        } else {
-          deps += [ "//build/android:android_ijar" ]
-          _android_sdk_ijar = "$root_out_dir/lib.java/android.interface.jar"
-        }
-        inputs += [ _android_sdk_ijar ]
-        args = [
-          "--depfile",
-          rebase_path(depfile, root_build_dir),
-          "--input-jar",
-          rebase_path(_desugar_input_jar, root_build_dir),
-          "--output-jar",
-          rebase_path(_desugar_output_jar, root_build_dir),
-          "--classpath=@FileArg($_rebased_build_config:javac:interface_classpath)",
-          "--bootclasspath-entry",
-          rebase_path(_android_sdk_ijar, root_build_dir),
-        ]
-      }
-
-      _deps = []
-      _deps = [ ":$_desugar_target" ]
-      _previous_output_jar = _desugar_output_jar
-    }
-
-    if (_filter_jar) {
-      _filter_target = "${target_name}__filter"
-      _filter_input_jar = _previous_output_jar
-      _filter_output_jar = "$target_out_dir/$target_name-filtered.jar"
-
-      action(_filter_target) {
-        script = "//build/android/gyp/jar.py"
-        deps = _deps
-        if (defined(invoker.deps)) {
-          deps += invoker.deps
-        }
-        inputs = [
-          _build_config,
-          _filter_input_jar,
-        ]
-        outputs = [
-          _filter_output_jar,
-        ]
-        args = [
-          "--input-jar",
-          rebase_path(_filter_input_jar, root_build_dir),
-          "--jar-path",
-          rebase_path(_filter_output_jar, root_build_dir),
-          "--excluded-classes=$_jar_excluded_patterns",
-          "--included-classes=$_jar_included_patterns",
-        ]
-        if (_strip_resource_classes) {
-          args += [ "--strip-resource-classes-for=@FileArg($_rebased_build_config:javac:resource_packages)" ]
-        }
-      }
-
-      _deps = []
-      _deps = [ ":$_filter_target" ]
-      _previous_output_jar = _filter_output_jar
-    }
-
-    if (_emma_instrument) {
-      # Emma must run after desugar (or else desugar sometimes fails).
-      _emma_target = "${target_name}__emma"
-      _emma_input_jar = _previous_output_jar
-      _emma_output_jar = "$target_out_dir/$target_name-instrumented.jar"
-
-      emma_instr(_emma_target) {
-        deps = _deps
-        if (defined(invoker.deps)) {
-          deps += invoker.deps
-        }
-
-        forward_variables_from(invoker,
-                               [
-                                 "java_files",
-                                 "java_sources_file",
-                               ])
-
-        input_jar_path = _emma_input_jar
-        output_jar_path = _emma_output_jar
-      }
-
-      _deps = []
-      _deps = [ ":$_emma_target" ]
-      _previous_output_jar = _emma_output_jar
-    }
-
-    _output_jar_target = "${target_name}__copy"
-
-    # This is copy_ex rather than copy to ensure that JARs (rather than
-    # possibly broken symlinks to them) get copied into the output
-    # directory.
-    copy_ex(_output_jar_target) {
-      forward_variables_from(invoker, [ "inputs" ])
-      deps = _deps
-      if (defined(invoker.deps)) {
-        deps += invoker.deps
-      }
-      dest = _output_jar_path
-      sources = [
-        _previous_output_jar,
-      ]
-      outputs = [
-        _output_jar_path,
-      ]
-    }
-
-    group(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "data_deps",
-                               "visibility",
-                             ])
-      public_deps = [
-        ":$_output_jar_target",
-      ]
-    }
-  }
-
-  template("merge_manifests") {
-    action(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "testonly",
-                             ])
-      script = "//build/android/gyp/merge_manifest.py"
-      depfile = "$target_gen_dir/$target_name.d"
-
-      inputs = [
-        invoker.build_config,
-        invoker.input_manifest,
-      ]
-
-      outputs = [
-        invoker.output_manifest,
-      ]
-      _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
-
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--build-vars",
-        rebase_path(android_build_vars, root_build_dir),
-        "--root-manifest",
-        rebase_path(invoker.input_manifest, root_build_dir),
-        "--output",
-        rebase_path(invoker.output_manifest, root_build_dir),
-        "--extras",
-        "@FileArg($_rebased_build_config:extra_android_manifests)",
-      ]
-    }
-  }
-
-  # This template is used to parse a set of resource directories and
-  # create the R.txt, .srcjar and .resources.zip for it.
-  #
-  # Input variables:
-  #   deps: Specifies the input dependencies for this target.
-  #
-  #   build_config: Path to the .build_config file corresponding to the target.
-  #
-  #   resource_dirs:
-  #     List of directories containing Android resources, layout should be
-  #     similar to what aapt -S <dir> expects.
-  #
-  #   generated_resource_dirs: (optional)
-  #     List of directories containing generated resources.
-  #
-  #   generated_resource_files: (optional)
-  #     If generated_resource_dirs is not empty, must list all the files
-  #     within these directories (the directory must appear at the start of
-  #     the file path).
-  #
-  #   custom_package: (optional)
-  #     Package name for the generated R.java source file. Optional if
-  #     android_manifest is not provided.
-  #
-  #   android_manifest: (optional)
-  #     If custom_package is not provided, path to an AndroidManifest.xml file
-  #     that is only used to extract a package name out of it.
-  #
-  #   r_text_in_path: (optional)
-  #     Path to an input R.txt file to use to generate the R.java file.
-  #     The default is to use 'aapt' to generate the file from the content
-  #     of the resource directories.
-  #
-  #   alternative_android_sdk_jar: Alternative system android.jar to use.
-  #
-  #   shared_resources: (optional)
-  #     If true, generate an R.java file that uses non-final resource ID
-  #     variables and an onResourcesLoaded() method.
-  #
-  #   v14_skip: (optional)
-  #     If true, skip generation of v14 compatible resources.
-  #     (see generate_v14_compatible_resources.py for details).
-  #
-  # Output variables:
-  #   zip_path: (optional)
-  #     Path to a .resources.zip that will simply contain all the
-  #     input resources, collected in a single archive.
-  #
-  #   r_text_out_path: (optional): Path for the generated R.txt file.
-  #
-  #   srcjar_path: (optional) Path to a generated .srcjar containing the
-  #                           generated R.java source file.
-  #
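-  #
-  # Example invocation showing the typical inputs and outputs (illustrative
-  # sketch only; the target name, package and paths below are hypothetical):
-  #
-  #   prepare_resources("foo_resources__prepare") {
-  #     build_config = "$target_gen_dir/foo_resources.build_config"
-  #     resource_dirs = [ "java/res" ]
-  #     custom_package = "org.example.foo"
-  #     zip_path = "$target_out_dir/foo_resources.resources.zip"
-  #     r_text_out_path = "$target_out_dir/foo_resources_R.txt"
-  #     srcjar_path = "$target_gen_dir/foo_resources.srcjar"
-  #     deps = [ ":foo_resources__build_config" ]
-  #   }
-  #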
-  template("prepare_resources") {
-    if (defined(invoker.srcjar_path)) {
-      _srcjar_path = invoker.srcjar_path
-    }
-    action(target_name) {
-      set_sources_assignment_filter([])
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "testonly",
-                               "visibility",
-                             ])
-      script = "//build/android/gyp/prepare_resources.py"
-
-      depfile = "$target_gen_dir/${invoker.target_name}.d"
-      outputs = []
-      _all_resource_dirs = []
-      sources = []
-
-      if (defined(invoker.resource_dirs)) {
-        _all_resource_dirs += invoker.resource_dirs
-
-        # Speed up "gn gen" by short-circuiting the empty directory.
-        if (invoker.resource_dirs != [ "//build/android/empty" ] &&
-            invoker.resource_dirs != []) {
-          _sources_build_rel =
-              exec_script("//build/android/gyp/find.py",
-                          rebase_path(invoker.resource_dirs, root_build_dir),
-                          "list lines")
-          sources += rebase_path(_sources_build_rel, ".", root_build_dir)
-        }
-      }
-
-      if (defined(invoker.generated_resource_dirs)) {
-        assert(defined(invoker.generated_resource_files))
-        _all_resource_dirs += invoker.generated_resource_dirs
-        sources += invoker.generated_resource_files
-      }
-
-      _android_aapt_path = android_default_aapt_path
-
-      _android_sdk_jar = android_sdk_jar
-      if (defined(invoker.alternative_android_sdk_jar)) {
-        _android_sdk_jar = invoker.alternative_android_sdk_jar
-      }
-
-      inputs = [
-        invoker.build_config,
-        _android_aapt_path,
-        _android_sdk_jar,
-      ]
-
-      _rebased_all_resource_dirs =
-          rebase_path(_all_resource_dirs, root_build_dir)
-      _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
-
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--android-sdk-jar",
-        rebase_path(_android_sdk_jar, root_build_dir),
-        "--aapt-path",
-        rebase_path(_android_aapt_path, root_build_dir),
-        "--dependencies-res-zips=@FileArg($_rebased_build_config:resources:dependency_zips)",
-        "--extra-res-packages=@FileArg($_rebased_build_config:resources:extra_package_names)",
-        "--extra-r-text-files=@FileArg($_rebased_build_config:resources:extra_r_text_files)",
-      ]
-
-      if (defined(invoker.android_manifest)) {
-        inputs += [ invoker.android_manifest ]
-        args += [
-          "--android-manifest",
-          rebase_path(invoker.android_manifest, root_build_dir),
-        ]
-      }
-
-      if (_rebased_all_resource_dirs != []) {
-        args += [ "--resource-dirs=$_rebased_all_resource_dirs" ]
-      }
-
-      if (defined(invoker.zip_path)) {
-        outputs += [
-          invoker.zip_path,
-          invoker.zip_path + ".info",
-        ]
-        args += [
-          "--resource-zip-out",
-          rebase_path(invoker.zip_path, root_build_dir),
-        ]
-      }
-
-      if (defined(invoker.r_text_out_path)) {
-        outputs += [ invoker.r_text_out_path ]
-        args += [
-          "--r-text-out",
-          rebase_path(invoker.r_text_out_path, root_build_dir),
-        ]
-      }
-
-      if (defined(_srcjar_path)) {
-        outputs += [ _srcjar_path ]
-        args += [
-          "--srcjar-out",
-          rebase_path(_srcjar_path, root_build_dir),
-        ]
-      }
-
-      if (defined(invoker.r_text_in_path)) {
-        _r_text_in_path = invoker.r_text_in_path
-        inputs += [ _r_text_in_path ]
-        args += [
-          "--r-text-in",
-          rebase_path(_r_text_in_path, root_build_dir),
-        ]
-      }
-
-      if (defined(invoker.custom_package)) {
-        args += [
-          "--custom-package",
-          invoker.custom_package,
-        ]
-      }
-
-      if (defined(invoker.shared_resources) && invoker.shared_resources) {
-        args += [ "--shared-resources" ]
-      }
-
-      if (defined(invoker.v14_skip) && invoker.v14_skip) {
-        args += [ "--v14-skip" ]
-      }
-    }
-  }
-
-  # A template that is used to compile all resources needed by a binary
-  # (e.g. an android_apk or a junit_binary) into an intermediate .ap_
-  # archive. It can also generate an associated .srcjar that contains the
-  # final R.java sources for all resource packages the binary depends on.
-  #
-  # Input variables:
-  #   deps: Specifies the input dependencies for this target.
-  #
-  #   build_config: Path to the .build_config file corresponding to the target.
-  #
-  #   android_manifest: Path to root manifest for the binary.
-  #
-  #   version_code: (optional)
-  #
-  #   version_name: (optional)
-  #
-  #   alternative_android_sdk_jar: Alternative system android.jar to use.
-  #
-  #   shared_resources: (optional)
-  #     If true, make all variables in each generated R.java file non-final,
-  #     and provide an onResourcesLoaded() method that can be used to reset
-  #     their package index at load time. Useful when the APK corresponds to
-  #     a library that is loaded at runtime, like system_webview_apk or
-  #     monochrome_apk.
-  #
-  #   app_as_shared_lib: (optional)
-  #     If true, same effect as shared_resources, but also ensures that the
-  #     resources can be used by the APK when it is loaded as a regular
-  #     application as well. Useful for the monochrome_public_apk target
-  #     which is both an application and a shared runtime library that
-  #     implements the system webview feature.
-  #
-  #   shared_resources_whitelist: (optional)
-  #     Path to an R.txt file. If provided, acts similar to shared_resources
-  #     except that it restricts the list of non-final resource variables
-  #     to the list from the input R.txt file. Overrides shared_resources
-  #     when both are specified.
-  #
-  #   support_zh_hk: (optional)
-  #     If true, support zh-HK in Chrome on Android by using the resources
-  #     from zh-TW. See https://crbug.com/780847.
-  #
-  #   aapt_locale_whitelist: (optional)
-  #     Restrict compiled locale-dependent resources to a specific whitelist.
-  #     NOTE: This is a list of Chromium locale names, not Android ones.
-  #
-  #   exclude_xxxhdpi: (optional)
-  #
-  #   xxxhdpi_whitelist: (optional)
-  #
-  #   no_xml_namespaces: (optional)
-  #
-  #   png_to_webp: (optional)
-  #     If true, convert all PNG resources (except 9-patch files) to WebP.
-  #
-  #   post_process_script: (optional)
-  #
-  #   proto_format: (optional). If true, compiles resources into protocol
-  #     buffer format.
-  #
-  # Output variables:
-  #   output:  Path to a zip file containing the compiled resources.
-  #
-  #   r_text_out_path: (optional):
-  #       Path for the corresponding generated R.txt file.
-  #
-  #   srcjar_path: (optional)
-  #       Path to a generated .srcjar containing the generated R.java sources
-  #       for all dependent resource libraries.
-  #
-  #   proguard_file: (optional)
-  #       Path to proguard configuration file for this apk target.
-  #
-  #   proguard_file_main_dex: (optional)
-  #
-  #
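-  # Example invocation showing the typical inputs and outputs (illustrative
-  # sketch only; the target name and paths below are hypothetical):
-  #
-  #   compile_resources("foo_apk__compile_resources") {
-  #     android_manifest = "$target_gen_dir/AndroidManifest.merged.xml"
-  #     build_config = "$target_gen_dir/foo_apk.build_config"
-  #     output = "$target_gen_dir/foo_apk.ap_"
-  #     srcjar_path = "$target_gen_dir/foo_apk.srcjar"
-  #     r_text_out_path = "$target_gen_dir/foo_apk_R.txt"
-  #     proguard_file = "$target_gen_dir/foo_apk.resources.proguard.txt"
-  #     deps = [ ":foo_apk__build_config" ]
-  #   }
-  #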
-  template("compile_resources") {
-    _compile_resources_target_name = target_name
-    _compiled_resources_path = invoker.output
-
-    if (defined(invoker.srcjar_path)) {
-      _srcjar_path = invoker.srcjar_path
-    }
-    if (defined(invoker.post_process_script)) {
-      _compile_resources_target_name = "${target_name}__intermediate"
-      _compiled_resources_path =
-          get_path_info(_compiled_resources_path, "dir") + "/" +
-          get_path_info(_compiled_resources_path, "name") + ".intermediate.ap_"
-      _srcjar_path = "${_srcjar_path}.intermediate.srcjar"
-    }
-
-    action(_compile_resources_target_name) {
-      set_sources_assignment_filter([])
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "testonly",
-                               "visibility",
-                             ])
-      script = "//build/android/gyp/compile_resources.py"
-
-      depfile = "$target_gen_dir/${invoker.target_name}_1.d"
-      outputs = []
-
-      _android_aapt_path = android_default_aapt_path
-      if (defined(invoker.proto_format) && invoker.proto_format) {
-        _android_aapt_path = "$android_sdk_app_bundle_build_tools/aapt"
-      }
-
-      _android_sdk_jar = android_sdk_jar
-      if (defined(invoker.alternative_android_sdk_jar)) {
-        _android_sdk_jar = invoker.alternative_android_sdk_jar
-      }
-
-      inputs = [
-        invoker.build_config,
-        _android_aapt_path,
-        _android_sdk_jar,
-      ]
-
-      _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
-
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--android-sdk-jar",
-        rebase_path(_android_sdk_jar, root_build_dir),
-        "--aapt-path",
-        rebase_path(_android_aapt_path, root_build_dir),
-        "--dependencies-res-zips=@FileArg($_rebased_build_config:resources:dependency_zips)",
-        "--extra-res-packages=@FileArg($_rebased_build_config:resources:extra_package_names)",
-        "--extra-r-text-files=@FileArg($_rebased_build_config:resources:extra_r_text_files)",
-      ]
-
-      inputs += [ invoker.android_manifest ]
-      args += [
-        "--android-manifest",
-        rebase_path(invoker.android_manifest, root_build_dir),
-      ]
-
-      if (defined(invoker.no_xml_namespaces) && invoker.no_xml_namespaces) {
-        args += [ "--no-xml-namespaces" ]
-      }
-
-      if (defined(invoker.version_code)) {
-        args += [
-          "--version-code",
-          invoker.version_code,
-        ]
-      }
-      if (defined(invoker.version_name)) {
-        args += [
-          "--version-name",
-          invoker.version_name,
-        ]
-      }
-      if (defined(_compiled_resources_path)) {
-        _info_path = invoker.output + ".info"
-        outputs += [
-          _compiled_resources_path,
-          _info_path,
-        ]
-        args += [
-          "--apk-path",
-          rebase_path(_compiled_resources_path, root_build_dir),
-          "--apk-info-path",
-          rebase_path(_info_path, root_build_dir),
-        ]
-      }
-
-      # Useful to have android:debuggable in the manifest even for Release
-      # builds. Just omit it for official builds.
-      if (debuggable_apks) {
-        args += [ "--debuggable" ]
-      }
-
-      if (defined(invoker.r_text_out_path)) {
-        outputs += [ invoker.r_text_out_path ]
-        args += [
-          "--r-text-out",
-          rebase_path(invoker.r_text_out_path, root_build_dir),
-        ]
-      }
-
-      if (defined(_srcjar_path)) {
-        outputs += [ _srcjar_path ]
-        args += [
-          "--srcjar-out",
-          rebase_path(_srcjar_path, root_build_dir),
-        ]
-      }
-
-      if (defined(invoker.custom_package)) {
-        args += [
-          "--custom-package",
-          invoker.custom_package,
-        ]
-      }
-
-      _proto_format = defined(invoker.proto_format) && invoker.proto_format
-      if (_proto_format) {
-        args += [ "--proto-format" ]
-      }
-
-      # Define the flags related to shared resources.
-      #
-      # Note the small sanity check to ensure that the package ID of the
-      # generated resources table is correct. It should be 0x02 for runtime
-      # shared libraries, and 0x7f otherwise.
-
-      _expected_resources_pkg_id = "0x7f"
-      if (defined(invoker.shared_resources) && invoker.shared_resources) {
-        args += [ "--shared-resources" ]
-        _expected_resources_pkg_id = "0x02"
-      } else if (defined(invoker.app_as_shared_lib) &&
-                 invoker.app_as_shared_lib) {
-        args += [ "--app-as-shared-lib" ]
-      }
-
-      # NOTE: It is not possible to check the resources package ID of
-      # proto-compiled APKs at the moment.
-      if (!_proto_format) {
-        args += [ "--check-resources-pkg-id=$_expected_resources_pkg_id" ]
-      } else {
-        assert(_expected_resources_pkg_id != "")  # Mark as used.
-      }
-
-      if (defined(invoker.shared_resources_whitelist)) {
-        inputs += [ invoker.shared_resources_whitelist ]
-        args += [
-          "--shared-resources-whitelist",
-          rebase_path(invoker.shared_resources_whitelist, root_build_dir),
-        ]
-      }
-
-      if (defined(invoker.proguard_file)) {
-        outputs += [ invoker.proguard_file ]
-        args += [
-          "--proguard-file",
-          rebase_path(invoker.proguard_file, root_build_dir),
-        ]
-      }
-
-      if (defined(invoker.proguard_file_main_dex)) {
-        outputs += [ invoker.proguard_file_main_dex ]
-        args += [
-          "--proguard-file-main-dex",
-          rebase_path(invoker.proguard_file_main_dex, root_build_dir),
-        ]
-      }
-
-      if (defined(invoker.aapt_locale_whitelist)) {
-        args += [ "--locale-whitelist=${invoker.aapt_locale_whitelist}" ]
-      }
-      if (defined(invoker.png_to_webp) && invoker.png_to_webp) {
-        _webp_target = "//third_party/libwebp:cwebp($host_toolchain)"
-        _webp_binary = get_label_info(_webp_target, "root_out_dir") + "/cwebp"
-        deps += [ _webp_target ]
-        inputs += [ _webp_binary ]
-        args += [
-          "--png-to-webp",
-          "--webp-binary",
-          rebase_path(_webp_binary, root_build_dir),
-        ]
-      }
-      if (defined(invoker.exclude_xxxhdpi) && invoker.exclude_xxxhdpi) {
-        args += [ "--exclude-xxxhdpi" ]
-        if (defined(invoker.xxxhdpi_whitelist)) {
-          args += [ "--xxxhdpi-whitelist=${invoker.xxxhdpi_whitelist}" ]
-        }
-      }
-
-      if (defined(invoker.support_zh_hk) && invoker.support_zh_hk) {
-        args += [ "--support-zh-hk" ]
-      }
-
-      if (defined(invoker.args)) {
-        args += invoker.args
-      }
-    }
-
-    if (defined(invoker.post_process_script)) {
-      action(target_name) {
-        depfile = "${target_gen_dir}/${invoker.target_name}_2.d"
-        script = invoker.post_process_script
-        args = [
-          "--depfile",
-          rebase_path(depfile, root_build_dir),
-          "--apk-path",
-          rebase_path(_compiled_resources_path, root_build_dir),
-          "--output",
-          rebase_path(invoker.output, root_build_dir),
-          "--srcjar-in",
-          rebase_path(_srcjar_path, root_build_dir),
-          "--srcjar-out",
-          rebase_path(invoker.srcjar_path, root_build_dir),
-        ]
-        if (defined(invoker.shared_resources_whitelist)) {
-          args += [
-            "--r-text-whitelist",
-            rebase_path(invoker.shared_resources_whitelist, root_build_dir),
-            "--r-text",
-            rebase_path(invoker.r_text_out_path, root_build_dir),
-          ]
-        }
-        inputs = [
-          _srcjar_path,
-          _compiled_resources_path,
-        ]
-        outputs = [
-          invoker.output,
-          invoker.srcjar_path,
-        ]
-        public_deps = [
-          ":${_compile_resources_target_name}",
-        ]
-      }
-    }
-  }
-
-  # Compile resources for an APK.
-  #
-  # This is very similar to compile_resources, except that it can *also*
-  # compile the same set of inputs using the new protocol-buffer based
-  # format, if proto_output and proto_resources_target are set.
-  #
-  # Takes the same variables as compile_resources, with the following
-  # extras:
-  #
-  #   proto_output: optional. If provided, the path to an output file that
-  #     will contain the resources compiled in the new protocol buffer format.
-  #   proto_resources_target: required when proto_output is set. Name of the
-  #     target associated with compiling the protocol-buffer based resources.
-  #
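-  # Example invocation showing the extra proto-related variables (illustrative
-  # sketch only; the target names and paths below are hypothetical):
-  #
-  #   compile_apk_resources("foo_apk__compile_resources") {
-  #     android_manifest = "$target_gen_dir/AndroidManifest.merged.xml"
-  #     build_config = "$target_gen_dir/foo_apk.build_config"
-  #     output = "$target_gen_dir/foo_apk.ap_"
-  #     srcjar_path = "$target_gen_dir/foo_apk.srcjar"
-  #     proto_output = "$target_gen_dir/foo_apk.proto.ap_"
-  #     proto_resources_target = "foo_apk__compile_proto_resources"
-  #     deps = [ ":foo_apk__build_config" ]
-  #   }
-  #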
-  template("compile_apk_resources") {
-    # First call to compile_resources() is used to generate the compiled
-    # resources using the standard binary xml + resources.arsc format.
-    compile_resources(target_name) {
-      forward_variables_from(invoker,
-                             "*",
-                             [
-                               "proto_format",
-                               "proto_output",
-                               "proto_resources_target",
-                             ])
-    }
-
-    # The second call is optional, and is only used to compile the resources
-    # using the new protocol buffer based format. This omits the generation of
-    # R.txt, .srcjar and proguard files (performed during the first call), as
-    # well as resources post-processing.
-    if (defined(invoker.proto_output)) {
-      compile_resources(invoker.proto_resources_target) {
-        forward_variables_from(invoker,
-                               "*",
-                               [
-                                 "output",
-                                 "post_process_script",
-                                 "proguard_file",
-                                 "proguard_file_main_dex",
-                                 "proto_output",
-                                 "proto_resources_target",
-                                 "r_text_out_path",
-                                 "srcjar_path",
-                               ])
-        output = invoker.proto_output
-        proto_format = true
-      }
-    }
-  }
-
-  # Create an apk.jar.info file by merging several .jar.info files into one.
-  #
-  # Variables:
-  #   apk_build_config: Path to APK's build config file. Used to extract the
-  #       list of input .jar files from its dependencies.
-  #   output: Output file path.
-  #
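-  # Example invocation (illustrative sketch only; the target name and paths
-  # below are hypothetical):
-  #
-  #   create_apk_jar_info("foo_apk__jar_info") {
-  #     apk_build_config = "$target_gen_dir/foo_apk.build_config"
-  #     output = "$root_build_dir/size-info/foo_apk.apk.jar.info"
-  #     deps = [ ":foo_apk__build_config" ]
-  #   }
-  #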
-  template("create_apk_jar_info") {
-    _output = invoker.output
-    _build_config = invoker.apk_build_config
-    _rebased_build_config = rebase_path(_build_config, root_build_dir)
-    action(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "testonly",
-                               "deps",
-                             ])
-      script = "//build/android/gyp/merge_jar_info_files.py"
-      inputs = [
-        _build_config,
-      ]
-      outputs = [
-        _output,
-      ]
-      depfile = "$target_gen_dir/$target_name.d"
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--output",
-        rebase_path(_output, root_build_dir),
-        "--apk-jar-file=@FileArg($_rebased_build_config:deps_info:jar_path)",
-        "--dep-jar-files=@FileArg(" +
-            "$_rebased_build_config:deps_info:javac_full_classpath)",
-      ]
-    }
-  }
-
-  # Creates a signed and aligned .apk.
-  #
-  # Variables
-  #   assets_build_config: Path to android_apk .build_config containing merged
-  #       asset information.
-  #   deps: Specifies the dependencies of this target.
-  #   dex_path: Path to classes.dex file to include (optional).
-  #   packaged_resources_path: Path to .ap_ to use.
-  #   output_apk_path: Output path for the generated .apk.
-  #   native_lib_placeholders: List of placeholder filenames to add to the apk
-  #     (optional).
-  #   secondary_native_lib_placeholders: List of placeholder filenames to add to
-  #     the apk for the secondary ABI (optional).
-  #   native_libs: List of native libraries.
-  #   native_libs_filearg: @FileArg() of additional native libraries.
-  #   write_asset_list: Adds an extra file to the assets, which contains a list of
-  #     all other asset files.
-  #   keystore_path: Path to keystore to use for signing.
-  #   keystore_name: Key alias to use.
-  #   keystore_password: Keystore password.
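-  #
-  # Example invocation showing the typical variables (illustrative sketch
-  # only; the target name, paths and keystore values below are hypothetical):
-  #
-  #   package_apk("foo_apk__package") {
-  #     packaged_resources_path = "$target_gen_dir/foo_apk.ap_"
-  #     dex_path = "$target_gen_dir/foo_apk/classes.dex"
-  #     native_libs = []
-  #     output_apk_path = "$root_build_dir/apks/FooApk.apk"
-  #     keystore_path = "//examples/debug.keystore"
-  #     keystore_name = "examplekey"
-  #     keystore_password = "example"
-  #     deps = [ ":foo_apk__compile_resources" ]
-  #   }
-  #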
-  template("package_apk") {
-    action(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "public_deps",
-                               "testonly",
-                             ])
-      _native_lib_placeholders = []
-      if (defined(invoker.native_lib_placeholders)) {
-        _native_lib_placeholders = invoker.native_lib_placeholders
-      }
-      _secondary_native_lib_placeholders = []
-      if (defined(invoker.secondary_native_lib_placeholders)) {
-        _secondary_native_lib_placeholders =
-            invoker.secondary_native_lib_placeholders
-      }
-
-      script = "//build/android/gyp/apkbuilder.py"
-      depfile = "$target_gen_dir/$target_name.d"
-      _apksigner = "$android_sdk_build_tools/apksigner"
-      _zipalign = "$android_sdk_build_tools/zipalign"
-      data_deps = [
-        "//tools/android/md5sum",
-      ]  # Used when deploying APKs
-
-      inputs = invoker.native_libs + [
-                 invoker.keystore_path,
-                 invoker.packaged_resources_path,
-                 _apksigner,
-                 _zipalign,
-               ]
-      if (defined(invoker.dex_path)) {
-        inputs += [ invoker.dex_path ]
-      }
-
-      outputs = [
-        invoker.output_apk_path,
-      ]
-      data = [
-        invoker.output_apk_path,
-      ]
-
-      _rebased_compiled_resources_path =
-          rebase_path(invoker.packaged_resources_path, root_build_dir)
-      _rebased_packaged_apk_path =
-          rebase_path(invoker.output_apk_path, root_build_dir)
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--resource-apk=$_rebased_compiled_resources_path",
-        "--output-apk=$_rebased_packaged_apk_path",
-        "--apksigner-path",
-        rebase_path(_apksigner, root_build_dir),
-        "--zipalign-path",
-        rebase_path(_zipalign, root_build_dir),
-        "--key-path",
-        rebase_path(invoker.keystore_path, root_build_dir),
-        "--key-name",
-        invoker.keystore_name,
-        "--key-passwd",
-        invoker.keystore_password,
-      ]
-      if (defined(invoker.assets_build_config)) {
-        inputs += [ invoker.assets_build_config ]
-        _rebased_build_config =
-            rebase_path(invoker.assets_build_config, root_build_dir)
-        args += [
-          "--assets=@FileArg($_rebased_build_config:assets)",
-          "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)",
-        ]
-
-        # TODO(mlopatkin) We are relying on the fact that assets_build_config is
-        # an APK build_config.
-        args += [ "--java-resources=@FileArg($_rebased_build_config:java_resources_jars)" ]
-
-        if (defined(invoker.apk_name)) {
-          # The supersize tool will search in this directory for each apk.
-          _apk_pak_info_path = "size-info/${invoker.apk_name}.apk.pak.info"
-          _apk_res_info_path = "size-info/${invoker.apk_name}.apk.res.info"
-          args += [
-            "--apk-pak-info-path",
-            _apk_pak_info_path,
-            "--apk-res-info-path",
-            _apk_res_info_path,
-          ]
-          outputs += [
-            "$root_build_dir/$_apk_pak_info_path",
-            "$root_build_dir/$_apk_res_info_path",
-          ]
-        }
-      }
-      if (defined(invoker.write_asset_list) && invoker.write_asset_list) {
-        args += [ "--write-asset-list" ]
-      }
-      if (defined(invoker.dex_path)) {
-        _rebased_dex_path = rebase_path(invoker.dex_path, root_build_dir)
-        args += [ "--dex-file=$_rebased_dex_path" ]
-      }
-      if (invoker.native_libs != [] || defined(invoker.native_libs_filearg) ||
-          _native_lib_placeholders != []) {
-        args += [ "--android-abi=$android_app_abi" ]
-      }
-      if (defined(invoker.secondary_abi_native_libs_filearg) ||
-          (defined(invoker.secondary_native_libs) &&
-           invoker.secondary_native_libs != []) ||
-          _secondary_native_lib_placeholders != []) {
-        assert(defined(android_app_secondary_abi))
-        args += [ "--secondary-android-abi=$android_app_secondary_abi" ]
-      }
-      if (invoker.native_libs != []) {
-        _rebased_native_libs = rebase_path(invoker.native_libs, root_build_dir)
-        args += [ "--native-libs=$_rebased_native_libs" ]
-      }
-      if (defined(invoker.native_libs_filearg)) {
-        args += [ "--native-libs=${invoker.native_libs_filearg}" ]
-      }
-      if (_native_lib_placeholders != []) {
-        args += [ "--native-lib-placeholders=$_native_lib_placeholders" ]
-      }
-      if (_secondary_native_lib_placeholders != []) {
-        args += [ "--secondary-native-lib-placeholders=$_secondary_native_lib_placeholders" ]
-      }
-
-      # TODO(michaelbai): Remove the secondary_native_libs variable.
-      if (defined(invoker.secondary_abi_native_libs_filearg)) {
-        args += [ "--secondary-native-libs=${invoker.secondary_abi_native_libs_filearg}" ]
-      } else if (defined(invoker.secondary_native_libs) &&
-                 invoker.secondary_native_libs != []) {
-        inputs += invoker.secondary_native_libs
-        _secondary_native_libs = rebase_path(invoker.secondary_native_libs)
-        args += [ "--secondary-native-libs=$_secondary_native_libs" ]
-      }
-
-      if (defined(invoker.uncompress_shared_libraries) &&
-          invoker.uncompress_shared_libraries) {
-        args += [ "--uncompress-shared-libraries" ]
-      }
-    }
-  }
-
-  # Packages resources, assets, dex, and native libraries into an apk. Signs and
-  # zipaligns the apk.
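-  #
-  # Example invocation showing the typical variables (illustrative sketch
-  # only; the target name, paths and keystore values below are hypothetical):
-  #
-  #   create_apk("foo_apk__create") {
-  #     android_manifest = "$target_gen_dir/AndroidManifest.merged.xml"
-  #     base_path = "$target_gen_dir/foo_apk"
-  #     apk_path = "$root_build_dir/apks/FooApk.apk"
-  #     packaged_resources_path = "$target_gen_dir/foo_apk.ap_"
-  #     dex_path = "$target_gen_dir/foo_apk/classes.dex"
-  #     load_library_from_apk = false
-  #     keystore_path = "//examples/debug.keystore"
-  #     keystore_name = "examplekey"
-  #     keystore_password = "example"
-  #     deps = [ ":foo_apk__compile_resources" ]
-  #   }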
-  template("create_apk") {
-    set_sources_assignment_filter([])
-    forward_variables_from(invoker, [ "testonly" ])
-
-    _android_manifest = invoker.android_manifest
-    _base_path = invoker.base_path
-    _final_apk_path = invoker.apk_path
-    _incremental_final_apk_path_helper =
-        process_file_template(
-            [ _final_apk_path ],
-            "{{source_dir}}/{{source_name_part}}_incremental.apk")
-    _incremental_final_apk_path = _incremental_final_apk_path_helper[0]
-
-    if (defined(invoker.dex_path)) {
-      _dex_path = invoker.dex_path
-    }
-    _load_library_from_apk = invoker.load_library_from_apk
-    assert(_load_library_from_apk || true)
-
-    _deps = []
-    if (defined(invoker.deps)) {
-      _deps = invoker.deps
-    }
-    _incremental_deps = []
-    if (defined(invoker.incremental_deps)) {
-      _incremental_deps = invoker.incremental_deps
-    }
-    _native_libs = []
-    if (defined(invoker.native_libs)) {
-      _native_libs = invoker.native_libs
-    }
-    _native_libs_even_when_incremental = []
-    if (defined(invoker.native_libs_even_when_incremental)) {
-      _native_libs_even_when_incremental =
-          invoker.native_libs_even_when_incremental
-    }
-
-    _incremental_compiled_resources_path = "${_base_path}_incremental.ap_"
-    _shared_resources =
-        defined(invoker.shared_resources) && invoker.shared_resources
-    assert(_shared_resources || true)  # Mark as used.
-
-    _keystore_path = invoker.keystore_path
-    _keystore_name = invoker.keystore_name
-    _keystore_password = invoker.keystore_password
-
-    _incremental_compile_resources_target_name =
-        "${target_name}_incremental__compile_resources"
-    _incremental_android_manifest =
-        get_label_info(_incremental_compile_resources_target_name,
-                       "target_gen_dir") + "/AndroidManifest.xml"
-    action(_incremental_compile_resources_target_name) {
-      deps = _incremental_deps
-      script =
-          "//build/android/incremental_install/generate_android_manifest.py"
-      inputs = [
-        # Save on a depfile by listing only the .py dep here.
-        "//build/android/gyp/util/build_utils.py",
-        _android_manifest,
-        invoker.packaged_resources_path,
-      ]
-      outputs = [
-        # Output the non-compiled manifest for easy debugging (as opposed to
-        # generating to a temp file).
-        _incremental_android_manifest,
-        _incremental_compiled_resources_path,
-      ]
-
-      _android_sdk_jar = android_sdk_jar
-      if (defined(invoker.alternative_android_sdk_jar)) {
-        _android_sdk_jar = invoker.alternative_android_sdk_jar
-      }
-      args = [
-        "--src-manifest",
-        rebase_path(_android_manifest, root_build_dir),
-        "--out-manifest",
-        rebase_path(_incremental_android_manifest, root_build_dir),
-        "--in-apk",
-        rebase_path(invoker.packaged_resources_path, root_build_dir),
-        "--out-apk",
-        rebase_path(_incremental_compiled_resources_path, root_build_dir),
-        "--aapt-path",
-        rebase_path(android_default_aapt_path, root_build_dir),
-        "--android-sdk-jar",
-        rebase_path(_android_sdk_jar, root_build_dir),
-      ]
-      if (disable_incremental_isolated_processes) {
-        args += [ "--disable-isolated-processes" ]
-      }
-    }
-
-    package_apk(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "apk_name",
-                               "assets_build_config",
-                               "native_lib_placeholders",
-                               "native_libs_filearg",
-                               "packaged_resources_path",
-                               "secondary_native_lib_placeholders",
-                               "secondary_abi_native_libs_filearg",
-                               "secondary_native_libs",
-                               "uncompress_shared_libraries",
-                               "write_asset_list",
-                             ])
-      if (!defined(uncompress_shared_libraries)) {
-        uncompress_shared_libraries = _load_library_from_apk
-      }
-      deps = _deps
-      native_libs = _native_libs + _native_libs_even_when_incremental
-      keystore_path = _keystore_path
-      keystore_name = _keystore_name
-      keystore_password = _keystore_password
-
-      if (defined(_dex_path)) {
-        dex_path = _dex_path
-      }
-
-      output_apk_path = _final_apk_path
-    }
-
-    package_apk("${target_name}_incremental") {
-      forward_variables_from(invoker,
-                             [
-                               "assets_build_config",
-                               "secondary_native_libs",
-                               "uncompress_shared_libraries",
-                             ])
-      if (!defined(uncompress_shared_libraries)) {
-        uncompress_shared_libraries = _load_library_from_apk
-      }
-      _dex_target = "//build/android/incremental_install:bootstrap_java__dex"
-      deps = _incremental_deps + [
-               ":${_incremental_compile_resources_target_name}",
-               _dex_target,
-             ]
-
-      if (defined(_dex_path)) {
-        dex_path =
-            get_label_info(_dex_target, "target_gen_dir") + "/bootstrap.dex"
-      }
-
-      native_libs = _native_libs_even_when_incremental
-      keystore_path = _keystore_path
-      keystore_name = _keystore_name
-      keystore_password = _keystore_password
-
-      # http://crbug.com/384638
-      _has_native_libs =
-          defined(invoker.native_libs_filearg) || _native_libs != []
-      if (_has_native_libs && _native_libs_even_when_incremental == []) {
-        native_lib_placeholders = [ "libfix.crbug.384638.so" ]
-      }
-
-      output_apk_path = _incremental_final_apk_path
-      packaged_resources_path = _incremental_compiled_resources_path
-    }
-  }
-
-  # Compile Java source files into a .jar file, potentially using an
-  # annotation processor, and/or the errorprone compiler.
-  #
-  # Note that the only way to specify custom annotation processors is
-  # by using build_config to point to a file that corresponds to a java-related
-  # target that includes javac:processor_classes entries (i.e. there is no
-  # variable here that can be used for this purpose).
-  #
-  # Note also the peculiar use of java_files / java_sources_file. The content
-  # of the java_files list and the java_sources_file file must match exactly.
-  # This rule uses java_files only to list the inputs to the action that
-  # calls the javac.py script, but will pass the list of Java source files
-  # with the '@${java_sources_file}' command-line syntax. This is not a
-  # problem in practice since this template is only called from
-  # java_library_impl(), which sets up the variables properly.
-  #
-  # Variables:
-  #  java_files: Optional list of Java source file paths.
-  #  srcjar_deps: Optional list of .srcjar dependencies (not file paths).
-  #    The corresponding source files they contain will be compiled too.
-  #  srcjar_filearg: Optional @FileArg for additional srcjars.
-  #  java_sources_file: Optional path to file containing list of Java source
-  #    file paths. This must always be provided if java_files is not empty
-  #    and must match it exactly.
-  #  build_config: Path to the .build_config file of the corresponding
-  #    java_library_impl() target. The following entries will be used by this
-  #    template: javac:srcjars, deps_info:javac_full_classpath,
-  #    deps_info:javac_full_interface_classpath, javac:processor_classpath,
-  #    javac:processor_classes
-  #  javac_jar_path: Path to the final output .jar file.
-  #  javac_args: Optional list of extra arguments to pass to javac.
-  #  chromium_code: Whether this corresponds to Chromium-specific sources.
-  #  requires_android: True if these sources can only run on Android.
-  #  alternative_android_sdk_ijar: Optional path to alternative Android system
-  #    interface jar file (android.jar). Ignored if !requires_android.
-  #  alternative_android_sdk_ijar_dep: If alternative_android_sdk_ijar is
-  #    provided, this should be the dependency target that generates the
-  #    alternative .jar file.
-  #  additional_jar_files: Optional list of files to copy into the resulting
-  #    .jar file (by default, only .class files are put there). Each entry
-  #    has the 'srcPath:dstPath' format.
-  #  enable_incremental_javac_override: Optional. If provided, determines
-  #    whether incremental javac compilation (based on jmake) is enabled.
-  #    Otherwise, the decision is based on the global enable_incremental_javac
-  #    build arg variable.
-  #  enable_errorprone: Optional. If True, use the errorprone compiler to
-  #    check for error-prone constructs in the language. If not provided,
-  #    whether this is enabled depends on chromium_code and the global
-  #    use_errorprone_java_compiler variable.
-  #  apk_name: Optional APK name. If provided, will tell javac.py to also
-  #    generate an .apk.jar.info file under size-info/${apk_name}.apk.jar.info
-  #  provider_configurations: Optional list of paths to Java service
-  #    provider configuration files [1]. These will be copied under
-  #    META-INF/services/ in the final .jar file.
-  #  processor_args_javac: List of annotation processor arguments, each one
-  #    will be passed to javac as -A<entry>.
-  #  deps: Dependencies for the corresponding target.
-  #  testonly: Usual meaning (should be True for test-only targets)
-  #
-  # [1] https://docs.oracle.com/javase/7/docs/api/java/util/ServiceLoader.html
-  #
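-  # Example (a minimal, hypothetical sketch: compile_java() is normally only
-  # invoked from java_library_impl(), and every target name and path below is
-  # illustrative, not taken from a real target):
-  #
-  #   compile_java("foo_java__compile_java") {
-  #     build_config = "$target_gen_dir/foo_java.build_config"
-  #     java_files = [ "java/src/org/chromium/foo/Foo.java" ]
-  #     java_sources_file = "$target_gen_dir/foo_java.sources"
-  #     srcjar_deps = []
-  #     javac_jar_path = "$target_gen_dir/foo_java.javac.jar"
-  #     chromium_code = true
-  #     requires_android = true
-  #   }
-  #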
-  template("compile_java") {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    _build_config = invoker.build_config
-    _chromium_code = invoker.chromium_code
-    _requires_android = invoker.requires_android
-
-    if (defined(invoker.enable_errorprone)) {
-      _enable_errorprone = invoker.enable_errorprone
-    } else {
-      _enable_errorprone = use_errorprone_java_compiler && _chromium_code
-    }
-
-    _provider_configurations = []
-    if (defined(invoker.provider_configurations)) {
-      _provider_configurations = invoker.provider_configurations
-    }
-
-    _processor_args = []
-    if (defined(invoker.processor_args_javac)) {
-      _processor_args = invoker.processor_args_javac
-    }
-
-    _additional_jar_files = []
-    if (defined(invoker.additional_jar_files)) {
-      _additional_jar_files = invoker.additional_jar_files
-    }
-
-    if (defined(invoker.enable_incremental_javac_override)) {
-      # Use invoker-specified override.
-      _enable_incremental_javac = invoker.enable_incremental_javac_override
-    } else {
-      # Default to build arg if not overridden.
-      _enable_incremental_javac = enable_incremental_javac
-    }
-
-    _srcjar_deps = []
-    if (defined(invoker.srcjar_deps)) {
-      _srcjar_deps += invoker.srcjar_deps
-    }
-
-    _java_srcjars = []
-    foreach(dep, _srcjar_deps) {
-      _dep_gen_dir = get_label_info(dep, "target_gen_dir")
-      _dep_name = get_label_info(dep, "name")
-      _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
-    }
-
-    _javac_args = []
-    if (defined(invoker.javac_args)) {
-      _javac_args = invoker.javac_args
-    }
-
-    action(target_name) {
-      script = "//build/android/gyp/javac.py"
-      depfile = "$target_gen_dir/$target_name.d"
-      deps = _srcjar_deps
-      if (defined(invoker.deps)) {
-        deps += invoker.deps
-      }
-
-      outputs = [
-        invoker.javac_jar_path,
-        invoker.javac_jar_path + ".md5.stamp",
-        invoker.javac_jar_path + ".info",
-      ]
-      inputs = invoker.java_files + _java_srcjars + [ _build_config ]
-      if (invoker.java_files != []) {
-        inputs += [ invoker.java_sources_file ]
-      }
-
-      _rebased_build_config = rebase_path(_build_config, root_build_dir)
-      _rebased_javac_jar_path =
-          rebase_path(invoker.javac_jar_path, root_build_dir)
-      _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir)
-      _rebased_depfile = rebase_path(depfile, root_build_dir)
-      args = [
-        "--depfile=$_rebased_depfile",
-        "--jar-path=$_rebased_javac_jar_path",
-        "--java-srcjars=$_rebased_java_srcjars",
-        "--java-version=1.8",
-        "--full-classpath=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)",
-        "--interface-classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)",
-        "--processorpath=@FileArg($_rebased_build_config:javac:processor_classpath)",
-        "--processors=@FileArg($_rebased_build_config:javac:processor_classes)",
-      ]
-      if (defined(invoker.srcjar_filearg)) {
-        args += [ "--java-srcjars=${invoker.srcjar_filearg}" ]
-      }
-      if (_enable_incremental_javac) {
-        args += [ "--incremental" ]
-        deps += [ "//third_party/jmake($default_toolchain)" ]
-        inputs += [ "$root_build_dir/bin/jmake" ]
-        outputs += [ "${invoker.javac_jar_path}.pdb" ]
-      }
-      if (_requires_android) {
-        if (defined(invoker.alternative_android_sdk_ijar)) {
-          deps += [ invoker.alternative_android_sdk_ijar_dep ]
-          _android_sdk_ijar = invoker.alternative_android_sdk_ijar
-        } else {
-          deps += [ "//build/android:android_ijar" ]
-          _android_sdk_ijar = "$root_out_dir/lib.java/android.interface.jar"
-        }
-        inputs += [ _android_sdk_ijar ]
-        _rebased_android_sdk_ijar =
-            rebase_path(_android_sdk_ijar, root_build_dir)
-        args += [ "--bootclasspath=$_rebased_android_sdk_ijar" ]
-      }
-      if (_chromium_code) {
-        args += [ "--chromium-code=1" ]
-      }
-      if (_enable_errorprone) {
-        deps += [ "//third_party/errorprone:errorprone($default_toolchain)" ]
-        deps += [ "//tools/android/errorprone_plugin:errorprone_plugin_java($default_toolchain)" ]
-        _rebased_errorprone_processorpath = [
-          "lib.java/tools/android/errorprone_plugin/errorprone_plugin_java.jar",
-        ]
-        args += [
-          "--use-errorprone-path",
-          "bin/errorprone",
-          "--processorpath=$_rebased_errorprone_processorpath",
-        ]
-      }
-      foreach(e, _provider_configurations) {
-        args += [ "--provider-configuration=" + rebase_path(e, root_build_dir) ]
-      }
-      foreach(e, _processor_args) {
-        args += [ "--processor-arg=" + e ]
-      }
-
-      foreach(file_tuple, _additional_jar_files) {
-        # Each element is of length two, [ path_to_file, path_to_put_in_jar ]
-        inputs += [ file_tuple[0] ]
-        args +=
-            [ "--additional-jar-file=" + file_tuple[0] + ":" + file_tuple[1] ]
-      }
-      if (invoker.java_files != []) {
-        args += [ "@" + rebase_path(invoker.java_sources_file, root_build_dir) ]
-      }
-      foreach(e, _javac_args) {
-        args += [ "--javac-arg=" + e ]
-      }
-    }
-  }
-
-  # A rule that will handle multiple Java-related targets.
-  #
-  # The caller can provide a list of source files with 'java_files'
-  # and 'srcjar_deps', or a prebuilt .jar file through 'jar_path'.
-  #
-  # In the case of a 'java_binary' target type, it can even provide none of
-  # that (and the rule will just generate its wrapper script).
-  #
-  # The template will process the input .jar file (either the prebuilt one,
-  # or the result of compiling the sources), for example to apply ProGuard
-  # or other bytecode-level rewriting schemes.
-  #
-  # Variables:
-  #  type: type of Java target, valid values: 'java_library', 'java_binary',
-  #    'junit_binary', 'java_annotation_processor', and 'android_apk'
-  #  main_target_name: optional. If provided, overrides target_name when
-  #    creating sub-targets (e.g. "${main_target_name}__build_config") and
-  #    some output files (e.g. "${main_target_name}.sources"). Only used
-  #    for 'android_apk' types at the moment, where main_target_name will
-  #    be the name of the main APK target.
-  #  supports_android: Optional. True if target can run on Android.
-  #  requires_android: Optional. True if target can only run on Android.
-  #  java_files: Optional list of Java source file paths for this target.
-  #  srcjar_deps: Optional list of .srcjar targets (not file paths). The Java
-  #    source files they contain will also be compiled for this target.
-  #  java_sources_file: Optional path to a file which will be written with
-  #    the content of java_files. If not provided, the file will be written
-  #    under $target_gen_dir/$main_target_name.sources. Ignored if
-  #    java_files is empty.
-  #  jar_path: Optional path to a prebuilt .jar file for this target.
-  #    Mutually exclusive with java_files and srcjar_deps.
-  #  final_jar_path: Optional path to the final output .jar file (after
-  #    processing). If not provided, the output will go under
-  #    $root_build_dir/lib.java/
-  #  output_name: Optional output name for the final jar path. Ignored if
-  #    final_jar_path is provided. Otherwise, used to determine the name
-  #    of the final jar. If not provided, the default is to use the same
-  #    name as jar_path, if provided, or main_target_name.
-  #  dex_path: Optional. Path to the output dex.jar file for this target.
-  #    Ignored if !supports_android.
-  #  main_class: Main Java class name for 'java_binary', 'junit_binary' and
-  #    'java_annotation_processor' target types. Should not be set for other
-  #    ones.
-  #  deps: Dependencies for this target.
-  #  testonly: True iff target should only be used for tests.
-  #  no_build_hooks: Disables bytecode rewriting of asserts and android
-  #    resources methods.
-  #  chromium_code: Optional. Whether this is Chromium-specific code. If not
-  #    provided, this is determined automatically, based on the location of
-  #    the source files (i.e. anything under third_party/ is not
-  #    Chromium-specific unless it is in a 'chromium' sub-directory).
-  #  emma_never_instrument: Optional. If provided, whether to forbid
-  #    instrumentation with the Emma coverage processor. If not provided,
-  #    this is controlled by the global emma_coverage build arg variable
-  #    and only used for non-test Chromium code.
-  #  alternative_android_sdk_jar: Optional. Alternative Android system
-  #    android.jar to use.
-  #  alternative_android_sdk_ijar: Optional alternative Android system
-  #    interface jar path (i.e. the alternative_android_sdk_jar file processed
-  #    by the ijar tool).
-  #  alternative_android_sdk_ijar_dep: Optional. Dependency target that
-  #    generates alternative_android_sdk_ijar (if the latter is provided).
-  #  annotation_processor_deps: Optional list of dependencies corresponding
-  #    to annotation processors used to compile these sources.
-  #  input_jars_paths: Optional list of additional .jar file paths, which will
-  #    be added to the compile-time classpath when building this target (but
-  #    not to the runtime classpath).
-  #  classpath_deps: Optional list of additional java library dependencies,
-  #    whose .jar files will be added to the compile-time classpath when
-  #    building this target (but not to the runtime classpath).
-  #  gradle_treat_as_prebuilt: Cause generate_gradle.py to reference this
-  #    library via its built .jar rather than including its .java sources.
-  #  proguard_enabled: Optional. True to enable ProGuard obfuscation.
-  #  proguard_configs: Optional list of additional proguard config file paths.
-  #  bypass_platform_checks: Optional. If True, platform checks will not
-  #    be performed. They are used to verify that every target with
-  #    requires_android only depends on targets that at least set
-  #    supports_android. Similarly, if a target has !supports_android, then
-  #    it cannot depend on any other target that has requires_android.
-  #  include_java_resources: Optional. If True, include Java (not Android)
-  #    resources into final .jar file.
-  #  android_manifest_for_lint: Optional path to Android manifest to use
-  #    if Android linting is enabled. Ignored for 'android_apk' types
-  #    (since the value of android_manifest will be used instead).
-  #  lint_suppressions_file: Optional lint suppressions input file.
-  #  jar_excluded_patterns: Optional list of .class file patterns to exclude
-  #    from the final .jar file.
-  #  jar_included_patterns: Optional list of .class file patterns to include
-  #    in the final .jar file. jar_excluded_patterns take precedence over this.
-  #
-  # For 'android_apk' targets only:
-  #
-  #  apk_path: Path to the final APK file.
-  #  android_manifest: Path to AndroidManifest.xml file for the APK.
-  #  android_manifest_dep: Optional. Dependency target that generates
-  #    android_manifest.
-  #  apk_under_test: For 'android_apk' targets used to test other APKs,
-  #    this is the target name of APK being tested.
-  #  incremental_allowed: Optional (default false). True to allow the
-  #    generation of incremental APKs ('android_apk' targets only).
-  #  incremental_apk_path: If incremental_allowed, path to the incremental
-  #    output APK.
-  #  incremental_install_json_path: If incremental_allowed, path to the output
-  #    incremental install json configuration file.
-  #  non_native_packed_relocations: Optional. True if the target Android
-  #    system does not support compressed relocations in native shared
-  #    libraries.
-  #  shared_libraries_runtime_deps_file: Optional. Path to a file listing the
-  #    native shared libraries required at runtime by the final APK.
-  #  secondary_abi_shared_libraries_runtime_deps_file: Optional. Same, but
-  #    for native shared libraries of the secondary ABI.
-  #
-  # For 'java_binary' and 'junit_binary' targets only. Ignored by others:
-  #
-  #  bootclasspath: Optional list of boot class paths used by the generated
-  #    wrapper script.
-  #  wrapper_script_name: Optional name for the generated wrapper script.
-  #    Default is main target name.
-  #  wrapper_script_args: Optional list of extra arguments used by the
-  #    generated wrapper script.
-  #
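-  # Example (a minimal, hypothetical sketch: this template is normally
-  # wrapped by higher-level rules rather than called directly, and the target
-  # names and file paths below are illustrative):
-  #
-  #   java_library_impl("foo_java") {
-  #     type = "java_library"
-  #     supports_android = true
-  #     requires_android = true
-  #     java_files = [ "java/src/org/chromium/foo/Foo.java" ]
-  #     deps = [ ":bar_java" ]
-  #   }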
-  template("java_library_impl") {
-    set_sources_assignment_filter([])
-    forward_variables_from(invoker, [ "testonly" ])
-    _is_prebuilt = defined(invoker.jar_path)
-    _is_annotation_processor = invoker.type == "java_annotation_processor"
-    _is_java_binary =
-        invoker.type == "java_binary" || invoker.type == "junit_binary"
-    _supports_android =
-        defined(invoker.supports_android) && invoker.supports_android
-    _requires_android =
-        defined(invoker.requires_android) && invoker.requires_android
-
-    _main_target_name = target_name
-    if (defined(invoker.main_target_name)) {
-      _main_target_name = invoker.main_target_name
-    }
-    _java_files = []
-    if (defined(invoker.java_files)) {
-      _java_files = invoker.java_files
-    }
-    _srcjar_deps = []
-    if (defined(invoker.srcjar_deps)) {
-      _srcjar_deps = invoker.srcjar_deps
-    }
-    _has_sources = _java_files != [] || _srcjar_deps != []
-
-    if (_is_prebuilt) {
-      assert(!_has_sources)
-    } else {
-      # Allow java_binary to not specify any sources. This is needed when a
-      # prebuilt jar is used both as a library and as a binary.
-      assert(_is_annotation_processor || _is_java_binary || _has_sources)
-    }
-
-    if (_is_java_binary) {
-      assert(defined(invoker.main_class),
-             "${invoker.type}() must set main_class")
-    } else if (_is_annotation_processor) {
-      assert(defined(invoker.main_class),
-             "java_annotation_processor() must set main_class")
-    } else {
-      assert(!defined(invoker.main_class),
-             "main_class cannot be used for target of type ${invoker.type}")
-    }
-
-    # The only target that might have no prebuilt and no sources is a java_binary.
-    if (_is_prebuilt || _has_sources) {
-      if (defined(invoker.output_name)) {
-        _output_name = invoker.output_name
-      } else if (_is_prebuilt) {
-        _output_name = get_path_info(invoker.jar_path, "name")
-      } else {
-        _output_name = _main_target_name
-      }
-
-      # Jar files can be needed at runtime (by Robolectric tests or java binaries),
-      # so do not put them under gen/.
-      _target_dir_name = get_label_info(":$_main_target_name", "dir")
-      _final_jar_path =
-          "$root_out_dir/lib.java$_target_dir_name/$_output_name.jar"
-      if (defined(invoker.final_jar_path)) {
-        _final_jar_path = invoker.final_jar_path
-      }
-      _final_ijar_path =
-          get_path_info(_final_jar_path, "dir") + "/" +
-          get_path_info(_final_jar_path, "name") + ".interface.jar"
-
-      if (_has_sources) {
-        _javac_jar_path = "$target_gen_dir/$_main_target_name.javac.jar"
-      }
-
-      if (_is_prebuilt) {
-        _unprocessed_jar_path = invoker.jar_path
-      } else {
-        _unprocessed_jar_path = _javac_jar_path
-      }
-
-      if (_supports_android) {
-        _dex_path = "$target_gen_dir/$_main_target_name.dex.jar"
-        if (defined(invoker.dex_path)) {
-          _dex_path = invoker.dex_path
-        }
-      }
-    }
-
-    _accumulated_deps = []
-    if (defined(invoker.deps)) {
-      _accumulated_deps = invoker.deps
-    }
-    _accumulated_deps += [ "//build/config:exe_and_shlib_deps" ]
-
-    _enable_build_hooks =
-        _supports_android &&
-        (!defined(invoker.no_build_hooks) || !invoker.no_build_hooks)
-    if (_enable_build_hooks) {
-      _accumulated_deps += [ "//build/android/buildhooks:build_hooks_java" ]
-    }
-
-    # Some testonly targets use their own resources, and the code being
-    # tested will use custom resources, so there is no need to enable this
-    # for testonly targets.
-    _enable_build_hooks_android =
-        _enable_build_hooks && _requires_android &&
-        (!defined(invoker.testonly) || !invoker.testonly)
-    if (_enable_build_hooks_android) {
-      _accumulated_deps +=
-          [ "//build/android/buildhooks:build_hooks_android_java" ]
-    }
-
-    # Don't enable coverage or lint unless the target has some non-generated
-    # files.
-    if (defined(invoker.chromium_code)) {
-      _chromium_code = invoker.chromium_code
-    } else {
-      # Default based on whether target is in third_party.
-      set_sources_assignment_filter([ "*\bthird_party\b*" ])
-      sources = [
-        get_label_info(":$_main_target_name", "dir"),
-      ]
-      _chromium_code = sources != []
-      if (!_chromium_code && !_is_prebuilt && _java_files != []) {
-        # Unless third_party code has an org.chromium file in it.
-        set_sources_assignment_filter([ "*\bchromium\b*" ])
-        sources = _java_files
-        _chromium_code = _java_files != sources
-      }
-      set_sources_assignment_filter([])
-      sources = []
-    }
-
-    if (defined(_final_jar_path)) {
-      _emma_instrument = emma_coverage && _chromium_code && _java_files != [] &&
-                         (!defined(invoker.testonly) || !invoker.testonly)
-      if (defined(invoker.emma_never_instrument)) {
-        _emma_instrument = !invoker.emma_never_instrument && _emma_instrument
-      }
-      if (_emma_instrument) {
-        _accumulated_deps += [ "//third_party/android_tools:emma_device_java" ]
-      }
-    }
-
-    if (_java_files != []) {
-      _java_sources_file = "$target_gen_dir/$_main_target_name.sources"
-      if (defined(invoker.java_sources_file)) {
-        _java_sources_file = invoker.java_sources_file
-      }
-      write_file(_java_sources_file, rebase_path(_java_files, root_build_dir))
-    }
-
-    # Define the .build_config file for this target and the sub-target that
-    # writes it.
-    _build_config = "$target_gen_dir/$_main_target_name.build_config"
-    _build_config_target_name = "${_main_target_name}__build_config"
-
-    write_build_config(_build_config_target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "alternative_android_sdk_ijar",
-                               "annotation_processor_deps",
-                               "classpath_deps",
-                               "gradle_treat_as_prebuilt",
-                               "proguard_enabled",
-                               "proguard_configs",
-                               "input_jars_paths",
-                               "main_class",
-                               "type",
-                             ])
-      if (type == "android_apk") {
-        forward_variables_from(
-            invoker,
-            [
-              "android_manifest",
-              "android_manifest_dep",
-              "apk_path",
-              "apk_under_test",
-              "incremental_allowed",
-              "incremental_apk_path",
-              "incremental_install_json_path",
-              "non_native_packed_relocations",
-              "proto_resources_path",
-              "shared_libraries_runtime_deps_file",
-              "secondary_abi_shared_libraries_runtime_deps_file",
-            ])
-      }
-      build_config = _build_config
-      is_prebuilt = _is_prebuilt
-      possible_config_deps = _accumulated_deps
-      if (defined(apk_under_test)) {
-        possible_config_deps += [ apk_under_test ]
-      }
-      supports_android = _supports_android
-      requires_android = _requires_android
-      bypass_platform_checks = defined(invoker.bypass_platform_checks) &&
-                               invoker.bypass_platform_checks
-
-      if (defined(_final_jar_path)) {
-        jar_path = _final_jar_path
-        ijar_path = _final_ijar_path
-        unprocessed_jar_path = _unprocessed_jar_path
-      }
-      if (defined(_dex_path)) {
-        dex_path = _dex_path
-      }
-      if (_java_files != []) {
-        java_sources_file = _java_sources_file
-      }
-
-      bundled_srcjars = []
-      foreach(d, _srcjar_deps) {
-        _dep_gen_dir = get_label_info(d, "target_gen_dir")
-        _dep_name = get_label_info(d, "name")
-        bundled_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
-      }
-      if (defined(invoker.include_java_resources) &&
-          invoker.include_java_resources) {
-        # Use original jar_path because _jar_path points to a library without
-        # resources.
-        java_resources_jar = invoker.jar_path
-      }
-    }
-    _accumulated_deps += [ ":$_build_config_target_name" ]
-
-    # Don't need to depend on the apk-under-test to be packaged.
-    if (defined(invoker.apk_under_test)) {
-      _accumulated_deps += [ "${invoker.apk_under_test}__java" ]
-    }
-    if (defined(invoker.android_manifest_dep)) {
-      _accumulated_deps += [ invoker.android_manifest_dep ]
-    }
-    if (defined(invoker.classpath_deps)) {
-      _accumulated_deps += invoker.classpath_deps
-    }
-    if (defined(invoker.annotation_processor_deps)) {
-      _accumulated_deps += invoker.annotation_processor_deps
-    }
-
-    # TODO(agrieve): Enable lint for _has_sources rather than just _java_files.
-    _has_lint_target = _java_files != [] && _supports_android && _chromium_code
-    if (_has_sources) {
-      _compile_java_target = "${_main_target_name}__compile_java"
-      compile_java(_compile_java_target) {
-        forward_variables_from(invoker,
-                               [
-                                 "additional_jar_files",
-                                 "alternative_android_sdk_ijar",
-                                 "alternative_android_sdk_ijar_dep",
-                                 "apk_name",
-                                 "enable_errorprone",
-                                 "enable_incremental_javac_override",
-                                 "processor_args_javac",
-                                 "provider_configurations",
-                                 "javac_args",
-                               ])
-        build_config = _build_config
-        java_files = _java_files
-        if (_java_files != []) {
-          java_sources_file = _java_sources_file
-        }
-        srcjar_deps = _srcjar_deps
-        chromium_code = _chromium_code
-        requires_android = _requires_android
-        deps = _accumulated_deps
-        javac_jar_path = _javac_jar_path
-
-        # android_apk and junit_binary pass R.java srcjars via srcjar_deps.
-        if (invoker.type == "java_library" && _requires_android) {
-          _rebased_build_config = rebase_path(_build_config, root_build_dir)
-          srcjar_filearg = "@FileArg($_rebased_build_config:deps_info:owned_resource_srcjars)"
-        }
-      }
-      _accumulated_deps += [ ":$_compile_java_target" ]
-
-      if (_has_lint_target) {
-        android_lint("${_main_target_name}__lint") {
-          if (invoker.type == "android_apk") {
-            forward_variables_from(invoker, [ "android_manifest" ])
-          } else if (defined(invoker.android_manifest_for_lint)) {
-            android_manifest = invoker.android_manifest_for_lint
-          }
-          build_config = _build_config
-          requires_android = _requires_android
-          jar_path = _javac_jar_path
-          deps = _accumulated_deps
-          java_files = _java_files
-          if (_java_files != []) {
-            java_sources_file = _java_sources_file
-          }
-          if (defined(invoker.lint_suppressions_file)) {
-            lint_suppressions_file = invoker.lint_suppressions_file
-          }
-        }
-
-        # Use an intermediate group() as the data_deps target (rather than the
-        # lint target itself) in order to avoid lint artifacts showing up as
-        # runtime_deps, while still having lint run in parallel to other
-        # targets.
-        group("${_main_target_name}__analysis") {
-          public_deps = [
-            ":${_main_target_name}__lint",
-          ]
-        }
-      }
-    }  # _has_sources
-
-    if (defined(_final_jar_path)) {
-      _process_prebuilt_target_name = "${target_name}__process_prebuilt"
-      process_java_prebuilt(_process_prebuilt_target_name) {
-        forward_variables_from(invoker,
-                               [
-                                 "alternative_android_sdk_ijar",
-                                 "alternative_android_sdk_ijar_dep",
-                                 "alternative_android_sdk_jar",
-                                 "jar_excluded_patterns",
-                                 "jar_included_patterns",
-                               ])
-        supports_android = _supports_android
-        enable_build_hooks = _enable_build_hooks
-        enable_build_hooks_android = _enable_build_hooks_android
-        build_config = _build_config
-        input_jar_path = _unprocessed_jar_path
-        emma_instrument = _emma_instrument
-        if (_emma_instrument) {
-          java_files = _java_files
-          java_sources_file = _java_sources_file
-        }
-        output_jar_path = _final_jar_path
-        deps = _accumulated_deps
-
-        # Although these will be listed as deps in the depfile, they must also
-        # appear here so that "gn analyze" knows about them.
-        # https://crbug.com/827197
-        if (defined(invoker.proguard_configs)) {
-          inputs = invoker.proguard_configs
-          deps += _srcjar_deps  # For the aapt-generated proguard rules.
-        }
-      }
-      _accumulated_deps += [ ":$_process_prebuilt_target_name" ]
-
-      if (defined(_dex_path)) {
-        dex("${target_name}__dex") {
-          input_jars = [ _final_jar_path ]
-          output = _dex_path
-          deps = [
-            ":$_process_prebuilt_target_name",
-          ]
-        }
-        _accumulated_deps += [ ":${target_name}__dex" ]
-      }
-
-      _ijar_target_name = "${target_name}__ijar"
-      generate_interface_jar(_ijar_target_name) {
-        # Always use the unfiltered .jar to create the interface jar so that
-        # other targets will resolve filtered classes when depending on
-        # BuildConfig, NativeLibraries, etc.
-        input_jar = _unprocessed_jar_path
-        if (_is_prebuilt) {
-          forward_variables_from(invoker, [ "deps" ])
-        } else {
-          deps = [
-            ":$_compile_java_target",
-          ]
-        }
-        output_jar = _final_ijar_path
-      }
-      _accumulated_deps += [ ":$_ijar_target_name" ]
-    }
-
-    if (_is_java_binary) {
-      # Targets might use the generated script while building, so make it a dep
-      # rather than a data_dep.
-      java_binary_script("${target_name}__java_binary_script") {
-        forward_variables_from(invoker,
-                               [
-                                 "bootclasspath",
-                                 "main_class",
-                                 "wrapper_script_args",
-                               ])
-        build_config = _build_config
-        if (defined(_final_jar_path)) {
-          jar_path = _final_jar_path
-        }
-        script_name = _main_target_name
-        if (defined(invoker.wrapper_script_name)) {
-          script_name = invoker.wrapper_script_name
-        }
-        deps = _accumulated_deps
-      }
-      _accumulated_deps += [ ":${target_name}__java_binary_script" ]
-    }
-
-    group(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "data",
-                               "data_deps",
-                               "visibility",
-                             ])
-      public_deps = _accumulated_deps
-      if (_has_lint_target) {
-        if (!defined(data_deps)) {
-          data_deps = []
-        }
-        data_deps += [ ":${_main_target_name}__analysis" ]
-      }
-    }
-  }
-
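-  # Packs relocation sections of native shared libraries using the relocation
-  # packer, writing the packed copies under
-  # $target_gen_dir/$target_name/packed-libs and a JSON file listing them.
-  #
-  # Example (a minimal, hypothetical sketch: the target names, paths and the
-  # @FileArg() key below are illustrative; callers normally take
-  # libraries_filearg from a .build_config file):
-  #
-  #   pack_relocation_section("libfoo__pack_relocations") {
-  #     file_list_json = "$target_gen_dir/libfoo.packed_libs.json"
-  #     libraries_filearg =
-  #         "@FileArg($_rebased_build_config:native:libraries)"
-  #     inputs = [ "$target_gen_dir/libfoo.build_config" ]
-  #     deps = [ ":libfoo" ]
-  #   }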
-  template("pack_relocation_section") {
-    assert(defined(invoker.file_list_json))
-    assert(defined(invoker.libraries_filearg))
-    action(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "public_deps",
-                               "inputs",
-                               "testonly",
-                             ])
-      script = "//build/android/gyp/pack_relocations.py"
-      depfile = "$target_gen_dir/$target_name.d"
-      _packed_libraries_dir = "$target_gen_dir/$target_name/packed-libs"
-      outputs = [
-        invoker.file_list_json,
-      ]
-      deps += [ relocation_packer_target ]
-
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--enable-packing=1",
-        "--android-pack-relocations",
-        rebase_path(relocation_packer_exe, root_build_dir),
-        "--stripped-libraries-dir",
-        rebase_path(root_build_dir, root_build_dir),
-        "--packed-libraries-dir",
-        rebase_path(_packed_libraries_dir, root_build_dir),
-        "--libraries=${invoker.libraries_filearg}",
-        "--filelistjson",
-        rebase_path(invoker.file_list_json, root_build_dir),
-      ]
-    }
-  }
-}
diff --git a/build/config/android/rules.gni b/build/config/android/rules.gni
deleted file mode 100644
index c1b35a3..0000000
--- a/build/config/android/rules.gni
+++ /dev/null
@@ -1,3415 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Do not add any imports to non-//build directories here.
-# Some projects (e.g. V8) do not have non-build directories DEPS'ed in.
-import("//build/config/android/config.gni")
-import("//build/config/android/internal_rules.gni")
-import("//build/config/clang/clang.gni")
-import("//build/config/compiler/compiler.gni")
-import("//build/config/dcheck_always_on.gni")
-import("//build/toolchain/toolchain.gni")
-
-assert(is_android)
-
-declare_args() {
-  enable_jni_tracing = false
-}
-
-if (target_cpu == "arm") {
-  _sanitizer_arch = "arm"
-} else if (target_cpu == "arm64") {
-  _sanitizer_arch = "aarch64"
-} else if (target_cpu == "x86") {
-  _sanitizer_arch = "i686"
-}
-
-_sanitizer_runtimes = []
-if (use_cfi_diag || is_ubsan || is_ubsan_security || is_ubsan_vptr) {
-  _sanitizer_runtimes = [ "$clang_base_path/lib/clang/$clang_version/lib/linux/libclang_rt.ubsan_standalone-$_sanitizer_arch-android.so" ]
-}
-
-# Creates a dist directory for a native executable.
-#
-# Running a native executable on a device requires all the shared library
-# dependencies of that executable. To make it easier to install and run such an
-# executable, this will create a directory containing the native exe and all
-# it's library dependencies.
-#
-# Note: It's usually better to package things as an APK than as a native
-# executable.
-#
-# Variables
-#   dist_dir: Directory for the exe and libraries. Everything in this directory
-#     will be deleted before copying in the exe and libraries.
-#   binary: Path to (stripped) executable.
-#   extra_files: List of extra files to copy in (optional).
-#
-# Example
-#   create_native_executable_dist("foo_dist") {
-#     dist_dir = "$root_build_dir/foo_dist"
-#     binary = "$root_build_dir/foo"
-#     deps = [ ":the_thing_that_makes_foo" ]
-#   }
-template("create_native_executable_dist") {
-  forward_variables_from(invoker, [ "testonly" ])
-
-  _libraries_list = "${target_gen_dir}/${target_name}_library_dependencies.list"
-
-  _runtime_deps_file = "$target_gen_dir/${target_name}.runtimedeps"
-  _runtime_deps_target_name = "${target_name}__runtime_deps"
-  group(_runtime_deps_target_name) {
-    data = _sanitizer_runtimes
-    data_deps = []
-    if (defined(invoker.deps)) {
-      data_deps += invoker.deps
-    }
-    if (is_component_build || is_asan) {
-      data_deps += [ "//build/android:cpplib_stripped" ]
-    }
-    write_runtime_deps = _runtime_deps_file
-  }
-
-  _find_deps_target_name = "${target_name}__find_library_dependencies"
-
-  # TODO(agrieve): Extract dependent libs from GN rather than readelf.
-  action(_find_deps_target_name) {
-    deps = invoker.deps + [ ":$_runtime_deps_target_name" ]
-    script = "//build/android/gyp/write_ordered_libraries.py"
-    depfile = "$target_gen_dir/$target_name.d"
-    inputs = [
-      invoker.binary,
-      _runtime_deps_file,
-      android_readelf,
-    ]
-    outputs = [
-      _libraries_list,
-    ]
-    args = [
-      "--depfile",
-      rebase_path(depfile, root_build_dir),
-      "--runtime-deps",
-      rebase_path(_runtime_deps_file, root_build_dir),
-      "--output",
-      rebase_path(_libraries_list, root_build_dir),
-      "--readelf",
-      rebase_path(android_readelf, root_build_dir),
-    ]
-  }
-
-  copy_ex(target_name) {
-    inputs = [
-      _libraries_list,
-      invoker.binary,
-    ]
-
-    dest = invoker.dist_dir
-    data = [
-      "${invoker.dist_dir}/",
-    ]
-
-    _rebased_libraries_list = rebase_path(_libraries_list, root_build_dir)
-    _rebased_binaries_list = rebase_path([ invoker.binary ], root_build_dir)
-    args = [
-      "--clear",
-      "--files=@FileArg($_rebased_libraries_list:lib_paths)",
-      "--files=$_rebased_binaries_list",
-    ]
-    if (defined(invoker.extra_files)) {
-      _rebased_extra_files = rebase_path(invoker.extra_files, root_build_dir)
-      args += [ "--files=$_rebased_extra_files" ]
-    }
-
-    _depfile = "$target_gen_dir/$target_name.d"
-    _stamp_file = "$target_gen_dir/$target_name.stamp"
-    outputs = [
-      _stamp_file,
-    ]
-    args += [
-      "--depfile",
-      rebase_path(_depfile, root_build_dir),
-      "--stamp",
-      rebase_path(_stamp_file, root_build_dir),
-    ]
-
-    deps = [
-      ":$_find_deps_target_name",
-    ]
-    if (defined(invoker.deps)) {
-      deps += invoker.deps
-    }
-  }
-}
-
-# Writes a script to root_out_dir/bin that passes --output-directory to the
-# wrapped script, in addition to forwarding arguments. Most / all of these
-# wrappers should be made deps of //tools/android:android_tools.
-#
-# Variables
-#   target: Script to wrap.
-#   flag_name: Default is "--output-directory"
-#
-# Example
-#   wrapper_script("foo_wrapper") {
-#     target = "//pkg/foo.py"
-#   }
-template("wrapper_script") {
-  action(target_name) {
-    _name = get_path_info(invoker.target, "name")
-    _output = "$root_out_dir/bin/$_name"
-
-    script = "//build/android/gyp/create_tool_wrapper.py"
-    outputs = [
-      _output,
-    ]
-
-    # The target isn't actually used by the script, but it's nice to have GN
-    # check that it exists.
-    inputs = [
-      invoker.target,
-    ]
-    args = [
-      "--output",
-      rebase_path(_output, root_build_dir),
-      "--target",
-      rebase_path(invoker.target, root_build_dir),
-      "--output-directory",
-      rebase_path(root_out_dir, root_build_dir),
-    ]
-    if (defined(invoker.flag_name)) {
-      args += [ "--flag-name=${invoker.flag_name}" ]
-    }
-  }
-}
-
-if (enable_java_templates) {
-  import("//build/config/sanitizers/sanitizers.gni")
-  import("//tools/grit/grit_rule.gni")
-
-  # Declare a jni target
-  #
-  # This target generates the native jni bindings for a set of .java files.
-  #
-  # See base/android/jni_generator/jni_generator.py for more info about the
-  # format of generating JNI bindings.
-  #
-  # Variables
-  #   sources: list of .java files to generate jni for
-  #   jni_package: subdirectory path for generated bindings
-  #
-  # Example
-  #   generate_jni("foo_jni") {
-  #     sources = [
-  #       "android/java/src/org/chromium/foo/Foo.java",
-  #       "android/java/src/org/chromium/foo/FooUtil.java",
-  #     ]
-  #     jni_package = "foo"
-  #   }
-  template("generate_jni") {
-    set_sources_assignment_filter([])
-    forward_variables_from(invoker, [ "testonly" ])
-
-    _base_output_dir = "${target_gen_dir}/${target_name}"
-    _package_output_dir = "${_base_output_dir}/${invoker.jni_package}"
-    _jni_output_dir = "${_package_output_dir}/jni"
-
-    if (defined(invoker.jni_generator_include)) {
-      _jni_generator_include = invoker.jni_generator_include
-      _jni_generator_include_deps = []
-    } else {
-      _jni_generator_include =
-          "//base/android/jni_generator/jni_generator_helper.h"
-      _jni_generator_include_deps = [
-        # Using //base/android/jni_generator/jni_generator_helper.h introduces
-        # a dependency on debugging_buildflags indirectly through
-        # base/android/jni_android.h, which is part of the //base target.
-        # This can't depend directly on //base without causing a dependency
-        # cycle, though.
-        "//base:debugging_buildflags",
-      ]
-    }
-
-    _foreach_target_name = "${target_name}__jni_gen"
-    action_foreach(_foreach_target_name) {
-      script = "//base/android/jni_generator/jni_generator.py"
-      depfile = "$target_gen_dir/$target_name.{{source_name_part}}.d"
-      sources = invoker.sources
-      outputs = [
-        "${_jni_output_dir}/{{source_name_part}}_jni.h",
-      ]
-
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--input_file={{source}}",
-        "--ptr_type=long",
-        "--output_dir",
-        rebase_path(_jni_output_dir, root_build_dir),
-        "--includes",
-        rebase_path(_jni_generator_include, _jni_output_dir),
-      ]
-
-      if (enable_profiling) {
-        args += [ "--enable_profiling" ]
-      }
-      if (defined(invoker.namespace)) {
-        args += [ "-n ${invoker.namespace}" ]
-      }
-      if (enable_jni_tracing) {
-        args += [ "--enable_tracing" ]
-      }
-    }
-
-    config("jni_includes_${target_name}") {
-      # TODO(cjhopman): #includes should probably all be relative to
-      # _base_output_dir. Remove that from this config once the includes are
-      # updated.
-      include_dirs = [
-        _base_output_dir,
-        _package_output_dir,
-      ]
-    }
-
-    group(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "public_deps",
-                               "visibility",
-                             ])
-      if (!defined(public_deps)) {
-        public_deps = []
-      }
-      public_deps += [ ":$_foreach_target_name" ]
-      public_deps += _jni_generator_include_deps
-      public_configs = [ ":jni_includes_${target_name}" ]
-    }
-  }
-
-  # Declare a jni target for a prebuilt jar
-  #
-  # This target generates the native jni bindings for a set of classes in a .jar.
-  #
-  # See base/android/jni_generator/jni_generator.py for more info about the
-  # format of generating JNI bindings.
-  #
-  # Variables
-  #   classes: list of .class files in the jar to generate jni for. These should
-  #     include the full path to the .class file.
-  #   jni_package: subdirectory path for generated bindings
-  #   jar_file: the path to the .jar. If not provided, will default to the sdk's
-  #     android.jar
-  #
-  #   deps, public_deps: As normal
-  #
-  # Example
-  #   generate_jar_jni("foo_jni") {
-  #     classes = [
-  #       "android/view/Foo.class",
-  #     ]
-  #     jni_package = "foo"
-  #   }
-  template("generate_jar_jni") {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    if (defined(invoker.jar_file)) {
-      _jar_file = invoker.jar_file
-    } else {
-      _jar_file = android_sdk_jar
-    }
-
-    _base_output_dir = "${target_gen_dir}/${target_name}/${invoker.jni_package}"
-    _jni_output_dir = "${_base_output_dir}/jni"
-
-    if (defined(invoker.jni_generator_include)) {
-      _jni_generator_include = invoker.jni_generator_include
-    } else {
-      _jni_generator_include =
-          "//base/android/jni_generator/jni_generator_helper.h"
-    }
-
-    # TODO(cjhopman): make jni_generator.py support generating jni for multiple
-    # .class files from a .jar.
-    _jni_actions = []
-    foreach(_class, invoker.classes) {
-      _classname = get_path_info(_class, "name")
-      _jni_target_name = "${target_name}__jni_${_classname}"
-      _jni_actions += [ ":$_jni_target_name" ]
-      action(_jni_target_name) {
-        # The sources aren't compiled so don't check their dependencies.
-        check_includes = false
-        depfile = "$target_gen_dir/$target_name.d"
-        script = "//base/android/jni_generator/jni_generator.py"
-        inputs = [
-          _jar_file,
-        ]
-        outputs = [
-          "${_jni_output_dir}/${_classname}_jni.h",
-        ]
-
-        args = [
-          "--depfile",
-          rebase_path(depfile, root_build_dir),
-          "--jar_file",
-          rebase_path(_jar_file, root_build_dir),
-          "--input_file",
-          _class,
-          "--ptr_type=long",
-          "--output_dir",
-          rebase_path(_jni_output_dir, root_build_dir),
-          "--includes",
-          rebase_path(_jni_generator_include, _jni_output_dir),
-        ]
-
-        if (enable_profiling) {
-          args += [ "--enable_profiling" ]
-        }
-        if (enable_jni_tracing) {
-          args += [ "--enable_tracing" ]
-        }
-      }
-    }
-
-    config("jni_includes_${target_name}") {
-      include_dirs = [ _base_output_dir ]
-    }
-
-    group(target_name) {
-      public_deps = []
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "public_deps",
-                               "visibility",
-                             ])
-      public_deps += _jni_actions
-      public_configs = [ ":jni_includes_${target_name}" ]
-    }
-  }
-
-  # Declare a jni registration target.
-  #
-  # This target generates a header file calling JNI registration functions
-  # created by generate_jni and generate_jar_jni.
-  #
-  # See base/android/jni_generator/jni_registration_generator.py for more info
-  # about the format of the header file.
-  #
-  # Variables
-  #   target: The APK target to generate registrations for.
-  #   output: Path to the generated .h file.
-  #   exception_files: Optional list of .java files that should be ignored
-  #     when searching for native methods.
-  #
-  # Example
-  #   generate_jni_registration("chrome_jni_registration") {
-  #     target = ":chrome_public_apk"
-  #     output = "$root_gen_dir/chrome/browser/android/${target_name}.h"
-  #     exception_files = [
-  #       "//base/android/java/src/org/chromium/base/library_loader/LegacyLinker.java",
-  #       "//base/android/java/src/org/chromium/base/library_loader/Linker.java",
-  #       "//base/android/java/src/org/chromium/base/library_loader/ModernLinker.java",
-  #     ]
-  #   }
-  template("generate_jni_registration") {
-    action(target_name) {
-      forward_variables_from(invoker, [ "testonly" ])
-      _build_config = get_label_info(invoker.target, "target_gen_dir") + "/" +
-                      get_label_info(invoker.target, "name") + ".build_config"
-      _rebased_build_config = rebase_path(_build_config, root_build_dir)
-
-      _rebase_exception_java_files =
-          rebase_path(invoker.exception_files, root_build_dir)
-
-      script = "//base/android/jni_generator/jni_registration_generator.py"
-      deps = [
-        "${invoker.target}__build_config",
-      ]
-      inputs = [
-        _build_config,
-      ]
-      outputs = [
-        invoker.output,
-      ]
-      depfile = "$target_gen_dir/$target_name.d"
-
-      args = [
-        # This is a list of .sources files.
-        "--sources_files=@FileArg($_rebased_build_config:jni:all_source)",
-        "--output",
-        rebase_path(invoker.output, root_build_dir),
-        "--no_register_java=$_rebase_exception_java_files",
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-      ]
-    }
-  }
-
-  # Declare a target for c-preprocessor-generated java files
-  #
-  # NOTE: For generating Java counterparts to enums, prefer using the java_cpp_enum
-  #       rule instead.
-  #
-  # This target generates java files using the host C pre-processor. Each file in
-  # sources will be compiled using the C pre-processor. If include_path is
-  # specified, it will be passed (with --I) to the pre-processor.
-  #
-  # This target will create a single .srcjar. Adding this target to an
-  # android_library target's srcjar_deps will make the generated java files be
-  # included in that library's final outputs.
-  #
-  # Variables
-  #   sources: list of files to be processed by the C pre-processor. For each
-  #     file in sources, there will be one .java file in the final .srcjar. For a
-  #     file named FooBar.template, a java file will be created with name
-  #     FooBar.java.
-  #   inputs: additional compile-time dependencies. Any files
-  #     `#include`-ed in the templates should be listed here.
-  #   package_path: this will be the subdirectory for each .java file in the
-  #     .srcjar.
-  #
-  # Example
-  #   java_cpp_template("foo_generated_enum") {
-  #     sources = [
-  #       "android/java/templates/Foo.template",
-  #     ]
-  #     inputs = [
-  #       "android/java/templates/native_foo_header.h",
-  #     ]
-  #
-  #     package_path = "org/chromium/base/library_loader"
-  #     include_path = "android/java/templates"
-  #   }
-  template("java_cpp_template") {
-    set_sources_assignment_filter([])
-    forward_variables_from(invoker, [ "testonly" ])
-
-    _include_path = "//"
-    if (defined(invoker.include_path)) {
-      _include_path = invoker.include_path
-    }
-
-    _apply_gcc_target_name = "${target_name}__apply_gcc"
-    _base_gen_dir = "${target_gen_dir}/${target_name}/java_cpp_template"
-    _package_path = invoker.package_path
-
-    action_foreach(_apply_gcc_target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "inputs",
-                               "public_deps",
-                               "data_deps",
-                             ])
-      script = "//build/android/gyp/gcc_preprocess.py"
-      depfile =
-          "${target_gen_dir}/${invoker.target_name}_{{source_name_part}}.d"
-
-      sources = invoker.sources
-
-      outputs = [
-        "$_base_gen_dir/${_package_path}/{{source_name_part}}.java",
-      ]
-
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--include-path",
-        rebase_path(_include_path, root_build_dir),
-        "--output",
-        rebase_path(outputs[0], root_build_dir),
-        "--template={{source}}",
-      ]
-
-      if (defined(invoker.defines)) {
-        foreach(_def, invoker.defines) {
-          args += [
-            "--defines",
-            _def,
-          ]
-        }
-      }
-    }
-
-    # Filter out .d files.
-    set_sources_assignment_filter([ "*.d" ])
-    sources = get_target_outputs(":$_apply_gcc_target_name")
-
-    zip(target_name) {
-      forward_variables_from(invoker, [ "visibility" ])
-      inputs = sources
-      output = "${target_gen_dir}/${target_name}.srcjar"
-      base_dir = _base_gen_dir
-      deps = [
-        ":$_apply_gcc_target_name",
-      ]
-    }
-  }
-
-  # Declare a target for generating Java classes from C++ enums.
-  #
-  # This target generates Java files from C++ enums using a script.
-  #
-  # This target will create a single .srcjar. Adding this target to an
-  # android_library target's srcjar_deps will make the generated java files be
-  # included in that library's final outputs.
-  #
-  # Variables
-  #   sources: list of files to be processed by the script. For each annotated
-  #     enum contained in the source files, the script will generate a .java
-  #     file with the same name as the enum.
-  #
-  # Example
-  #   java_cpp_enum("foo_generated_enum") {
-  #     sources = [
-  #       "src/native_foo_header.h",
-  #     ]
-  #   }
-  template("java_cpp_enum") {
-    set_sources_assignment_filter([])
-    action(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "sources",
-                               "testonly",
-                               "visibility",
-                             ])
-
-      # The sources aren't compiled so don't check their dependencies.
-      check_includes = false
-      script = "//build/android/gyp/java_cpp_enum.py"
-      depfile = "$target_gen_dir/$target_name.d"
-
-      _srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
-      _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir)
-      _rebased_sources = rebase_path(invoker.sources, root_build_dir)
-
-      args = [
-               "--depfile",
-               rebase_path(depfile, root_build_dir),
-               "--srcjar=$_rebased_srcjar_path",
-             ] + _rebased_sources
-      outputs = [
-        _srcjar_path,
-      ]
-    }
-  }
-
-  # Declare a target for processing a Jinja template.
-  #
-  # Variables
-  #   input: The template file to be processed.
-  #   output: Where to save the result.
-  #   variables: (Optional) A list of variables to make available to the template
-  #     processing environment, e.g. ["name=foo", "color=red"].
-  #
-  # Example
-  #   jinja_template("chrome_public_manifest") {
-  #     input = "java/AndroidManifest.xml"
-  #     output = "$target_gen_dir/AndroidManifest.xml"
-  #   }
-  template("jinja_template") {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    action(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "visibility",
-                               "deps",
-                             ])
-
-      inputs = [
-        invoker.input,
-      ]
-      script = "//build/android/gyp/jinja_template.py"
-      depfile = "$target_gen_dir/$target_name.d"
-
-      outputs = [
-        invoker.output,
-      ]
-
-      args = [
-        "--loader-base-dir",
-        rebase_path("//", root_build_dir),
-        "--inputs",
-        rebase_path(invoker.input, root_build_dir),
-        "--output",
-        rebase_path(invoker.output, root_build_dir),
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-      ]
-      if (defined(invoker.variables)) {
-        args += [ "--variables=${invoker.variables}" ]
-      }
-    }
-  }
-
-  # Declare a target for processing Android resources as Jinja templates.
-  #
-  # This takes an Android resource directory where each resource is a Jinja
-  # template, processes each template, then packages the results in a zip file
-  # which can be consumed by an android resources, library, or apk target.
-  #
-  # If this target is included in the deps of an android resources/library/apk,
-  # the resources will be included with that target.
-  #
-  # Variables
-  #   resources: The list of resources files to process.
-  #   res_dir: The resource directory containing the resources.
-  #   variables: (Optional) A list of variables to make available to the template
-  #     processing environment, e.g. ["name=foo", "color=red"].
-  #
-  # Example
-  #   jinja_template_resources("chrome_public_template_resources") {
-  #     res_dir = "res_template"
-  #     resources = ["res_template/xml/syncable.xml"]
-  #     variables = ["color=red"]
-  #   }
-  template("jinja_template_resources") {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    # JUnit tests use resource zip files. These must not be put in the gen/
-    # directory or they will not be available to tester bots.
-    _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
-    _resources_zip = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
-    _build_config = "$target_gen_dir/$target_name.build_config"
-
-    write_build_config("${target_name}__build_config") {
-      build_config = _build_config
-      resources_zip = _resources_zip
-      type = "android_resources"
-      if (defined(invoker.deps)) {
-        possible_config_deps = invoker.deps
-      }
-    }
-
-    action("${target_name}__template") {
-      forward_variables_from(invoker, [ "deps" ])
-      inputs = invoker.resources
-      script = "//build/android/gyp/jinja_template.py"
-      depfile = "$target_gen_dir/$target_name.d"
-
-      outputs = [
-        _resources_zip,
-      ]
-
-      _rebased_resources = rebase_path(invoker.resources, root_build_dir)
-      args = [
-        "--inputs=${_rebased_resources}",
-        "--inputs-base-dir",
-        rebase_path(invoker.res_dir, root_build_dir),
-        "--outputs-zip",
-        rebase_path(_resources_zip, root_build_dir),
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-      ]
-      if (defined(invoker.variables)) {
-        variables = invoker.variables
-        args += [ "--variables=${variables}" ]
-      }
-    }
-
-    group(target_name) {
-      public_deps = [
-        ":${target_name}__build_config",
-        ":${target_name}__template",
-      ]
-    }
-  }
-
-  # Declare an Android resources target
-  #
-  # This creates a resources zip file that will be used when building an Android
-  # library or apk and included into a final apk.
-  #
-  # To include these resources in a library/apk, this target should be listed in
-  # the library's deps. A library/apk will also include any resources used by its
-  # own dependencies.
-  #
-  # Variables
-  #   deps: Specifies the dependencies of this target. Any Android resources
-  #     listed in deps will be included by libraries/apks that depend on this
-  #     target.
-  #   resource_dirs: List of directories containing resources for this target.
-  #   generated_resource_dirs: List of directories containing resources for this
-  #     target which are *generated* by a dependency. |generated_resource_files|
-  #     must be specified if |generated_resource_dirs| is specified.
-  #   generated_resource_files: List of all files in |generated_resource_dirs|.
-  #     |generated_resource_dirs| must be specified if |generated_resource_files|
-  #     is specified.
-  #   android_manifest: AndroidManifest.xml for this target (optional). Will be
-  #     merged into apks that directly or indirectly depend on this target.
-  #   android_manifest_dep: Target that generates AndroidManifest (if applicable)
-  #   custom_package: java package for generated .java files.
-  #   v14_skip: If true, don't run v14 resource generator on this. Defaults to
-  #     false. (see build/android/gyp/generate_v14_compatible_resources.py)
-  #   shared_resources: If true make a resource package that can be loaded by a
-  #     different application at runtime to access the package's resources.
-  #   r_text_file: (optional) path to pre-generated R.txt to be used when
-  #     generating R.java instead of resource-based aapt-generated one.
-  #   create_srcjar: If false, does not create an R.java file. Needed only for
-  #     prebuilts whose R.txt files do not match their res/ directory
-  #     (e.g. Play Services).
-  #
-  # Example:
-  #   android_resources("foo_resources") {
-  #     deps = [":foo_strings_grd"]
-  #     resource_dirs = ["res"]
-  #     custom_package = "org.chromium.foo"
-  #   }
-  #
-  #   android_resources("foo_resources_overrides") {
-  #     deps = [":foo_resources"]
-  #     resource_dirs = ["res_overrides"]
-  #   }
-  template("android_resources") {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    _base_path = "$target_gen_dir/$target_name"
-
-    # JUnit tests use resource zip files. These must not be put in the gen/
-    # directory or they will not be available to tester bots.
-    _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
-    _zip_path = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
-    _r_text_out_path = _base_path + "_R.txt"
-    _build_config = _base_path + ".build_config"
-    _build_config_target_name = "${target_name}__build_config"
-
-    if (!defined(invoker.create_srcjar) || invoker.create_srcjar) {
-      _srcjar_path = _base_path + ".srcjar"
-    }
-
-    write_build_config(_build_config_target_name) {
-      type = "android_resources"
-      build_config = _build_config
-      resources_zip = _zip_path
-
-      resource_dirs = invoker.resource_dirs
-      if (defined(invoker.generated_resource_dirs)) {
-        resource_dirs += invoker.generated_resource_dirs
-      }
-
-      if (defined(_srcjar_path)) {
-        forward_variables_from(invoker,
-                               [
-                                 "android_manifest",
-                                 "android_manifest_dep",
-                                 "custom_package",
-                               ])
-
-        # No package means resources override their deps.
-        if (defined(custom_package) || defined(android_manifest)) {
-          r_text = _r_text_out_path
-        } else {
-          assert(defined(invoker.deps),
-                 "Must specify deps when custom_package is omitted.")
-        }
-        srcjar = _srcjar_path
-      }
-
-      if (defined(invoker.deps)) {
-        possible_config_deps = invoker.deps
-      }
-    }
-
-    prepare_resources(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "android_manifest",
-                               "custom_package",
-                               "deps",
-                               "generated_resource_dirs",
-                               "generated_resource_files",
-                               "resource_dirs",
-                               "v14_skip",
-                             ])
-      if (!defined(deps)) {
-        deps = []
-      }
-      deps += [ ":$_build_config_target_name" ]
-      if (defined(invoker.android_manifest_dep)) {
-        deps += [ invoker.android_manifest_dep ]
-      }
-
-      build_config = _build_config
-      zip_path = _zip_path
-      r_text_out_path = _r_text_out_path
-
-      if (defined(invoker.r_text_file)) {
-        r_text_in_path = invoker.r_text_file
-      }
-      if (defined(_srcjar_path)) {
-        srcjar_path = _srcjar_path
-      }
-
-      # Always generate the R.onResourcesLoaded() method; it is required for
-      # compiling ResourceRewriter. There is no side effect because the
-      # generated R.class isn't used in the final apk.
-      shared_resources = true
-    }
-  }
-
-  # Declare an Android assets target.
-  #
-  # Defines a set of files to include as assets in a dependent apk.
-  #
-  # To include these assets in an apk, this target should be listed in
-  # the apk's deps, or in the deps of a library target used by an apk.
-  #
-  # Variables
-  #   deps: Specifies the dependencies of this target. Any Android assets
-  #     listed in deps will be included by libraries/apks that depend on this
-  #     target.
-  #   sources: List of files to include as assets.
-  #   renaming_sources: List of files to include as assets and be renamed.
-  #   renaming_destinations: List of asset paths for files in renaming_sources.
-  #   disable_compression: Whether to disable compression for files that are
-  #     known to be compressible (default: false).
-  #   treat_as_locale_paks: Causes base's BuildConfig.java to consider these
-  #     assets to be locale paks.
-  #
-  # Example:
-  # android_assets("content_shell_assets") {
-  #   deps = [
-  #     ":generates_foo",
-  #     ":other_assets",
-  #     ]
-  #   sources = [
-  #     "//path/asset1.png",
-  #     "//path/asset2.png",
-  #     "$target_gen_dir/foo.dat",
-  #   ]
-  # }
-  #
-  # android_assets("overriding_content_shell_assets") {
-  #   deps = [ ":content_shell_assets" ]
-  #   # Override foo.dat from content_shell_assets.
-  #   sources = [ "//custom/foo.dat" ]
-  #   renaming_sources = [ "//path/asset2.png" ]
-  #   renaming_destinations = [ "renamed/asset2.png" ]
-  # }
-  template("android_assets") {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    _build_config = "$target_gen_dir/$target_name.build_config"
-    _build_config_target_name = "${target_name}__build_config"
-
-    write_build_config(_build_config_target_name) {
-      type = "android_assets"
-      build_config = _build_config
-
-      forward_variables_from(invoker,
-                             [
-                               "disable_compression",
-                               "treat_as_locale_paks",
-                             ])
-
-      if (defined(invoker.deps)) {
-        possible_config_deps = invoker.deps
-      }
-
-      if (defined(invoker.sources)) {
-        asset_sources = invoker.sources
-      }
-      if (defined(invoker.renaming_sources)) {
-        assert(defined(invoker.renaming_destinations))
-        _source_count = 0
-        foreach(_, invoker.renaming_sources) {
-          _source_count += 1
-        }
-        _dest_count = 0
-        foreach(_, invoker.renaming_destinations) {
-          _dest_count += 1
-        }
-        assert(
-            _source_count == _dest_count,
-            "android_assets() renaming_sources.length != renaming_destinations.length")
-        asset_renaming_sources = invoker.renaming_sources
-        asset_renaming_destinations = invoker.renaming_destinations
-      }
-    }
-
-    group(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "visibility",
-                             ])
-      public_deps = [
-        ":$_build_config_target_name",
-      ]
-    }
-  }
-
-  # Declare a group() that supports forwarding java dependency information.
-  #
-  # Example
-  #  java_group("conditional_deps") {
-  #    if (enable_foo) {
-  #      deps = [":foo_java"]
-  #    }
-  #  }
-  template("java_group") {
-    forward_variables_from(invoker, [ "testonly" ])
-    write_build_config("${target_name}__build_config") {
-      type = "group"
-      build_config = "$target_gen_dir/${invoker.target_name}.build_config"
-      supports_android = true
-      if (defined(invoker.deps)) {
-        possible_config_deps = invoker.deps
-      }
-    }
-    group(target_name) {
-      forward_variables_from(invoker, "*")
-      if (!defined(deps)) {
-        deps = []
-      }
-      deps += [ ":${target_name}__build_config" ]
-    }
-  }
-
-  # Declare a target that generates localized strings.xml from a .grd file.
-  #
-  # If this target is included in the deps of an android resources/library/apk,
-  # the strings.xml will be included with that target.
-  #
-  # Variables
-  #   deps: Specifies the dependencies of this target.
-  #   grd_file: Path to the .grd file to generate strings.xml from.
-  #   outputs: Expected grit outputs (see grit rule).
-  #
-  # Example
-  #  java_strings_grd("foo_strings_grd") {
-  #    grd_file = "foo_strings.grd"
-  #  }
-  template("java_strings_grd") {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    # JUnit tests use resource zip files. These must not be put in the gen/
-    # directory or they will not be available to tester bots.
-    _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
-    _resources_zip = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
-    _build_config = "$target_gen_dir/$target_name.build_config"
-
-    write_build_config("${target_name}__build_config") {
-      type = "android_resources"
-      build_config = _build_config
-      resources_zip = _resources_zip
-    }
-
-    _grit_target_name = "${target_name}__grit"
-    _grit_output_dir = "$target_gen_dir/${target_name}_grit_output"
-
-    grit(_grit_target_name) {
-      forward_variables_from(invoker, [ "deps" ])
-      grit_flags = [
-        "-E",
-        "ANDROID_JAVA_TAGGED_ONLY=false",
-      ]
-      output_dir = _grit_output_dir
-      resource_ids = ""
-      source = invoker.grd_file
-      outputs = invoker.outputs
-    }
-
-    zip(target_name) {
-      base_dir = _grit_output_dir
-
-      # This needs to get outputs from grit's internal target, not the final
-      # source_set.
-      inputs = get_target_outputs(":${_grit_target_name}_grit")
-      output = _resources_zip
-      deps = [
-        ":$_grit_target_name",
-      ]
-    }
-  }
-
-  # Declare a target that packages strings.xml generated from a grd file.
-  #
-  # If this target is included in the deps of an android resources/library/apk,
-  # the strings.xml will be included with that target.
-  #
-  # Variables
-  #  grit_output_dir: directory containing grit-generated files.
-  #  generated_files: list of android resource files to package.
-  #
-  # Example
-  #  java_strings_grd_prebuilt("foo_strings_grd") {
-  #    grit_output_dir = "$root_gen_dir/foo/grit"
-  #    generated_files = [
-  #      "values/strings.xml"
-  #    ]
-  #  }
-  template("java_strings_grd_prebuilt") {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    # JUnit tests use resource zip files. These must not be put in the gen/
-    # directory or they will not be available to tester bots.
-    _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
-    _resources_zip = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
-    _build_config = "$target_gen_dir/$target_name.build_config"
-    _build_config_target_name = "${target_name}__build_config"
-
-    write_build_config(_build_config_target_name) {
-      type = "android_resources"
-      build_config = _build_config
-      resources_zip = _resources_zip
-    }
-
-    zip(target_name) {
-      forward_variables_from(invoker, [ "visibility" ])
-
-      base_dir = invoker.grit_output_dir
-      inputs = rebase_path(invoker.generated_files, ".", base_dir)
-      output = _resources_zip
-      deps = [
-        ":$_build_config_target_name",
-      ]
-      if (defined(invoker.deps)) {
-        deps += invoker.deps
-      }
-    }
-  }
-
-  # Declare a Java executable target
-  #
-  # Same as java_library, but also creates a wrapper script within
-  # $root_out_dir/bin.
-  #
-  # Supports all variables of java_library(), plus:
-  #   main_class: When specified, a wrapper script is created within
-  #     $root_build_dir/bin to launch the binary with the given class as the
-  #     entrypoint.
-  #   wrapper_script_name: Filename for the wrapper script (default=target_name)
-  #   wrapper_script_args: List of additional arguments for the wrapper script.
-  #
-  # Example
-  #   java_binary("foo") {
-  #     java_files = [ "org/chromium/foo/FooMain.java" ]
-  #     deps = [ ":bar_java" ]
-  #     main_class = "org.chromium.foo.FooMain"
-  #   }
-  #
-  #   java_binary("foo") {
-  #     jar_path = "lib/prebuilt.jar"
-  #     deps = [ ":bar_java" ]
-  #     main_class = "org.chromium.foo.FooMain"
-  #   }
-  template("java_binary") {
-    java_library_impl(target_name) {
-      forward_variables_from(invoker, "*")
-      type = "java_binary"
-    }
-  }
-
-  # Declare a Java Annotation Processor.
-  #
-  # Supports all variables of java_library(), plus:
-  #   jar_path: Path to a prebuilt jar. Mutually exclusive with java_files &
-  #     srcjar_deps.
-  #   main_class: The fully-qualified class name of the processor's entry
-  #       point.
-  #
-  # Example
-  #   java_annotation_processor("foo_processor") {
-  #     java_files = [ "org/chromium/foo/FooProcessor.java" ]
-  #     deps = [ ":bar_java" ]
-  #     main_class = "org.chromium.foo.FooProcessor"
-  #   }
-  #
-  #   java_annotation_processor("foo_processor") {
-  #     jar_path = "lib/prebuilt.jar"
-  #     main_class = "org.chromium.foo.FooMain"
-  #   }
-  #
-  #   java_library("...") {
-  #     annotation_processor_deps = [":foo_processor"]
-  #   }
-  #
-  template("java_annotation_processor") {
-    java_library_impl(target_name) {
-      forward_variables_from(invoker, "*")
-      type = "java_annotation_processor"
-    }
-  }
-
-  # Declare a JUnit executable target
-  #
-  # This target creates an executable from Java code for running as a JUnit
-  # test suite. The executable will be in the output folder's /bin/ directory.
-  #
-  # Supports all variables of java_binary().
-  #
-  # Example
-  #   junit_binary("foo") {
-  #     java_files = [ "org/chromium/foo/FooTest.java" ]
-  #     deps = [ ":bar_java" ]
-  #   }
-  template("junit_binary") {
-    testonly = true
-
-    _java_binary_target_name = "${target_name}__java_binary"
-    _test_runner_target_name = "${target_name}__test_runner_script"
-    _main_class = "org.chromium.testing.local.JunitTestMain"
-
-    _build_config = "$target_gen_dir/$target_name.build_config"
-    _build_config_target_name = "${target_name}__build_config"
-    _deps = [
-      "//testing/android/junit:junit_test_support",
-      "//third_party/junit",
-      "//third_party/mockito:mockito_java",
-      "//third_party/robolectric:robolectric_all_java",
-
-      # This dep is required if any deps require android (but it doesn't hurt
-      # to add it regardless) and is used by bytecode-rewritten classes.
-      "//build/android/buildhooks:build_hooks_android_impl_java",
-    ]
-    if (defined(invoker.deps)) {
-      _deps += invoker.deps
-    }
-
-    _prepare_resources_target = "${target_name}__prepare_resources"
-    prepare_resources(_prepare_resources_target) {
-      deps = _deps + [ ":$_build_config_target_name" ]
-      build_config = _build_config
-      srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
-      if (defined(invoker.package_name)) {
-        custom_package = invoker.package_name
-      }
-      if (defined(invoker.android_manifest_path)) {
-        android_manifest = invoker.android_manifest_path
-      } else {
-        android_manifest = "//build/android/AndroidManifest.xml"
-      }
-    }
-
-    java_library_impl(_java_binary_target_name) {
-      forward_variables_from(invoker, "*", [ "deps" ])
-      type = "junit_binary"
-      main_target_name = invoker.target_name
-
-      # Robolectric can handle deps that set !supports_android as well as those
-      # that set requires_android.
-      bypass_platform_checks = true
-      deps = _deps
-      testonly = true
-      main_class = _main_class
-      wrapper_script_name = "helper/$main_target_name"
-      if (!defined(srcjar_deps)) {
-        srcjar_deps = []
-      }
-      srcjar_deps += [
-        ":$_prepare_resources_target",
-
-        # This dep is required for any targets that depend on //base:base_java.
-        "//base:base_build_config_gen",
-      ]
-    }
-
-    test_runner_script(_test_runner_target_name) {
-      test_name = invoker.target_name
-      test_suite = invoker.target_name
-      test_type = "junit"
-      ignore_all_data_deps = true
-      forward_variables_from(invoker,
-                             [
-                               "android_manifest_path",
-                               "package_name",
-                             ])
-    }
-
-    group(target_name) {
-      public_deps = [
-        ":$_build_config_target_name",
-        ":$_java_binary_target_name",
-        ":$_test_runner_target_name",
-      ]
-    }
-  }
-
-  # Declare a java library target
-  #
-  # Variables
-  #   deps: Specifies the dependencies of this target. Java targets in this list
-  #     will be added to the javac classpath.
-  #   annotation_processor_deps: List of java_annotation_processor targets to
-  #     use when compiling.
-  #
-  #   jar_path: Path to a prebuilt jar. Mutually exclusive with java_files &
-  #     srcjar_deps.
-  #   java_files: List of .java files included in this library.
-  #   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
-  #     will be added to java_files and be included in this library.
-  #
-  #   input_jars_paths: A list of paths to the jars that should be included
-  #     in the compile-time classpath. These are in addition to library .jars
-  #     that appear in deps.
-  #   classpath_deps: Deps that should be added to the classpath for this target,
-  #     but not linked into the apk (use this for annotation processors).
-  #
-  #   chromium_code: If true, extra analysis warning/errors will be enabled.
-  #   enable_errorprone: If true, enables the errorprone compiler.
-  #   enable_incremental_javac_override: Overrides the global
-  #     enable_incremental_javac.
-  #
-  #   jar_excluded_patterns: List of patterns of .class files to exclude.
-  #   jar_included_patterns: List of patterns of .class files to include.
-  #     When omitted, all classes not matched by jar_excluded_patterns are
-  #     included. When specified, all non-matching .class files are stripped.
-  #
-  #   output_name: File name for the output .jar (not including extension).
-  #     Defaults to the input .jar file name.
-  #
-  #   proguard_configs: List of proguard configs to use in final apk step for
-  #     any apk that depends on this library.
-  #
-  #   supports_android: If true, Android targets (android_library, android_apk)
-  #     may depend on this target. Note: if true, this target must only use the
-  #     subset of Java available on Android.
-  #   bypass_platform_checks: Disables checks about cross-platform (Java/Android)
-  #     dependencies for this target. This will allow depending on an
-  #     android_library target, for example.
-  #
-  #   additional_jar_files: Use to package additional files (Java resources)
-  #     into the output jar. Pass a list of length-2 lists with format:
-  #         [ [ path_to_file, path_to_put_in_jar ] ]
-  #
-  #   javac_args: Additional arguments to pass to javac.
-  #
-  #   data_deps, testonly
-  #
-  # Example
-  #   java_library("foo_java") {
-  #     java_files = [
-  #       "org/chromium/foo/Foo.java",
-  #       "org/chromium/foo/FooInterface.java",
-  #       "org/chromium/foo/FooService.java",
-  #     ]
-  #     deps = [
-  #       ":bar_java"
-  #     ]
-  #     srcjar_deps = [
-  #       ":foo_generated_enum"
-  #     ]
-  #     jar_excluded_patterns = [
-  #       "*/FooService.class", "org/chromium/FooService\$*.class"
-  #     ]
-  #   }
-  template("java_library") {
-    java_library_impl(target_name) {
-      forward_variables_from(invoker, "*")
-      type = "java_library"
-    }
-  }
-
-  # Declare a java library target for a prebuilt jar
-  #
-  # Supports all variables of java_library().
-  #
-  # Example
-  #   java_prebuilt("foo_java") {
-  #     jar_path = "foo.jar"
-  #     deps = [
-  #       ":foo_resources",
-  #       ":bar_java"
-  #     ]
-  #   }
-  template("java_prebuilt") {
-    java_library_impl(target_name) {
-      forward_variables_from(invoker, "*")
-      type = "java_library"
-    }
-  }
-
-  # Combines all dependent .jar files into a single .jar file.
-  #
-  # Variables:
-  #   output: Path to the output jar.
-  #   dex_path: Path to dex()'ed output (optional).
-  #   override_build_config: Use a pre-existing .build_config. Must be of type
-  #     "apk".
-  #   use_interface_jars: Use all dependent interface .jars rather than
-  #     implementation .jars.
-  #   use_unprocessed_jars: Use unprocessed / undesugared .jars.
-  #   direct_deps_only: Do not recurse on deps.
-  #   proguard_enabled: Whether to run ProGuard on resulting jar.
-  #   proguard_configs: List of proguard configs.
-  #   proguard_jar_path: The path to proguard.jar you wish to use. If undefined,
-  #     the proguard used will be the checked-in one in //third_party/proguard.
-  #   alternative_android_sdk_jar: System jar to use when proguard is enabled.
-  #
-  # Example
-  #   dist_jar("lib_fatjar") {
-  #     deps = [ ":my_java_lib" ]
-  #     output = "$root_build_dir/MyLibrary.jar"
-  #   }
-  #   dist_jar("sideloaded_dex") {
-  #     deps = [ ":my_java_lib" ]
-  #     output = "$root_build_dir/MyLibrary.jar"
-  #     dex_path = "$root_build_dir/MyLibrary.dex"
-  #   }
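-  #
-  #   The following is an illustrative sketch (the target and output names are
-  #   hypothetical) showing direct_deps_only combined with use_interface_jars:
-  #
-  #   dist_jar("my_direct_interface_jar") {
-  #     deps = [ ":my_java_lib" ]
-  #     direct_deps_only = true
-  #     use_interface_jars = true
-  #     output = "$root_build_dir/MyLibraryInterfaces.jar"
-  #   }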
-  template("dist_jar") {
-    forward_variables_from(invoker, [ "testonly" ])
-    _supports_android =
-        !defined(invoker.supports_android) || invoker.supports_android
-    _requires_android =
-        defined(invoker.requires_android) && invoker.requires_android
-    _proguard_enabled =
-        defined(invoker.proguard_enabled) && invoker.proguard_enabled
-    _use_interface_jars =
-        defined(invoker.use_interface_jars) && invoker.use_interface_jars
-    _use_unprocessed_jars =
-        defined(invoker.use_unprocessed_jars) && invoker.use_unprocessed_jars
-    _direct_deps_only =
-        defined(invoker.direct_deps_only) && invoker.direct_deps_only
-    assert(!(_proguard_enabled && _use_interface_jars),
-           "Cannot set both proguard_enabled and use_interface_jars")
-    assert(!(_proguard_enabled && _direct_deps_only),
-           "Cannot set both proguard_enabled and direct_deps_only")
-    assert(!(_use_unprocessed_jars && _use_interface_jars),
-           "Cannot set both use_interface_jars and use_unprocessed_jars")
-
-    _jar_target_name = target_name
-    if (defined(invoker.dex_path)) {
-      if (_proguard_enabled) {
-        _jar_target_name = "${target_name}__proguard"
-      } else {
-        _jar_target_name = "${target_name}__dist_jar"
-      }
-    }
-
-    _deps = []
-    if (defined(invoker.deps)) {
-      _deps = invoker.deps
-    }
-    _enable_build_hooks =
-        _supports_android &&
-        (!defined(invoker.no_build_hooks) || !invoker.no_build_hooks)
-    if (_enable_build_hooks && _requires_android) {
-      _deps += [ "//build/android/buildhooks:build_hooks_android_impl_java" ]
-    }
-
-    if (defined(invoker.override_build_config)) {
-      _build_config = invoker.override_build_config
-    } else {
-      _build_config = "$target_gen_dir/$target_name.build_config"
-      _build_config_target_name = "${target_name}__build_config"
-
-      write_build_config(_build_config_target_name) {
-        type = "dist_jar"
-        forward_variables_from(invoker,
-                               [
-                                 "proguard_enabled",
-                                 "proguard_configs",
-                               ])
-        supports_android = _supports_android
-        requires_android = _requires_android
-        possible_config_deps = _deps
-        build_config = _build_config
-      }
-
-      _deps += [ ":$_build_config_target_name" ]
-    }
-
-    _rebased_build_config = rebase_path(_build_config, root_build_dir)
-    if (_proguard_enabled) {
-      proguard(_jar_target_name) {
-        forward_variables_from(invoker,
-                               [
-                                 "alternative_android_sdk_jar",
-                                 "data",
-                                 "proguard_jar_path",
-                               ])
-        deps = _deps
-        inputs = [
-          _build_config,
-        ]
-
-        # Although these will be listed as deps in the depfile, they must also
-        # appear here so that "gn analyze" knows about them.
-        # https://crbug.com/827197
-        if (defined(invoker.proguard_configs)) {
-          inputs += invoker.proguard_configs
-        }
-
-        output_jar_path = invoker.output
-        args = [
-          "--proguard-configs=@FileArg($_rebased_build_config:deps_info:proguard_all_configs)",
-          "--input-paths=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath)",
-          "--classpath=@FileArg($_rebased_build_config:deps_info:proguard_all_extra_jars)",
-        ]
-        if (defined(invoker.proguard_config_exclusions)) {
-          _rebased_proguard_config_exclusions =
-              rebase_path(invoker.proguard_config_exclusions, root_build_dir)
-          args += [
-            "--proguard-config-exclusions=$_rebased_proguard_config_exclusions",
-          ]
-        }
-      }
-    } else {
-      action(_jar_target_name) {
-        forward_variables_from(invoker, [ "data" ])
-        script = "//build/android/gyp/create_dist_jar.py"
-        depfile = "$target_gen_dir/$target_name.d"
-        deps = _deps
-
-        inputs = [
-          _build_config,
-        ]
-
-        outputs = [
-          invoker.output,
-        ]
-
-        args = [
-          "--depfile",
-          rebase_path(depfile, root_build_dir),
-          "--output",
-          rebase_path(invoker.output, root_build_dir),
-        ]
-
-        if (_direct_deps_only) {
-          if (_use_interface_jars) {
-            args += [ "--jars=@FileArg($_rebased_build_config:javac:interface_classpath)" ]
-          } else if (_use_unprocessed_jars) {
-            args +=
-                [ "--jars=@FileArg($_rebased_build_config:javac:classpath)" ]
-          } else {
-            assert(
-                false,
-                "direct_deps_only does not work without use_interface_jars or use_unprocessed_jars")
-          }
-        } else {
-          if (_use_interface_jars) {
-            args += [ "--jars=@FileArg($_rebased_build_config:dist_jar:all_interface_jars)" ]
-          } else if (_use_unprocessed_jars) {
-            args += [ "--jars=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)" ]
-          } else {
-            args += [ "--jars=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath)" ]
-          }
-        }
-      }
-    }
-    if (defined(invoker.dex_path)) {
-      dex(target_name) {
-        deps = [
-          ":$_jar_target_name",
-        ]
-        input_jars = [ invoker.output ]
-        output = invoker.dex_path
-      }
-    }
-  }
-
-  # Creates an Android .aar library.
-  #
-  # Currently supports:
-  #   * AndroidManifest.xml
-  #   * classes.jar
-  #   * jni/
-  #   * res/
-  #   * R.txt
-  #   * proguard.txt
-  # Does not yet support:
-  #   * public.txt
-  #   * annotations.zip
-  #   * assets/
-  # See: https://developer.android.com/studio/projects/android-library.html#aar-contents
-  #
-  # Variables:
-  #   output: Path to the output .aar.
-  #   proguard_configs: List of proguard configs (optional).
-  #   android_manifest: Path to AndroidManifest.xml (optional).
-  #   native_libraries: list of native libraries (optional).
-  #   direct_deps_only: Do not recurse on deps (optional; defaults to false).
-  #
-  # Example
-  #   dist_aar("my_aar") {
-  #     deps = [ ":my_java_lib" ]
-  #     output = "$root_build_dir/MyLibrary.aar"
-  #   }
-  template("dist_aar") {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    _deps = []
-    if (defined(invoker.deps)) {
-      _deps = invoker.deps
-    }
-
-    _direct_deps_only =
-        defined(invoker.direct_deps_only) && invoker.direct_deps_only
-
-    _build_config = "$target_gen_dir/$target_name.build_config"
-    _build_config_target_name = "${target_name}__build_config"
-
-    write_build_config(_build_config_target_name) {
-      type = "dist_aar"
-      forward_variables_from(invoker, [ "proguard_configs" ])
-      possible_config_deps = _deps
-      supports_android = true
-      requires_android = true
-      build_config = _build_config
-    }
-
-    _deps += [ ":$_build_config_target_name" ]
-
-    _rebased_build_config = rebase_path(_build_config, root_build_dir)
-
-    action(target_name) {
-      forward_variables_from(invoker, [ "data" ])
-      depfile = "$target_gen_dir/$target_name.d"
-      deps = _deps
-      script = "//build/android/gyp/dist_aar.py"
-
-      inputs = [
-        _build_config,
-      ]
-
-      # Although these will be listed as deps in the depfile, they must also
-      # appear here so that "gn analyze" knows about them.
-      # https://crbug.com/827197
-      if (defined(invoker.proguard_configs)) {
-        inputs += invoker.proguard_configs
-      }
-
-      outputs = [
-        invoker.output,
-      ]
-
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--output",
-        rebase_path(invoker.output, root_build_dir),
-        "--dependencies-res-zips=@FileArg($_rebased_build_config:resources:dependency_zips)",
-        "--r-text-files=@FileArg($_rebased_build_config:resources:extra_r_text_files)",
-        "--proguard-configs=@FileArg($_rebased_build_config:deps_info:proguard_all_configs)",
-      ]
-      if (_direct_deps_only) {
-        args += [ "--jars=@FileArg($_rebased_build_config:javac:classpath)" ]
-      } else {
-        args += [ "--jars=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)" ]
-      }
-      if (defined(invoker.android_manifest)) {
-        args += [
-          "--android-manifest",
-          rebase_path(invoker.android_manifest, root_build_dir),
-        ]
-      }
-      if (defined(invoker.native_libraries) && invoker.native_libraries != []) {
-        inputs += invoker.native_libraries
-        _rebased_native_libraries =
-            rebase_path(invoker.native_libraries, root_build_dir)
-
-        args += [
-          "--native-libraries=$_rebased_native_libraries",
-          "--abi=$android_app_abi",
-        ]
-      }
-    }
-  }
-
-  # Declare an Android library target
-  #
-  # This target creates an Android library containing java code and Android
-  # resources.
-  #
-  # Supports all variables of java_library(), plus:
-  #   android_manifest_for_lint: Path to AndroidManifest.xml (optional). This
-  #     manifest will be used by Android lint, but will not be merged into apks.
-  #     To have a manifest merged, add it to an android_resources() target.
-  #   deps: In addition to defining java deps, this can also include
-  #     android_assets() and android_resources() targets.
-  #   dex_path: If set, the resulting .dex.jar file will be placed under this
-  #     path.
-  #   alternative_android_sdk_ijar: if set, the given android_sdk_ijar file
-  #     replaces the default android_sdk_ijar.
-  #   alternative_android_sdk_ijar_dep: the target that generates
-  #      alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
-  #      is used.
-  #   alternative_android_sdk_jar: actual jar corresponding to
-  #      alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
-  #      is used.
-  #
-  # Example
-  #   android_library("foo_java") {
-  #     java_files = [
-  #       "android/org/chromium/foo/Foo.java",
-  #       "android/org/chromium/foo/FooInterface.java",
-  #       "android/org/chromium/foo/FooService.java",
-  #     ]
-  #     deps = [
-  #       ":bar_java"
-  #     ]
-  #     srcjar_deps = [
-  #       ":foo_generated_enum"
-  #     ]
-  #     jar_excluded_patterns = [
-  #       "*/FooService.class", "org/chromium/FooService\$*.class"
-  #     ]
-  #   }
-  template("android_library") {
-    java_library(target_name) {
-      forward_variables_from(invoker, "*")
-
-      if (defined(alternative_android_sdk_ijar)) {
-        assert(defined(alternative_android_sdk_ijar_dep))
-        assert(defined(alternative_android_sdk_jar))
-      }
-
-      supports_android = true
-      requires_android = true
-
-      if (!defined(jar_excluded_patterns)) {
-        jar_excluded_patterns = []
-      }
-      jar_excluded_patterns += [
-        "*/R.class",
-        "*/R\$*.class",
-        "*/Manifest.class",
-        "*/Manifest\$*.class",
-      ]
-    }
-  }
-
-  # Declare an Android library target for a prebuilt jar
-  #
-  # This target creates an Android library containing java code and Android
-  # resources.
-  #
-  # Supports all variables of android_library().
-  #
-  # Example
-  #   android_java_prebuilt("foo_java") {
-  #     jar_path = "foo.jar"
-  #     deps = [
-  #       ":foo_resources",
-  #       ":bar_java"
-  #     ]
-  #   }
-  template("android_java_prebuilt") {
-    android_library(target_name) {
-      forward_variables_from(invoker, "*")
-    }
-  }
-
-  # Creates org/chromium/base/BuildConfig.java
-  # This doesn't really belong in //build since it generates a file for //base.
-  # However, we don't currently have a better way to include this file in all
-  # apks that depend on //base:base_java.
-  #
-  # Variables:
-  #   use_final_fields: True to use final fields. All other variables are
-  #       ignored when this is false.
-  #   build_config: Path to build_config used for locale list
-  #   enable_multidex: Value for ENABLE_MULTIDEX.
-  #   firebase_app_id: Value for FIREBASE_APP_ID.
-  #
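-  # Example (an illustrative sketch patterned on the android_apk() usage
-  # further below; the "foo" target and file names are hypothetical):
-  #   generate_build_config_srcjar("foo__build_config_java") {
-  #     use_final_fields = true
-  #     build_config = "$target_gen_dir/foo.build_config"
-  #     enable_multidex = false
-  #     deps = [ ":foo__build_config" ]
-  #   }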
-  template("generate_build_config_srcjar") {
-    java_cpp_template(target_name) {
-      package_path = "org/chromium/base"
-      sources = [
-        "//base/android/java/templates/BuildConfig.template",
-      ]
-      defines = []
-
-      # TODO(agrieve): These two are not target-specific and should be moved
-      #     to BuildHooks.java.
-      # Set these even when !use_final_fields so that they have correct default
-      # values within junit_binary().
-      if (is_java_debug || dcheck_always_on) {
-        defines += [ "_DCHECK_IS_ON" ]
-      }
-      if (use_cfi_diag || is_ubsan || is_ubsan_security || is_ubsan_vptr) {
-        defines += [ "_IS_UBSAN" ]
-      }
-
-      if (invoker.use_final_fields) {
-        forward_variables_from(invoker,
-                               [
-                                 "deps",
-                                 "testonly",
-                               ])
-        defines += [ "USE_FINAL" ]
-        if (invoker.enable_multidex) {
-          defines += [ "ENABLE_MULTIDEX" ]
-        }
-        inputs = [
-          invoker.build_config,
-        ]
-        _rebased_build_config = rebase_path(invoker.build_config)
-        defines += [
-          "COMPRESSED_LOCALE_LIST=" +
-              "@FileArg($_rebased_build_config:compressed_locales_java_list)",
-          "UNCOMPRESSED_LOCALE_LIST=" +
-              "@FileArg($_rebased_build_config:uncompressed_locales_java_list)",
-        ]
-        if (defined(invoker.firebase_app_id)) {
-          defines += [ "_FIREBASE_APP_ID=${invoker.firebase_app_id}" ]
-        }
-        if (defined(invoker.resources_version_variable)) {
-          defines += [
-            "_RESOURCES_VERSION_VARIABLE=${invoker.resources_version_variable}",
-          ]
-        }
-      }
-    }
-  }
-
-  # Declare an Android apk target
-  #
-  # This target creates an Android APK containing java code, resources, assets,
-  # and (possibly) native libraries.
-  #
-  # Supports all variables of android_library(), plus:
-  #   android_manifest: Path to AndroidManifest.xml.
-  #   android_manifest_dep: Target that generates AndroidManifest (if applicable)
-  #   png_to_webp: If true, pngs (with the exception of 9-patch) are
-  #     converted to webp during resource packaging.
-  #   dist_ijar_path: Path to create "${target_name}_dist_ijar" target
-  #     (used by instrumentation_test_apk).
-  #   apk_name: Name for final apk.
-  #   final_apk_path: Path to final built apk. Default is
-  #     $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
-  #   loadable_modules: List of paths to native libraries to include. Different
-  #     from |shared_libraries| in that:
-  #       * dependencies of this .so are not automatically included
-  #       * ".cr.so" is never added
-  #       * they are not side-loaded for _incremental targets.
-  #       * load_library_from_apk, use_chromium_linker,
-  #         and enable_relocation_packing do not apply
-  #     Use this instead of shared_libraries when you are going to load the library
-  #     conditionally, and only when shared_libraries doesn't work for you.
-  #   shared_libraries: List shared_library targets to bundle. If these
-  #     libraries depend on other shared_library targets, those dependencies will
-  #     also be included in the apk (e.g. for is_component_build).
-  #   secondary_abi_shared_libraries: secondary abi shared_library targets to
-  #     bundle. If these libraries depend on other shared_library targets, those
-  #     dependencies will also be included in the apk (e.g. for is_component_build).
-  #   native_lib_placeholders: List of placeholder filenames to add to the apk
-  #     (optional).
-  #   secondary_native_lib_placeholders: List of placeholder filenames to add to
-  #     the apk for the secondary ABI (optional).
-  #   apk_under_test: For an instrumentation test apk, this is the target of the
-  #     tested apk.
-  #   write_asset_list: Adds an extra file to the assets, which contains a list of
-  #     all other asset files.
-  #   generate_buildconfig_java: If defined and false, skip generating the
-  #     BuildConfig java class describing the build configuration. The default
-  #     is true for non-test APKs.
-  #   firebase_app_id: The value for BuildConfig.FIREBASE_APP_ID (optional).
-  #     Identifier is sent with crash reports to enable Java stack deobfuscation.
-  #   requires_sdk_api_level_23: If defined and true, the apk is intended for
-  #     installation only on Android M or later. In these releases the system
-  #     linker does relocation unpacking, so we can enable it unconditionally.
-  #   secondary_native_libs (deprecated): The path of native libraries for secondary
-  #     app abi.
-  #   proguard_jar_path: The path to proguard.jar you wish to use. If undefined,
-  #     the proguard used will be the checked-in one in //third_party/proguard.
-  #   never_incremental: If true, |incremental_apk_by_default| will be ignored.
-  #   aapt_locale_whitelist: If set, all locales not in this list will be
-  #     stripped from resources.arsc.
-  #   exclude_xxxhdpi: Causes all drawable-xxxhdpi images to be excluded
-  #     (mipmaps are still included).
-  #   xxxhdpi_whitelist: A list of globs used when exclude_xxxhdpi=true. Files
-  #     that match this whitelist will still be included.
-  #   shared_resources: True if this is a runtime shared library APK, like
-  #     the system_webview_apk target. Ensures that its resources can be
-  #     used by the loading application process.
-  #   app_as_shared_lib: True if this is a regular application apk that can
-  #     also serve as a runtime shared library, like the monochrome_public_apk
-  #     target. Ensures that the resources are usable both by the APK running
-  #     as an application, or by another process that loads it at runtime.
-  #   shared_resources_whitelist_target: Optional name of a target specifying
-  #     an input R.txt file that lists the resources that can be exported
-  #     by the APK when shared_resources or app_as_shared_lib is defined.
-  #
-  # Example
-  #   android_apk("foo_apk") {
-  #     android_manifest = "AndroidManifest.xml"
-  #     java_files = [
-  #       "android/org/chromium/foo/FooApplication.java",
-  #       "android/org/chromium/foo/FooActivity.java",
-  #     ]
-  #     deps = [
-  #       ":foo_support_java"
-  #       ":foo_resources"
-  #     ]
-  #     srcjar_deps = [
-  #       ":foo_generated_enum"
-  #     ]
-  #     shared_libraries = [
-  #       ":my_shared_lib",
-  #     ]
-  #   }
-  template("android_apk") {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    assert(defined(invoker.final_apk_path) || defined(invoker.apk_name))
-    assert(defined(invoker.android_manifest))
-    _gen_dir = "$target_gen_dir/$target_name"
-    _base_path = "$_gen_dir/$target_name"
-    _build_config = "$target_gen_dir/$target_name.build_config"
-    _build_config_target = "${target_name}__build_config"
-
-    # JUnit tests use resource zip files. These must not be put in the gen/
-    # directory or they will not be available to tester bots.
-    _jar_path = "$_base_path.jar"
-    _lib_dex_path = "$_base_path.dex.jar"
-    _rebased_lib_dex_path = rebase_path(_lib_dex_path, root_build_dir)
-    _template_name = target_name
-
-    _enable_multidex =
-        defined(invoker.enable_multidex) && invoker.enable_multidex
-    if (_enable_multidex) {
-      _final_dex_path = "$_gen_dir/classes.dex.zip"
-    } else {
-      _final_dex_path = "$_gen_dir/classes.dex"
-    }
-    _final_dex_target_name = "${_template_name}__final_dex"
-
-    if (defined(invoker.final_apk_path)) {
-      _final_apk_path = invoker.final_apk_path
-    } else {
-      _final_apk_path = "$root_build_dir/apks/${invoker.apk_name}.apk"
-    }
-    _final_apk_path_no_ext_list =
-        process_file_template([ _final_apk_path ],
-                              "{{source_dir}}/{{source_name_part}}")
-    _final_apk_path_no_ext = _final_apk_path_no_ext_list[0]
-    assert(_final_apk_path_no_ext != "")  # Mark as used.
-
-    # resource_sizes.py needs to be able to find the unpacked resources.arsc file based on apk name.
-    _packaged_resources_path =
-        "$root_gen_dir/arsc/" +
-        rebase_path(_final_apk_path_no_ext, root_build_dir) + ".ap_"
-
-    _incremental_install_json_path =
-        "$target_gen_dir/$target_name.incremental.json"
-
-    _version_code = android_default_version_code
-    if (defined(invoker.version_code)) {
-      _version_code = invoker.version_code
-    }
-
-    _version_name = android_default_version_name
-    if (defined(invoker.version_name)) {
-      _version_name = invoker.version_name
-    }
-    _keystore_path = android_keystore_path
-    _keystore_name = android_keystore_name
-    _keystore_password = android_keystore_password
-
-    if (defined(invoker.keystore_path)) {
-      _keystore_path = invoker.keystore_path
-      _keystore_name = invoker.keystore_name
-      _keystore_password = invoker.keystore_password
-    }
-
-    _deps = []
-    if (defined(invoker.deps)) {
-      _deps = invoker.deps
-      set_sources_assignment_filter([ "*manifest*" ])
-      sources = _deps
-      set_sources_assignment_filter([])
-      if (sources != _deps) {
-        _bad_deps = _deps - sources
-        assert(
-            false,
-            "Possible manifest-generating dep found in deps. Use android_manifest_dep for this instead. Found: $_bad_deps")
-      }
-      sources = []
-    }
-
-    _srcjar_deps = []
-    if (defined(invoker.srcjar_deps)) {
-      _srcjar_deps = invoker.srcjar_deps
-    }
-
-    _use_build_hooks =
-        !defined(invoker.no_build_hooks) || !invoker.no_build_hooks
-    if (defined(invoker.build_hooks_android_impl_deps)) {
-      assert(_use_build_hooks,
-             "Cannot set no_build_hooks and build_hooks_android_impl_deps at " +
-                 "the same time")
-      _deps += invoker.build_hooks_android_impl_deps
-    } else if (_use_build_hooks) {
-      _deps += [ "//build/android/buildhooks:build_hooks_android_impl_java" ]
-    }
-
-    _android_root_manifest_deps = []
-    if (defined(invoker.android_manifest_dep)) {
-      _android_root_manifest_deps = [ invoker.android_manifest_dep ]
-    }
-    _android_root_manifest = invoker.android_manifest
-
-    _use_chromium_linker =
-        defined(invoker.use_chromium_linker) && invoker.use_chromium_linker
-    _pack_relocations =
-        defined(invoker.pack_relocations) && invoker.pack_relocations
-
-    _load_library_from_apk =
-        defined(invoker.load_library_from_apk) && invoker.load_library_from_apk
-    _requires_sdk_api_level_23 = defined(invoker.requires_sdk_api_level_23) &&
-                                 invoker.requires_sdk_api_level_23
-
-    assert(_use_chromium_linker || true)  # Mark as used.
-    assert(_requires_sdk_api_level_23 || true)
-    assert(!_pack_relocations || !use_lld,
-           "Use //build/config/android:lld_pack_relocations to pack " +
-               "relocations when use_lld=true.")
-    assert(!_pack_relocations || _use_chromium_linker ||
-               _requires_sdk_api_level_23,
-           "pack_relocations requires either use_chromium_linker " +
-               "or requires_sdk_api_level_23")
-    assert(!_load_library_from_apk || _use_chromium_linker ||
-               _requires_sdk_api_level_23,
-           "load_library_from_apk requires use_chromium_linker " +
-               "or requires_sdk_api_level_23")
-
-    # The dependency that builds the Chromium linker, if one is needed.
-    _native_libs_deps = []
-    _shared_libraries_is_valid =
-        defined(invoker.shared_libraries) && invoker.shared_libraries != []
-    _secondary_abi_native_libs_deps = []
-    assert(_secondary_abi_native_libs_deps == [])  # mark as used.
-    _secondary_abi_shared_libraries_is_valid =
-        defined(invoker.secondary_abi_shared_libraries) &&
-        invoker.secondary_abi_shared_libraries != []
-
-    if (is_component_build || is_asan) {
-      if (_shared_libraries_is_valid) {
-        _native_libs_deps += [ "//build/android:cpplib_stripped" ]
-      }
-      if (_secondary_abi_shared_libraries_is_valid) {
-        _secondary_abi_native_libs_deps += [
-          "//build/android:cpplib_stripped($android_secondary_abi_toolchain)",
-        ]
-      }
-    }
-
-    if (_shared_libraries_is_valid) {
-      _native_libs_deps += invoker.shared_libraries
-
-      # To determine the filenames of all dependent shared libraries, write the
-      # runtime deps of |shared_libraries| to a file during "gn gen".
-      # write_build_config.py will then grep this file for *.so to obtain the
-      # complete list.
-      _runtime_deps_file =
-          "$target_gen_dir/${_template_name}.native.runtimedeps"
-      group("${_template_name}__runtime_deps") {
-        deps = _native_libs_deps
-        write_runtime_deps = _runtime_deps_file
-      }
-
-      _native_lib_version_rule = ""
-      if (defined(invoker.native_lib_version_rule)) {
-        _native_lib_version_rule = invoker.native_lib_version_rule
-      }
-      _native_lib_version_arg = "\"\""
-      if (defined(invoker.native_lib_version_arg)) {
-        _native_lib_version_arg = invoker.native_lib_version_arg
-      }
-    } else {
-      # Must exist for instrumentation_test_apk() to depend on.
-      group("${_template_name}__runtime_deps") {
-      }
-    }
-
-    if (_secondary_abi_shared_libraries_is_valid) {
-      _secondary_abi_native_libs_deps += invoker.secondary_abi_shared_libraries
-
-      # To determine the filenames of all dependent shared libraries, write the
-      # runtime deps of |shared_libraries| to a file during "gn gen".
-      # write_build_config.py will then grep this file for *.so to obtain the
-      # complete list.
-      _secondary_abi_runtime_deps_file =
-          "$target_gen_dir/${_template_name}.secondary.abi.native.runtimedeps"
-      group("${_template_name}__secondary_abi__runtime_deps") {
-        deps = _secondary_abi_native_libs_deps
-        write_runtime_deps = _secondary_abi_runtime_deps_file
-      }
-    } else {
-      # Must exist for instrumentation_test_apk() to depend on.
-      group("${_template_name}__secondary_abi_runtime_deps") {
-      }
-    }
-
-    _rebased_build_config = rebase_path(_build_config, root_build_dir)
-
-    _generate_buildconfig_java = !defined(invoker.apk_under_test)
-    if (defined(invoker.generate_buildconfig_java)) {
-      _generate_buildconfig_java = invoker.generate_buildconfig_java
-    }
-
-    _proguard_enabled =
-        defined(invoker.proguard_enabled) && invoker.proguard_enabled
-    if (_proguard_enabled) {
-      _proguard_output_jar_path = "$_base_path.proguard.jar"
-    }
-
-    _incremental_allowed =
-        !(defined(invoker.never_incremental) && invoker.never_incremental)
-
-    _android_manifest =
-        "$target_gen_dir/${_template_name}_manifest/AndroidManifest.xml"
-    _merge_manifest_target = "${_template_name}__merge_manifests"
-    merge_manifests(_merge_manifest_target) {
-      input_manifest = _android_root_manifest
-      output_manifest = _android_manifest
-      build_config = _build_config
-      deps = _android_root_manifest_deps + [ ":$_build_config_target" ]
-    }
-
-    _final_deps = []
-
-    if (_enable_multidex) {
-      _generated_proguard_main_dex_config =
-          "$_base_path.resources.main-dex-proguard.txt"
-    }
-    _generated_proguard_config = "$_base_path.resources.proguard.txt"
-
-    if (_generate_buildconfig_java &&
-        defined(invoker.product_version_resources_dep)) {
-      _deps += [ invoker.product_version_resources_dep ]
-    }
-
-    _compile_resources_target = "${_template_name}__compile_resources"
-
-    if (android_enable_app_bundles) {
-      # Path to the intermediate proto-format resources zip file.
-      _packaged_proto_resources_path =
-          "$root_gen_dir/proto_resources/" +
-          rebase_path(_final_apk_path_no_ext, root_build_dir) + ".proto.ap_"
-
-      # Define another target used to generate an archive containing the
-      # resources compiled in protocol buffer format. This is not used by
-      # the APK itself, but by android_bundle_module() targets that depend
-      # on it.
-      _compile_proto_resources_target =
-          "${_template_name}__compile_proto_resources"
-    }
-
-    compile_apk_resources(_compile_resources_target) {
-      forward_variables_from(invoker,
-                             [
-                               "alternative_android_sdk_jar",
-                               "app_as_shared_lib",
-                               "shared_resources",
-                               "support_zh_hk",
-                               "aapt_locale_whitelist",
-                               "exclude_xxxhdpi",
-                               "png_to_webp",
-                               "xxxhdpi_whitelist",
-                               "no_xml_namespaces",
-                             ])
-      android_manifest = _android_manifest
-      version_code = _version_code
-      version_name = _version_name
-
-      # Subtle: required to avoid GN build errors. "testonly" cannot be added
-      # to the forward_variables_from() above because it was already forwarded
-      # at the start of android_apk(). And if the assignment below is not
-      # performed, GN will complain that some test-only targets depend
-      # on non-test-only ones.
-      testonly = defined(testonly) && testonly
-
-      if (defined(invoker.post_process_package_resources_script)) {
-        post_process_script = invoker.post_process_package_resources_script
-      }
-      srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
-      r_text_out_path = "${target_gen_dir}/${target_name}_R.txt"
-      proguard_file = _generated_proguard_config
-      if (_enable_multidex) {
-        proguard_file_main_dex = _generated_proguard_main_dex_config
-      }
-      output = _packaged_resources_path
-
-      build_config = _build_config
-      deps = _deps + [
-               ":$_merge_manifest_target",
-               ":$_build_config_target",
-             ]
-      if (defined(invoker.shared_resources_whitelist_target)) {
-        _whitelist_gen_dir =
-            get_label_info(invoker.shared_resources_whitelist_target,
-                           "target_gen_dir")
-        _whitelist_target_name =
-            get_label_info(invoker.shared_resources_whitelist_target, "name")
-        shared_resources_whitelist =
-            "${_whitelist_gen_dir}/${_whitelist_target_name}" +
-            "__compile_resources_R.txt"
-        deps += [
-          "${invoker.shared_resources_whitelist_target}__compile_resources",
-        ]
-      }
-
-      if (android_enable_app_bundles) {
-        proto_output = _packaged_proto_resources_path
-        proto_resources_target = _compile_proto_resources_target
-      }
-    }
-    _srcjar_deps += [ ":$_compile_resources_target" ]
-
-    if (_native_libs_deps != []) {
-      _enable_chromium_linker_tests = false
-      if (defined(invoker.enable_chromium_linker_tests)) {
-        _enable_chromium_linker_tests = invoker.enable_chromium_linker_tests
-      }
-      _ordered_libraries_json =
-          "$target_gen_dir/$target_name.ordered_libararies.json"
-      _rebased_ordered_libraries_json =
-          rebase_path(_ordered_libraries_json, root_build_dir)
-      _ordered_libraries_target = "${_template_name}__write_ordered_libraries"
-
-      # TODO(agrieve): Make GN write runtime deps in dependency order so as to
-      # not need this manual sorting step.
-      action(_ordered_libraries_target) {
-        script = "//build/android/gyp/write_ordered_libraries.py"
-        deps = [
-          ":$_build_config_target",
-          ":${_template_name}__runtime_deps",
-        ]
-        inputs = [
-          _runtime_deps_file,
-        ]
-        outputs = [
-          _ordered_libraries_json,
-        ]
-        _rebased_android_readelf = rebase_path(android_readelf, root_build_dir)
-        args = [
-          "--readelf=$_rebased_android_readelf",
-          "--output=$_rebased_ordered_libraries_json",
-          "--runtime-deps=" + rebase_path(_runtime_deps_file, root_build_dir),
-        ]
-        if (defined(invoker.dont_load_shared_libraries)) {
-          args += [ "--exclude-shared-libraries=" +
-                    invoker.dont_load_shared_libraries ]
-        }
-      }
-
-      java_cpp_template("${_template_name}__native_libraries_java") {
-        package_path = "org/chromium/base/library_loader"
-        sources = [
-          "//base/android/java/templates/NativeLibraries.template",
-        ]
-        inputs = [
-          _ordered_libraries_json,
-        ]
-        deps = [
-          ":${_ordered_libraries_target}",
-        ]
-        if (_native_lib_version_rule != "") {
-          deps += [ _native_lib_version_rule ]
-        }
-
-        defines = [
-          "NATIVE_LIBRARIES_LIST=" +
-              "@FileArg($_rebased_ordered_libraries_json:java_libraries_list)",
-          "NATIVE_LIBRARIES_VERSION_NUMBER=$_native_lib_version_arg",
-        ]
-        if (current_cpu == "arm" || current_cpu == "arm64") {
-          defines += [ "ANDROID_APP_CPU_FAMILY_ARM" ]
-        } else if (current_cpu == "x86" || current_cpu == "x64") {
-          defines += [ "ANDROID_APP_CPU_FAMILY_X86" ]
-        } else if (current_cpu == "mipsel" || current_cpu == "mips64el") {
-          defines += [ "ANDROID_APP_CPU_FAMILY_MIPS" ]
-        } else {
-          assert(false, "Unsupported CPU family")
-        }
-        if (_use_chromium_linker) {
-          defines += [ "ENABLE_CHROMIUM_LINKER" ]
-        }
-        if (_load_library_from_apk) {
-          defines += [ "ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE" ]
-        }
-        if (_enable_chromium_linker_tests) {
-          defines += [ "ENABLE_CHROMIUM_LINKER_TESTS" ]
-        }
-      }
-      _srcjar_deps += [ ":${_template_name}__native_libraries_java" ]
-    }
-
-    if (_generate_buildconfig_java) {
-      generate_build_config_srcjar("${_template_name}__build_config_java") {
-        forward_variables_from(invoker, [ "firebase_app_id" ])
-        use_final_fields = true
-        build_config = _build_config
-        enable_multidex = _enable_multidex
-        if (defined(invoker.product_version_resources_dep)) {
-          resources_version_variable =
-              "org.chromium.base.R.string.product_version"
-        }
-        deps = [
-          ":$_build_config_target",
-        ]
-      }
-      _srcjar_deps += [ ":${_template_name}__build_config_java" ]
-    }
-
-    _java_target = "${_template_name}__java"
-    java_library_impl(_java_target) {
-      forward_variables_from(invoker,
-                             [
-                               "android_manifest",
-                               "android_manifest_dep",
-                               "apk_under_test",
-                               "chromium_code",
-                               "classpath_deps",
-                               "emma_never_instrument",
-                               "java_files",
-                               "no_build_hooks",
-                               "javac_args",
-                             ])
-      type = "android_apk"
-      main_target_name = _template_name
-      supports_android = true
-      requires_android = true
-      deps = _deps
-
-      srcjar_deps = _srcjar_deps
-      final_jar_path = _jar_path
-      dex_path = _lib_dex_path
-      apk_path = _final_apk_path
-
-      if (android_enable_app_bundles) {
-        proto_resources_path = _packaged_proto_resources_path
-      }
-
-      incremental_allowed = _incremental_allowed
-      incremental_apk_path = "${_final_apk_path_no_ext}_incremental.apk"
-      incremental_install_json_path = _incremental_install_json_path
-
-      proguard_enabled = _proguard_enabled
-      if (_proguard_enabled) {
-        proguard_configs = [ _generated_proguard_config ]
-        if (defined(invoker.proguard_configs)) {
-          proguard_configs += invoker.proguard_configs
-        }
-        if (_enable_multidex) {
-          proguard_configs += [ "//build/android/multidex.flags" ]
-        }
-      }
-      non_native_packed_relocations = _pack_relocations
-
-      # Don't depend on the runtime_deps target in order to avoid having to
-      # build the native libraries just to create the .build_config file.
-      # The dep is unnecessary since the runtime_deps file is created by gn gen
-      # and the runtime_deps file is added to write_build_config.py's depfile.
-      if (_native_libs_deps != []) {
-        shared_libraries_runtime_deps_file = _runtime_deps_file
-      }
-      if (_secondary_abi_native_libs_deps != []) {
-        secondary_abi_shared_libraries_runtime_deps_file =
-            _secondary_abi_runtime_deps_file
-      }
-    }
-
-    # TODO(cjhopman): This is only ever needed to calculate the list of tests to
-    # run. See build/android/pylib/instrumentation/test_jar.py. We should be
-    # able to just do that calculation at build time instead.
-    if (defined(invoker.dist_ijar_path)) {
-      _dist_ijar_path = invoker.dist_ijar_path
-      dist_jar("${_template_name}_dist_ijar") {
-        override_build_config = _build_config
-        output = _dist_ijar_path
-        data = [
-          _dist_ijar_path,
-        ]
-        use_interface_jars = true
-        deps = [
-          ":$_build_config_target",
-          ":$_java_target",
-        ]
-      }
-    }
-
-    if (_proguard_enabled) {
-      _proguard_target = "${_template_name}__proguard"
-      proguard(_proguard_target) {
-        forward_variables_from(invoker,
-                               [
-                                 "alternative_android_sdk_jar",
-                                 "proguard_jar_path",
-                               ])
-        deps = _deps + [
-                 ":$_build_config_target",
-                 ":$_compile_resources_target",
-                 ":$_java_target",
-               ]
-        inputs = [
-          _build_config,
-          _jar_path,
-        ]
-
-        output_jar_path = _proguard_output_jar_path
-        args = [
-          "--proguard-configs=@FileArg($_rebased_build_config:deps_info:proguard_all_configs)",
-          "--input-paths=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath)",
-          "--classpath=@FileArg($_rebased_build_config:deps_info:proguard_all_extra_jars)",
-        ]
-        if (defined(invoker.proguard_config_exclusions)) {
-          _rebased_proguard_config_exclusions =
-              rebase_path(invoker.proguard_config_exclusions, root_build_dir)
-          args += [
-            "--proguard-config-exclusions=$_rebased_proguard_config_exclusions",
-          ]
-        }
-      }
-      _dex_sources = [ _proguard_output_jar_path ]
-      _dex_deps = [ ":$_proguard_target" ]
-
-      _copy_proguard_mapping_target = "${_template_name}__copy_proguard_mapping"
-      copy(_copy_proguard_mapping_target) {
-        sources = [
-          "$_proguard_output_jar_path.mapping",
-        ]
-        outputs = [
-          "$_final_apk_path.mapping",
-        ]
-        deps = [
-          ":$_proguard_target",
-        ]
-      }
-    } else {
-      if (_enable_multidex) {
-        # .jar already included in java_runtime_classpath.
-        _dex_sources = []
-      } else {
-        _dex_sources = [ _lib_dex_path ]
-      }
-      _dex_deps = [ ":$_java_target" ]
-    }
-
-    dex("$_final_dex_target_name") {
-      deps = _dex_deps + [ ":$_build_config_target" ]
-      input_jars = _dex_sources
-      output = _final_dex_path
-      enable_multidex = _enable_multidex
-
-      if (_enable_multidex) {
-        forward_variables_from(invoker, [ "negative_main_dex_globs" ])
-        extra_main_dex_proguard_config = _generated_proguard_main_dex_config
-        deps += [ ":$_compile_resources_target" ]
-      }
-
-      # All deps are already included in _dex_sources when proguard is used.
-      if (!_proguard_enabled) {
-        if (_enable_multidex) {
-          _dex_arg_key =
-              "${_rebased_build_config}:deps_info:java_runtime_classpath"
-        } else {
-          _dex_arg_key =
-              "${_rebased_build_config}:final_dex:dependency_dex_files"
-        }
-        build_config = _build_config
-        input_jars_file_arg = "@FileArg($_dex_arg_key)"
-      }
-
-      # http://crbug.com/725224. Fix for bots running out of memory.
-      use_pool = true
-    }
-
-    _native_libs_file_arg_dep = ":$_build_config_target"
-    _native_libs_file_arg = "@FileArg($_rebased_build_config:native:libraries)"
-    _secondary_abi_native_libs_file_arg_dep = ":$_build_config_target"
-    _secondary_abi_native_libs_file_arg =
-        "@FileArg($_rebased_build_config:native:secondary_abi_libraries)"
-    assert(_secondary_abi_native_libs_file_arg != "" &&
-           _secondary_abi_native_libs_file_arg_dep != "")  # Mark as used.
-
-    if (_native_libs_deps != [] && _pack_relocations) {
-      _prepare_native_target_name = "${_template_name}__prepare_native"
-      _native_libs_json = "$_gen_dir/packed-libs/filelist.json"
-      _rebased_native_libs_json = rebase_path(_native_libs_json, root_build_dir)
-      _native_libs_file_arg_dep = ":$_prepare_native_target_name"
-      _native_libs_file_arg = "@FileArg($_rebased_native_libs_json:files)"
-
-      pack_relocation_section(_prepare_native_target_name) {
-        file_list_json = _native_libs_json
-        libraries_filearg =
-            "@FileArg(${_rebased_build_config}:native:libraries)"
-        inputs = [
-          _build_config,
-        ]
-
-        deps = _native_libs_deps
-        deps += [ ":$_build_config_target" ]
-      }
-      if (_secondary_abi_native_libs_deps != []) {
-        _prepare_native_target_name =
-            "${_template_name}_secondary_abi__prepare_native"
-        _native_libs_json =
-            "$_gen_dir/packed-libs/$android_secondary_abi_cpu/filelist.json"
-        _rebased_native_libs_json =
-            rebase_path(_native_libs_json, root_build_dir)
-        _secondary_abi_native_libs_file_arg_dep =
-            ":$_prepare_native_target_name"
-        _secondary_abi_native_libs_file_arg =
-            "@FileArg($_rebased_native_libs_json:files)"
-
-        pack_relocation_section(_prepare_native_target_name) {
-          file_list_json = _native_libs_json
-          libraries_filearg = "@FileArg(${_rebased_build_config}:native:secondary_abi_libraries)"
-          inputs = [
-            _build_config,
-          ]
-
-          deps = _secondary_abi_native_libs_deps
-          deps += [ ":$_build_config_target" ]
-        }
-      }
-    }
-
-    _extra_native_libs = _sanitizer_runtimes
-    _extra_native_libs_deps = []
-    assert(_extra_native_libs_deps == [])  # Mark as used.
-    _extra_native_libs_even_when_incremental = []
-    if (_native_libs_deps != []) {
-      if (_use_chromium_linker) {
-        _extra_native_libs +=
-            [ "$root_shlib_dir/libchromium_android_linker$shlib_extension" ]
-        _extra_native_libs_deps +=
-            [ "//base/android/linker:chromium_android_linker" ]
-      }
-
-      _create_stack_script_rule_name = "${_template_name}__stack_script"
-      _final_deps += [ ":${_create_stack_script_rule_name}" ]
-      stack_script(_create_stack_script_rule_name) {
-        stack_target_name = invoker.target_name
-        deps = _native_libs_deps
-        if (_native_libs_deps != [] && _pack_relocations) {
-          packed_libraries = _native_libs_file_arg
-          deps += [ _native_libs_file_arg_dep ]
-        }
-      }
-    }
-
-    if (defined(invoker.loadable_modules) && invoker.loadable_modules != []) {
-      _extra_native_libs_even_when_incremental += invoker.loadable_modules
-    }
-
-    # Generate .apk.jar.info files if needed.
-    if (defined(invoker.apk_name)) {
-      _apk_jar_info_target = "${target_name}__apk_jar_info"
-      create_apk_jar_info(_apk_jar_info_target) {
-        output = "$root_build_dir/size-info/${invoker.apk_name}.apk.jar.info"
-        apk_build_config = _build_config
-        deps = [
-          ":$_build_config_target",
-          ":$_java_target",
-        ]
-      }
-      _deps += [ ":$_apk_jar_info_target" ]
-    }
-
-    _final_deps += [ ":${_template_name}__create" ]
-    create_apk("${_template_name}__create") {
-      forward_variables_from(invoker,
-                             [
-                               "alternative_android_sdk_jar",
-                               "public_deps",
-                               "secondary_native_libs",
-                               "shared_resources",
-                               "uncompress_shared_libraries",
-                               "write_asset_list",
-                             ])
-      packaged_resources_path = _packaged_resources_path
-      apk_path = _final_apk_path
-      android_manifest = _android_manifest
-      assets_build_config = _build_config
-      base_path = _base_path
-      dex_path = _final_dex_path
-      load_library_from_apk = _load_library_from_apk
-
-      # This is used to generate *.apk.pak.info files.
-      apk_name = invoker.apk_name
-
-      keystore_name = _keystore_name
-      keystore_path = _keystore_path
-      keystore_password = _keystore_password
-
-      # Incremental apk does not use native libs nor final dex.
-      incremental_deps = _deps + [
-                           ":$_merge_manifest_target",
-                           ":$_build_config_target",
-                           ":$_compile_resources_target",
-                         ]
-
-      # This target generates the input file _all_resources_zip_path.
-      deps = _deps + [
-               ":$_merge_manifest_target",
-               ":$_build_config_target",
-               ":$_final_dex_target_name",
-               ":$_compile_resources_target",
-             ]
-
-      if (_native_libs_deps != [] ||
-          _extra_native_libs_even_when_incremental != []) {
-        deps += _native_libs_deps + _extra_native_libs_deps +
-                [ _native_libs_file_arg_dep ]
-        native_libs_filearg = _native_libs_file_arg
-        native_libs = _extra_native_libs
-        native_libs_even_when_incremental =
-            _extra_native_libs_even_when_incremental
-      }
-
-      if (_secondary_abi_native_libs_deps != []) {
-        deps += _secondary_abi_native_libs_deps +
-                [ _secondary_abi_native_libs_file_arg_dep ]
-        secondary_abi_native_libs_filearg = _secondary_abi_native_libs_file_arg
-      }
-
-      # Placeholders necessary for some older devices.
-      # http://crbug.com/395038
-      forward_variables_from(invoker,
-                             [
-                               "native_lib_placeholders",
-                               "secondary_native_lib_placeholders",
-                             ])
-    }
-
-    _write_installer_json_rule_name = "${_template_name}__incremental_json"
-    action(_write_installer_json_rule_name) {
-      script = "//build/android/incremental_install/write_installer_json.py"
-      depfile = "$target_gen_dir/$target_name.d"
-      deps = [
-        _native_libs_file_arg_dep,
-      ]
-
-      outputs = [
-        _incremental_install_json_path,
-      ]
-
-      _rebased_apk_path_no_ext =
-          rebase_path(_final_apk_path_no_ext, root_build_dir)
-      _rebased_incremental_install_json_path =
-          rebase_path(_incremental_install_json_path, root_build_dir)
-      _rebased_depfile = rebase_path(depfile, root_build_dir)
-      _dex_arg_key = "${_rebased_build_config}:final_dex:dependency_dex_files"
-      args = [
-        "--apk-path=${_rebased_apk_path_no_ext}_incremental.apk",
-        "--output-path=$_rebased_incremental_install_json_path",
-        "--dex-file=$_rebased_lib_dex_path",
-        "--dex-file-list=@FileArg($_dex_arg_key)",
-        "--depfile=$_rebased_depfile",
-      ]
-      if (_proguard_enabled) {
-        args += [ "--show-proguard-warning" ]
-      }
-      if (defined(_native_libs_file_arg)) {
-        args += [ "--native-libs=$_native_libs_file_arg" ]
-      }
-      if (_extra_native_libs != []) {
-        # Don't pass in _extra_native_libs_even_when_incremental, since these
-        # end up in the apk and are not side-loaded.
-        _rebased_extra_native_libs =
-            rebase_path(_extra_native_libs, root_build_dir)
-        args += [ "--native-libs=$_rebased_extra_native_libs" ]
-      }
-      if (_load_library_from_apk) {
-        args += [ "--dont-even-try=Incremental builds do not work with load_library_from_apk. Try setting is_component_build=true in your GN args." ]
-      }
-    }
-
-    _apk_operations = []
-    _incremental_apk_operations = []
-
-    # Generate the apk operations script.
-    if (!defined(invoker.create_apk_script) || invoker.create_apk_script) {
-      _apk_operations_target_name = "${target_name}__apk_operations"
-      action(_apk_operations_target_name) {
-        _generated_script = "$root_build_dir/bin/${invoker.target_name}"
-        script = "//build/android/gyp/create_apk_operations_script.py"
-        outputs = [
-          _generated_script,
-        ]
-        if (_proguard_enabled) {
-          # Required by logcat command.
-          data_deps = [
-            "//build/android/stacktrace:java_deobfuscate",
-          ]
-        }
-        args = [
-          "--script-output-path",
-          rebase_path(_generated_script, root_build_dir),
-          "--apk-path",
-          rebase_path(_final_apk_path, root_build_dir),
-          "--target-cpu=$target_cpu",
-        ]
-        if (defined(invoker.command_line_flags_file)) {
-          args += [
-            "--command-line-flags-file",
-            invoker.command_line_flags_file,
-          ]
-        }
-        if (_incremental_allowed) {
-          args += [
-            "--incremental-install-json-path",
-            rebase_path(_incremental_install_json_path, root_build_dir),
-          ]
-        }
-        if (_proguard_enabled) {
-          args += [
-            "--proguard-mapping-path",
-            rebase_path("$_final_apk_path.mapping", root_build_dir),
-          ]
-        }
-      }
-      _apk_operations += [ ":$_apk_operations_target_name" ]
-      _incremental_apk_operations += [ ":$_apk_operations_target_name" ]
-    }
-
-    group(target_name) {
-      if (_incremental_allowed && incremental_apk_by_default) {
-        deps = [
-          ":${target_name}_incremental",
-        ]
-        assert(_apk_operations != [] || true)  # Prevent "unused variable".
-      } else {
-        forward_variables_from(invoker,
-                               [
-                                 "data",
-                                 "data_deps",
-                               ])
-        public_deps = _final_deps
-
-        # Generate apk related operations at runtime.
-        public_deps += _apk_operations
-
-        # Make the proguard .mapping file easy to find by putting it beside the .apk.
-        if (_proguard_enabled) {
-          deps = [
-            ":$_copy_proguard_mapping_target",
-          ]
-        }
-      }
-    }
-
-    if (_incremental_allowed) {
-      group("${target_name}_incremental") {
-        forward_variables_from(invoker,
-                               [
-                                 "data",
-                                 "data_deps",
-                               ])
-        if (!defined(data_deps)) {
-          data_deps = []
-        }
-
-        # device/commands is used by the installer script to push files via .zip.
-        data_deps += [ "//build/android/pylib/device/commands" ] +
-                     _native_libs_deps + _extra_native_libs_deps
-
-        # Since the _incremental.apk does not include the .so nor .dex files
-        # from the actual target, but instead loads them at runtime, we need to
-        # explicitly depend on them here.
-        public_deps = [
-          ":${_java_target}",
-          ":${_template_name}__create_incremental",
-          ":${_write_installer_json_rule_name}",
-        ]
-
-        # Generate incremental apk related operations at runtime.
-        public_deps += _incremental_apk_operations
-      }
-    }
-  }
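For orientation, a minimal sketch of how the android_apk template removed above was typically invoked; the target, file, and dependency names are illustrative, and only variables that appear in the template are used:

  android_apk("foo_apk") {
    apk_name = "Foo"
    android_manifest = "AndroidManifest.xml"
    java_files = [ "android/org/chromium/foo/FooActivity.java" ]
    shared_libraries = [ ":foo" ]  # shared_library() target with the native code
    deps = [ ":foo_resources" ]    # android_resources()/android_library() deps
  }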
-
-  # Declare an Android instrumentation test apk
-  #
-  # This target creates an Android instrumentation test apk.
-  #
-  # Variables
-  #   android_manifest: Path to AndroidManifest.xml.
-  #   data_deps: List of dependencies needed at runtime. These will be built but
-  #     won't change the generated .apk in any way (in fact they may be built
-  #     after the .apk is).
-  #   deps: List of dependencies. All Android java resources and libraries in the
-  #     "transitive closure" of these dependencies will be included in the apk.
-  #     Note: this "transitive closure" actually only includes such targets if
-  #     they are depended on through android_library or android_resources targets
-  #     (and so not through builtin targets like 'action', 'group', etc).
-  #   java_files: List of .java files to include in the apk.
-  #   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
-  #      will be added to java_files and be included in this apk.
-  #   apk_name: Name for final apk.
-  #   final_apk_path: Path to final built apk. Default is
-  #     $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
-  #   shared_libraries: List of shared_library targets to bundle. If these
-  #     libraries depend on other shared_library targets, those dependencies will
-  #     also be included in the apk (e.g. for is_component_build).
-  #   apk_under_test: The apk being tested.
-  #   javac_args: Additional arguments to pass to javac.
-  #
-  # Example
-  #   instrumentation_test_apk("foo_test_apk") {
-  #     android_manifest = "AndroidManifest.xml"
-  #     apk_name = "FooTest"
-  #     apk_under_test = "Foo"
-  #     java_files = [
-  #       "android/org/chromium/foo/FooTestCase.java",
-  #       "android/org/chromium/foo/FooExampleTest.java",
-  #     ]
-  #     deps = [
-  #       ":foo_test_support_java"
-  #     ]
-  #   }
-  template("instrumentation_test_apk") {
-    assert(defined(invoker.apk_name))
-    testonly = true
-    _apk_target_name = "${target_name}__apk"
-    _test_runner_target_name = "${target_name}__test_runner_script"
-    _dist_ijar_path =
-        "$root_build_dir/test.lib.java/" + invoker.apk_name + ".jar"
-    _incremental_test_runner_target_name =
-        "${_test_runner_target_name}_incremental"
-    _incremental_test_name = "${invoker.target_name}_incremental"
-    if (incremental_apk_by_default) {
-      _incremental_test_runner_target_name = _test_runner_target_name
-      _incremental_test_name = invoker.target_name
-    }
-
-    if (!incremental_apk_by_default) {
-      test_runner_script(_test_runner_target_name) {
-        forward_variables_from(invoker,
-                               [
-                                 "additional_apks",
-                                 "apk_under_test",
-                                 "data",
-                                 "data_deps",
-                                 "deps",
-                                 "ignore_all_data_deps",
-                                 "proguard_enabled",
-                                 "public_deps",
-                               ])
-        test_name = invoker.target_name
-        test_type = "instrumentation"
-        apk_target = ":$_apk_target_name"
-        test_jar = _dist_ijar_path
-      }
-    }
-    test_runner_script(_incremental_test_runner_target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "additional_apks",
-                               "apk_under_test",
-                               "data",
-                               "data_deps",
-                               "deps",
-                               "ignore_all_data_deps",
-                               "public_deps",
-                             ])
-      test_name = _incremental_test_name
-      test_type = "instrumentation"
-      apk_target = ":$_apk_target_name"
-      test_jar = _dist_ijar_path
-      incremental_install = true
-    }
-
-    android_apk(_apk_target_name) {
-      deps = []
-      data_deps = []
-      forward_variables_from(invoker, "*")
-      deps += [ "//testing/android/broker:broker_java" ]
-      data_deps += [
-        "//build/android/pylib/device/commands",
-        "//tools/android/forwarder2",
-        "//tools/android/md5sum",
-      ]
-      if (defined(invoker.apk_under_test)) {
-        data_deps += [ invoker.apk_under_test ]
-      }
-      if (defined(invoker.additional_apks)) {
-        data_deps += invoker.additional_apks
-      }
-      if (defined(invoker.apk_under_test)) {
-        # Prevent a build_hooks_android_impl from existing in both the test apk
-        # as well as the apk_under_test.
-        no_build_hooks = true
-      }
-
-      if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) {
-        # When ProGuard is on, we use ProGuard to combine the under test java
-        # code and the test java code. This is to allow us to apply all ProGuard
-        # optimizations that we ship with, but not have them break tests. The
-        # apk under test will still have the same resources, assets, and
-        # manifest, all of which are the ones used in the tests.
-        if (!defined(invoker.proguard_configs)) {
-          proguard_configs = []
-        }
-        proguard_configs += [ "//testing/android/proguard_for_test.flags" ]
-        if (defined(final_apk_path)) {
-          _final_apk_path = final_apk_path
-        } else {
-          _final_apk_path = "$root_build_dir/apks/${apk_name}.apk"
-        }
-        data = [
-          "$_final_apk_path.mapping",
-        ]
-      }
-
-      dist_ijar_path = _dist_ijar_path
-      create_apk_script = false
-    }
-
-    group(target_name) {
-      if (incremental_apk_by_default) {
-        public_deps = [
-          ":${target_name}_incremental",
-        ]
-      } else {
-        public_deps = [
-          ":$_apk_target_name",
-          ":$_test_runner_target_name",
-
-          # Required by test runner to enumerate test list.
-          ":${_apk_target_name}_dist_ijar",
-        ]
-        if (defined(invoker.apk_under_test)) {
-          public_deps += [ invoker.apk_under_test ]
-        }
-      }
-
-      # Ensure unstripped libraries are included in runtime deps so that
-      # symbolization can be done.
-      deps = [
-        ":${_apk_target_name}__runtime_deps",
-        ":${_apk_target_name}__secondary_abi_runtime_deps",
-      ]
-      if (defined(invoker.apk_under_test)) {
-        _under_test_label =
-            get_label_info(invoker.apk_under_test, "label_no_toolchain")
-        deps += [
-          "${_under_test_label}__runtime_deps",
-          "${_under_test_label}__secondary_abi_runtime_deps",
-        ]
-      }
-    }
-
-    group("${target_name}_incremental") {
-      public_deps = [
-        ":$_incremental_test_runner_target_name",
-        ":${_apk_target_name}_dist_ijar",
-        ":${_apk_target_name}_incremental",
-      ]
-      if (defined(invoker.apk_under_test)) {
-        public_deps += [ "${invoker.apk_under_test}_incremental" ]
-      }
-    }
-  }
-
-  # Declare an Android gtest apk
-  #
-  # This target creates an Android apk for running gtest-based unittests.
-  #
-  # Variables
-  #   deps: Specifies the dependencies of this target. These will be passed to
-  #     the underlying android_apk invocation and should include the java and
-  #     resource dependencies of the apk.
-  #   shared_library: shared_library target that contains the unit tests.
-  #   apk_name: The name of the produced apk. If unspecified, it uses the name
-  #             of the shared_library target suffixed with "_apk"
-  #   use_default_launcher: Whether the default activity (NativeUnitTestActivity)
-  #     should be used for launching tests.
-  #   use_native_activity: Test implements ANativeActivity_onCreate().
-  #
-  # Example
-  #   unittest_apk("foo_unittests_apk") {
-  #     deps = [ ":foo_java", ":foo_resources" ]
-  #     shared_library = ":foo_unittests"
-  #   }
-  template("unittest_apk") {
-    _use_native_activity =
-        defined(invoker.use_native_activity) && invoker.use_native_activity
-    _android_manifest = "$target_gen_dir/$target_name/AndroidManifest.xml"
-    assert(invoker.shared_library != "")
-
-    # This trivial assert is needed in case android_manifest is defined,
-    # as otherwise _use_native_activity and _android_manifest would not be used.
-    assert(_use_native_activity != "" && _android_manifest != "")
-
-    if (!defined(invoker.android_manifest)) {
-      jinja_template("${target_name}_manifest") {
-        _native_library_name = get_label_info(invoker.shared_library, "name")
-        input = "//testing/android/native_test/java/AndroidManifest.xml.jinja2"
-        output = _android_manifest
-        variables = [
-          "is_component_build=${is_component_build}",
-          "native_library_name=${_native_library_name}",
-          "use_native_activity=${_use_native_activity}",
-        ]
-      }
-    }
-
-    android_apk(target_name) {
-      data_deps = []
-      forward_variables_from(invoker, "*")
-      testonly = true
-      create_apk_script = false
-
-      assert(!defined(invoker.proguard_enabled) || !invoker.proguard_enabled ||
-             invoker.proguard_configs != [])
-
-      if (!defined(apk_name)) {
-        apk_name = get_label_info(invoker.shared_library, "name")
-      }
-
-      if (!defined(android_manifest)) {
-        android_manifest_dep = ":${target_name}_manifest"
-        android_manifest = _android_manifest
-      }
-
-      final_apk_path = "$root_build_dir/${apk_name}_apk/${apk_name}-debug.apk"
-
-      if (!defined(use_default_launcher) || use_default_launcher) {
-        deps += [ "//testing/android/native_test:native_test_java" ]
-      }
-      shared_libraries = [ invoker.shared_library ]
-      deps += [
-        ":${target_name}__runtime_deps",
-        ":${target_name}__secondary_abi_runtime_deps",
-        "//base:base_java",
-        "//testing/android/reporter:reporter_java",
-      ]
-      data_deps += [
-        "//build/android/pylib/device/commands",
-        "//tools/android/md5sum",
-      ]
-      if (host_os == "linux") {
-        data_deps += [ "//tools/android/forwarder2" ]
-      }
-    }
-  }
-
-  # Generate .java files from .aidl files.
-  #
-  # This target will store the .java files in a srcjar and should be included in
-  # an android_library or android_apk's srcjar_deps.
-  #
-  # Variables
-  #   sources: Paths to .aidl files to compile.
-  #   import_include: Path to directory containing .java files imported by the
-  #     .aidl files.
-  #   interface_file: Preprocessed aidl file to import.
-  #
-  # Example
-  #   android_aidl("foo_aidl") {
-  #     import_include = "java/src"
-  #     sources = [
-  #       "java/src/com/foo/bar/FooBarService.aidl",
-  #       "java/src/com/foo/bar/FooBarServiceCallback.aidl",
-  #     ]
-  #   }
-  template("android_aidl") {
-    action(target_name) {
-      set_sources_assignment_filter([])
-      forward_variables_from(invoker, [ "testonly" ])
-
-      script = "//build/android/gyp/aidl.py"
-      sources = invoker.sources
-
-      _srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
-      _aidl_path = "${android_sdk_build_tools}/aidl"
-      _framework_aidl = "$android_sdk/framework.aidl"
-      _imports = [ _framework_aidl ]
-      if (defined(invoker.interface_file)) {
-        assert(invoker.interface_file != "")
-        _imports += [ invoker.interface_file ]
-      }
-
-      inputs = [ _aidl_path ] + _imports
-
-      depfile = "${target_gen_dir}/${target_name}.d"
-      outputs = [
-        _srcjar_path,
-      ]
-      _rebased_imports = rebase_path(_imports, root_build_dir)
-      args = [
-        "--depfile",
-        rebase_path(depfile, root_build_dir),
-        "--aidl-path",
-        rebase_path(_aidl_path, root_build_dir),
-        "--imports=$_rebased_imports",
-        "--srcjar",
-        rebase_path(_srcjar_path, root_build_dir),
-      ]
-      if (defined(invoker.import_include) && invoker.import_include != []) {
-        # TODO(cjhopman): aidl supports creating a depfile. We should be able to
-        # switch to constructing a depfile for the overall action from that
-        # instead of having all the .java files in the include paths as inputs.
-        _rebased_import_paths = []
-        foreach(_import_path, invoker.import_include) {
-          _rebased_import_path = []
-          _rebased_import_path = [ rebase_path(_import_path, root_build_dir) ]
-          _rebased_import_paths += _rebased_import_path
-          _java_files_build_rel = []
-          _java_files_build_rel =
-              exec_script("//build/android/gyp/find.py",
-                          [ "--pattern=*.java" ] + _rebased_import_path,
-                          "list lines")
-          inputs += rebase_path(_java_files_build_rel, ".", root_build_dir)
-        }
-        args += [ "--includes=$_rebased_import_paths" ]
-      }
-      args += rebase_path(sources, root_build_dir)
-    }
-  }
-
-  # Compile a protocol buffer to java.
-  #
-  # This generates java files from protocol buffers and creates an Android library
-  # containing the classes.
-  #
-  # Variables
-  #   sources (required)
-  #       Paths to .proto files to compile.
-  #
-  #   proto_path (required)
-  #       Root directory of .proto files.
-  #
-  #   generate_nano (optional, default false)
-  #       Whether to generate nano protos. If false, this will use the lite
-  #       proto generator. Nano protos are deprecated, so please use lite
-  #       protos for new proto libraries.
-  #
-  # Example:
-  #  proto_java_library("foo_proto_java") {
-  #    proto_path = "src/foo"
-  #    sources = [ "$proto_path/foo.proto" ]
-  #  }
-  template("proto_java_library") {
-    set_sources_assignment_filter([])
-    forward_variables_from(invoker, [ "testonly" ])
-    _generate_nano =
-        defined(invoker.generate_nano) && invoker.generate_nano == true
-
-    if (_generate_nano) {
-      # Use the legacy Android nano proto generator.
-      _protoc_dep =
-          "//third_party/android_protobuf:android_protoc($host_toolchain)"
-      _protoc_out_dir = get_label_info(_protoc_dep, "root_out_dir")
-      _protoc_bin = "$_protoc_out_dir/android_protoc"
-      _proto_runtime = "//third_party/android_protobuf:protobuf_nano_javalib"
-    } else {
-      # Use the regular proto library to generate lite protos.
-      _protoc_dep = "//third_party/protobuf:protoc($host_toolchain)"
-      _protoc_out_dir = get_label_info(_protoc_dep, "root_out_dir")
-      _protoc_bin = "$_protoc_out_dir/protoc"
-      _proto_runtime = "//third_party/protobuf:protobuf_lite_javalib"
-    }
-    _proto_path = invoker.proto_path
-    _template_name = target_name
-
-    action("${_template_name}__protoc_java") {
-      _srcjar_path = "$target_gen_dir/$target_name.srcjar"
-      script = "//build/protoc_java.py"
-
-      deps = [
-        _protoc_dep,
-      ]
-      if (defined(invoker.deps)) {
-        deps += invoker.deps
-      }
-
-      sources = invoker.sources
-      depfile = "$target_gen_dir/$target_name.d"
-      outputs = [
-        _srcjar_path,
-      ]
-      args = [
-               "--depfile",
-               rebase_path(depfile, root_build_dir),
-               "--protoc",
-               rebase_path(_protoc_bin, root_build_dir),
-               "--proto-path",
-               rebase_path(_proto_path, root_build_dir),
-               "--srcjar",
-               rebase_path(_srcjar_path, root_build_dir),
-             ] + rebase_path(sources, root_build_dir)
-      if (_generate_nano) {
-        args += [ "--nano" ]
-      }
-    }
-
-    android_library(target_name) {
-      chromium_code = false
-      java_files = []
-      srcjar_deps = [ ":${_template_name}__protoc_java" ]
-      deps = [
-        _proto_runtime,
-      ]
-    }
-  }
-
-  # Declare an Android library target for a prebuilt AAR.
-  #
-  # This target creates an Android library containing java code and Android
-  # resources. For libraries without resources, it will not generate
-  # corresponding android_resources targets.
-  #
-  # To avoid slowing down "gn gen", an associated .info file must be committed
-  # along with the .aar file. In order to create this file, define the target
-  # and then run once with the gn arg "update_android_aar_prebuilts = true".
-  #
-  # Variables
-  #   aar_path: Path to the AAR.
-  #   info_path: Path to the .aar.info file (generated via
-  #       update_android_aar_prebuilts GN arg).
-  #   proguard_configs: List of proguard configs to use in final apk step for
-  #       any apk that depends on this library.
-  #   ignore_aidl: Whether to ignore .aidl files found with the .aar.
-  #   ignore_assets: Whether to ignore assets found in the .aar.
-  #   ignore_native_libraries: Whether to ignore .so files found in the .aar.
-  #   create_srcjar: If false, does not create an R.java file.
-  #   TODO(jbudorick@): remove this argument after crbug.com/522043 is fixed.
-  #   requires_android: Whether this target can only be used for compiling
-  #       Android related targets.
-  #
-  # Example
-  #   android_aar_prebuilt("foo_java") {
-  #     aar_path = "foo.aar"
-  #   }
-  template("android_aar_prebuilt") {
-    _info_path = "$target_name.info"
-    if (defined(invoker.info_path)) {
-      _info_path = invoker.info_path
-    }
-    _output_path = "${target_gen_dir}/${target_name}"
-    _unpack_target_name = "${target_name}__unpack_aar"
-    _ignore_aidl = defined(invoker.ignore_aidl) && invoker.ignore_aidl
-    _ignore_assets = defined(invoker.ignore_assets) && invoker.ignore_assets
-    _ignore_native_libraries = defined(invoker.ignore_native_libraries) &&
-                               invoker.ignore_native_libraries
-
-    # Scan the AAR file and determine the resources and jar files.
-    # Some libraries might not have resources; others might have two jars.
-    if (update_android_aar_prebuilts) {
-      print("Writing " + rebase_path(_info_path, "//"))
-      exec_script("//build/android/gyp/aar.py",
-                  [
-                    "list",
-                    rebase_path(invoker.aar_path, root_build_dir),
-                    "--output",
-                    rebase_path(_info_path, root_build_dir),
-                  ])
-    }
-
-    # If "gn gen" is failing on the following line, you need to generate an
-    # .info file for your new target by running:
-    #   gn gen --args='target_os="android" update_android_aar_prebuilts=true' out/tmp
-    #   rm -r out/tmp
-    _scanned_files = read_file(_info_path, "scope")
-
-    assert(_ignore_aidl || _scanned_files.aidl == [],
-           "android_aar_prebuilt() aidl not yet supported." +
-               " Implement or use ignore_aidl = true." +
-               " http://crbug.com/644439")
-    assert(_ignore_assets || _scanned_files.assets == [],
-           "android_aar_prebuilt() assets not yet supported." +
-               " Implement or use ignore_assets = true." +
-               " http://crbug.com/643966")
-    assert(_ignore_native_libraries || !_scanned_files.has_native_libraries,
-           "android_aar_prebuilt() with .so files is not supported." +
-               " Use ignore_native_libraries = true to silence this error.")
-    assert(_scanned_files.has_classes_jar || _scanned_files.subjars == [])
-
-    action(_unpack_target_name) {
-      script = "//build/android/gyp/aar.py"  # Unzips the AAR
-      args = [
-        "extract",
-        rebase_path(invoker.aar_path, root_build_dir),
-        "--output-dir",
-        rebase_path(_output_path, root_build_dir),
-        "--assert-info-file",
-        rebase_path(_info_path, root_build_dir),
-      ]
-      inputs = [
-        invoker.aar_path,
-      ]
-      outputs = [
-        "${_output_path}/AndroidManifest.xml",
-      ]
-
-      if (_scanned_files.has_r_text_file) {
-        # Certain packages, in particular Play Services, have no R.txt even
-        # though its presence is mandated by the AAR spec. Such packages cause
-        # spurious rebuilds if this output is specified unconditionally.
-        outputs += [ "${_output_path}/R.txt" ]
-      }
-
-      if (_scanned_files.resources != []) {
-        outputs += get_path_info(
-                rebase_path(_scanned_files.resources, "", _output_path),
-                "abspath")
-      }
-      if (_scanned_files.has_classes_jar) {
-        outputs += [ "${_output_path}/classes.jar" ]
-      }
-      outputs +=
-          get_path_info(rebase_path(_scanned_files.subjars, "", _output_path),
-                        "abspath")
-      if (_scanned_files.has_proguard_flags) {
-        outputs += [ "${_output_path}/proguard.txt" ]
-      }
-    }
-
-    # Create the android_resources target for resources.
-    if (_scanned_files.resources != [] || _scanned_files.has_r_text_file ||
-        !_scanned_files.is_manifest_empty) {
-      _res_target_name = "${target_name}__res"
-      android_resources(_res_target_name) {
-        forward_variables_from(invoker,
-                               [
-                                 "create_srcjar",
-                                 "deps",
-                                 "testonly",
-                               ])
-        if (!defined(deps)) {
-          deps = []
-        }
-        deps += [ ":$_unpack_target_name" ]
-        resource_dirs = []
-        generated_resource_dirs = []
-        if (_scanned_files.resources != []) {
-          generated_resource_dirs += [ "${_output_path}/res" ]
-        }
-        generated_resource_files =
-            rebase_path(_scanned_files.resources, "", _output_path)
-        android_manifest_dep = ":$_unpack_target_name"
-        android_manifest = "${_output_path}/AndroidManifest.xml"
-        if (_scanned_files.has_r_text_file) {
-          r_text_file = "${_output_path}/R.txt"
-        }
-        v14_skip = true
-      }
-    }
-
-    # Create android_java_prebuilt target for extra jars within jars/.
-    _subjar_targets = []
-    foreach(_tuple, _scanned_files.subjar_tuples) {
-      _current_target = "${target_name}__subjar_${_tuple[0]}"
-      _subjar_targets += [ ":$_current_target" ]
-      java_prebuilt(_current_target) {
-        forward_variables_from(invoker,
-                               [
-                                 "jar_excluded_patterns",
-                                 "jar_included_patterns",
-                                 "requires_android",
-                               ])
-        deps = [
-          ":$_unpack_target_name",
-        ]
-        if (!defined(requires_android)) {
-          requires_android = true
-        }
-        supports_android = true
-        jar_path = "$_output_path/${_tuple[1]}"
-        _base_output_name = get_path_info(jar_path, "name")
-        output_name = "${invoker.target_name}-$_base_output_name"
-      }
-    }
-
-    # Create android_java_prebuilt target for classes.jar.
-    if (_scanned_files.has_classes_jar) {
-      _jar_target_name = "${target_name}__classes"
-      java_prebuilt(_jar_target_name) {
-        forward_variables_from(invoker,
-                               [
-                                 "deps",
-                                 "input_jars_paths",
-                                 "jar_excluded_patterns",
-                                 "jar_included_patterns",
-                                 "proguard_configs",
-                                 "requires_android",
-                                 "testonly",
-                               ])
-        if (!defined(deps)) {
-          deps = []
-        }
-        deps += _subjar_targets + [ ":$_unpack_target_name" ]
-        if (defined(_res_target_name)) {
-          deps += [ ":$_res_target_name" ]
-        }
-        if (!defined(requires_android)) {
-          requires_android = true
-        }
-        supports_android = true
-        jar_path = "$_output_path/classes.jar"
-        output_name = invoker.target_name
-
-        if (_scanned_files.has_proguard_flags) {
-          if (!defined(proguard_configs)) {
-            proguard_configs = []
-          }
-          proguard_configs += [ "$_output_path/proguard.txt" ]
-        }
-      }
-    }
-
-    java_group(target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "testonly",
-                               "visibility",
-                             ])
-      public_deps = [
-        ":$_unpack_target_name",
-      ]
-      deps = []
-      if (defined(_jar_target_name)) {
-        deps += [ ":$_jar_target_name" ]
-
-        # Although subjars are meant to be private, we add them as deps here
-        # because in practice they seem to contain classes required to be in the
-        # classpath.
-        deps += _subjar_targets
-      }
-      if (defined(_res_target_name)) {
-        deps += [ ":$_res_target_name" ]
-      }
-    }
-  }
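As a usage note for the removed template, the asserts above steer callers toward the ignore_* flags when an AAR bundles content the template does not handle; a hedged sketch with an illustrative target name and path:

  android_aar_prebuilt("some_sdk_java") {
    aar_path = "libs/some_sdk.aar"
    ignore_aidl = true              # .aidl entries in the .aar are not supported
    ignore_native_libraries = true  # bundled .so files are not supported
  }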
-}
-
-# Compatibility wrapper to toggle android_deps usage for a dependency.
-#
-# This target creates a wrapper for a dependency allowing it to be loaded
-# either from //third_party/android_deps or from an existing //third_party
-# (or other) target.
-#
-# Variables
-#   fallback_target: Target to use when the android_deps repo is not enabled.
-#   android_deps_target_name: Name of the target from the android_deps repo to
-#     use when the repo is enabled. If not set, the wrapper's target name will
-#     be used instead.
-#
-# Example
-#   prebuilt_wrapper("android_support_multidex_java") {
-#     android_deps_target_name = "com_android_support_multidex_java"
-#     fallback_target = "$android_support_library_package:$target_name"
-#   }
-template("prebuilt_wrapper") {
-  if (defined(invoker.android_deps_target_name)) {
-    _resolved_android_deps_target_name = invoker.android_deps_target_name
-  } else {
-    _resolved_android_deps_target_name = target_name
-  }
-
-  if (enable_android_deps_repository) {
-    _resolved_target =
-        "//third_party/android_deps:${_resolved_android_deps_target_name}"
-    assert(invoker.fallback_target != "")  # Mark as used.
-  } else {
-    _resolved_target = "${invoker.fallback_target}"
-    assert(_resolved_android_deps_target_name != "")  # Mark as used.
-  }
-
-  java_group(target_name) {
-    forward_variables_from(invoker, [ "testonly" ])
-    deps = [
-      _resolved_target,
-    ]
-  }
-}
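Following the example in the comment above, this is roughly what the wrapper's java_group forwards to under each setting (labels illustrative):

  # enable_android_deps_repository = true:
  #   deps = [ "//third_party/android_deps:com_android_support_multidex_java" ]
  # enable_android_deps_repository = false:
  #   deps = [ "$android_support_library_package:android_support_multidex_java" ]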
diff --git a/build/config/android/sdk.gni b/build/config/android/sdk.gni
deleted file mode 100644
index 2fe0400..0000000
--- a/build/config/android/sdk.gni
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# The default SDK release used by public builds. Value may differ in
-# internal builds.
-default_android_sdk_release = "o_mr1"
-
-# SDK releases against which public builds are supported.
-public_sdk_releases = [ "o_mr1" ]
diff --git a/build/config/arm.gni b/build/config/arm.gni
deleted file mode 100644
index abd4dd0..0000000
--- a/build/config/arm.gni
+++ /dev/null
@@ -1,135 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/v8_target_cpu.gni")
-import("//build/secondary/third_party/catapult/devil/devil_arm.gni")
-
-# These are primarily relevant in current_cpu == "arm" contexts, where
-# ARM code is being compiled. But they can also be relevant in other
-# contexts when the code will change its behavior based on the CPU it
-# wants to generate code for.
-if (current_cpu == "arm" || v8_current_cpu == "arm") {
-  declare_args() {
-    # Version of the ARM processor when compiling on ARM. Ignored on non-ARM
-    # platforms.
-    arm_version = 7
-
-    # The ARM architecture. This will be a string like "armv6" or "armv7-a".
-    # An empty string means to use the default for the arm_version.
-    arm_arch = ""
-
-    # The ARM floating point hardware. This will be a string like "neon" or
-    # "vfpv3". An empty string means to use the default for the arm_version.
-    arm_fpu = ""
-
-    # The ARM floating point mode. This is either the string "hard", "soft", or
-    # "softfp". An empty string means to use the default one for the
-    # arm_version.
-    arm_float_abi = ""
-
-    # The ARM variant-specific tuning mode. This will be a string like "armv6"
-    # or "cortex-a15". An empty string means to use the default for the
-    # arm_version.
-    arm_tune = ""
-
-    # Whether to use the neon FPU instruction set or not.
-    arm_use_neon = ""
-
-    # Whether to enable optional NEON code paths.
-    arm_optionally_use_neon = false
-
-    # Thumb is a reduced instruction set available on some ARM processors that
-    # has increased code density.
-    arm_use_thumb = true
-  }
-
-  assert(arm_float_abi == "" || arm_float_abi == "hard" ||
-         arm_float_abi == "soft" || arm_float_abi == "softfp")
-
-  if (arm_use_neon == "") {
-    if (current_os == "linux" && target_cpu != v8_target_cpu) {
-      # Don't use neon on V8 simulator builds as a default.
-      arm_use_neon = false
-    } else {
-      arm_use_neon = true
-    }
-  }
-
-  if (arm_version == 6) {
-    if (arm_arch == "") {
-      arm_arch = "armv6"
-    }
-    if (arm_tune != "") {
-      arm_tune = ""
-    }
-    if (arm_float_abi == "") {
-      arm_float_abi = "softfp"
-    }
-    if (arm_fpu == "") {
-      arm_fpu = "vfp"
-    }
-    arm_use_thumb = false
-    arm_use_neon = false
-  } else if (arm_version == 7) {
-    if (arm_arch == "") {
-      arm_arch = "armv7-a"
-    }
-    if (arm_tune == "") {
-      arm_tune = "generic-armv7-a"
-    }
-
-    if (arm_float_abi == "") {
-      if (current_os == "android") {
-        arm_float_abi = "softfp"
-      } else if (target_os == "android") {
-        if (build_devil_arm_deps) {
-          # Raspberry Pis require "hard"
-          arm_float_abi = "hard"
-        } else {
-          arm_float_abi = "softfp"
-        }
-      } else if (current_os == "linux" && target_cpu != v8_target_cpu) {
-        # Default to the same as Android for V8 simulator builds.
-        arm_float_abi = "softfp"
-      } else {
-        arm_float_abi = "hard"
-      }
-    }
-
-    if (arm_fpu == "") {
-      if (arm_use_neon) {
-        arm_fpu = "neon"
-      } else {
-        arm_fpu = "vfpv3-d16"
-      }
-    }
-  } else if (arm_version == 8) {
-    if (arm_arch == "") {
-      arm_arch = "armv8-a"
-    }
-    if (arm_tune == "") {
-      arm_tune = "generic-armv8-a"
-    }
-
-    if (arm_float_abi == "") {
-      if (current_os == "android" || target_os == "android") {
-        arm_float_abi = "softfp"
-      } else {
-        arm_float_abi = "hard"
-      }
-    }
-
-    if (arm_fpu == "") {
-      if (arm_use_neon) {
-        arm_fpu = "neon"
-      } else {
-        arm_fpu = "vfpv3-d16"
-      }
-    }
-  }
-} else if (current_cpu == "arm64" || v8_current_cpu == "arm64") {
-  # arm64 supports only "hard".
-  arm_float_abi = "hard"
-  arm_use_neon = true
-}
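A hedged args.gn sketch for a 32-bit ARM hard-float build using the arguments declared in the removed arm.gni (values illustrative; anything left unset falls back to the defaults computed above):

  target_cpu = "arm"
  arm_version = 7
  arm_float_abi = "hard"
  arm_use_neon = true  # leaves arm_fpu defaulting to "neon" per the logic above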
diff --git a/build/config/c++/c++.gni b/build/config/c++/c++.gni
deleted file mode 100644
index 85ffde0..0000000
--- a/build/config/c++/c++.gni
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/sanitizers/sanitizers.gni")
-
-declare_args() {
-  # Use libc++ (buildtools/third_party/libc++ and
-  # buildtools/third_party/libc++abi) instead of libstdc++ as the standard
-  # library.
-  # Don't check in changes that set this to false for more platforms; doing so
-  # is not supported.
-  use_custom_libcxx =
-      is_msan || is_fuchsia ||
-      (is_linux &&
-       (!is_chromeos || default_toolchain != "//build/toolchain/cros:target"))
-
-  # Use libc++ instead of libstdc++ when using the host_cpu toolchain, even if
-  # use_custom_libcxx is false. This is useful for cross-compiles where a custom
-  # toolchain for the target_cpu has been set as the default toolchain, but
-  # use_custom_libcxx should still be true when building for the host.  The
-  # expected usage is to set use_custom_libcxx=false and
-  # use_custom_libcxx_for_host=true in the passed in buildargs.
-  use_custom_libcxx_for_host = false
-
-  # ASan, MSan and TSan builds need to override operator new, operator delete,
-  # and some exception handling symbols, so libc++ must be a shared library to
-  # prevent duplicate symbol errors when linking.
-  # Additionally, -fsanitize=vptr requires libc++ to be a shared library
-  # because the ubsan runtime library that implements -fsanitize=vptr calls
-  # dynamic_cast with the ABI type info classes, which won't return the right
-  # answer if each DSO has its own copy of the ABI classes.
-  libcpp_is_static = !is_component_build && !is_asan && !is_msan && !is_tsan &&
-                     !is_ubsan && !is_ubsan_security && !is_ubsan_vptr
-}
-
-use_custom_libcxx =
-    use_custom_libcxx || (use_custom_libcxx_for_host && current_cpu == host_cpu)
-use_custom_libcxx = use_custom_libcxx && !is_nacl
-
-libcxx_prefix = "//buildtools/third_party/libc++/trunk"
-libcxxabi_prefix = "//buildtools/third_party/libc++abi/trunk"
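The use_custom_libcxx_for_host comment above spells out the expected cross-compile usage; as an args.gn sketch:

  use_custom_libcxx = false           # target toolchain keeps the default C++ library
  use_custom_libcxx_for_host = true   # host-toolchain builds still use libc++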
diff --git a/build/config/chrome_build.gni b/build/config/chrome_build.gni
deleted file mode 100644
index 4bb4a04..0000000
--- a/build/config/chrome_build.gni
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-declare_args() {
-  # Select the desired branding flavor. False means normal Chromium branding,
-  # true means official Google Chrome branding (requires extra Google-internal
-  # resources).
-  is_chrome_branded = false
-
-  # Break chrome.dll into multiple pieces based on process type. Only available
-  # on Windows.
-  is_multi_dll_chrome = is_win && !is_component_build
-
-  # Turn this on to generate order files. See
-  # https://chromium.googlesource.com/chromium/src/+/master/docs/win_order_files.md
-  generate_order_files = false
-}
-
-# Refers to the subdirectory for branding in various places including
-# chrome/app/theme.
-if (is_chrome_branded) {
-  branding_path_component = "google_chrome"
-} else {
-  branding_path_component = "chromium"
-}
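For reference, branding_path_component is the knob consumers use to pick the theme subdirectory; a hypothetical consumer (path and file name are illustrative):

  # With is_chrome_branded = true, branding_path_component == "google_chrome",
  # so a target might reference e.g.:
  #   sources = [ "//chrome/app/theme/$branding_path_component/product_logo.png" ]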
diff --git a/build/config/chromecast/BUILD.gn b/build/config/chromecast/BUILD.gn
deleted file mode 100644
index c8b2989..0000000
--- a/build/config/chromecast/BUILD.gn
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/chromecast_build.gni")
-
-assert(is_chromecast)
-
-config("static_config") {
-  if (!is_clang) {
-    ldflags = [
-      # Don't allow visible symbols from libraries that contain
-      # assembly code with symbols that aren't hidden properly.
-      # http://b/26390825
-      "-Wl,--exclude-libs=libffmpeg.a",
-    ]
-
-    if (!is_android) {
-      ldflags += [
-        # We want to statically link libstdc++/libgcc on Linux.
-        # (On Android, libstdc++ and libgcc aren't used.)
-        "-static-libstdc++",
-        "-static-libgcc",
-      ]
-    }
-  }
-}
-
-config("ldconfig") {
-  visibility = [ ":*" ]
-
-  # Chromecast executables depend on several shared libraries in
-  # /oem_cast_shlib, $ORIGIN, and $ORIGIN/lib. Add these rpaths to each binary.
-  # This is explicitly disabled in Chrome for security reasons (see comments in
-  # //build/config/gcc/BUILD.gn), but necessary on Chromecast so that OEM's may
-  # override the default libraries shipped in the Cast receiver package.
-  ldflags = [
-    "-Wl,-rpath=/oem_cast_shlib",
-    "-Wl,-rpath=\$ORIGIN/lib",
-    "-Wl,-rpath=\$ORIGIN",
-  ]
-
-  # Binaries which don't live in the same directory as Chrome component
-  # libraries may still depend on them. Explicitly add the component library
-  # directory to the rpath for the component build.
-  if (is_component_build) {
-    ldflags += [ "-Wl,-rpath=/system/chrome" ]
-  }
-}
-
-config("executable_config") {
-  configs = [ ":ldconfig" ]
-
-  if (!is_clang && current_cpu == "arm") {
-    ldflags = [
-      # Export libstdc++ and libgcc symbols to force shlibs to refer to these
-      # symbols from the executable.
-      "-Wl,--export-dynamic",
-
-      "-lm",  # stdlibc++ requires math.h
-
-      # In case we redefined libstdc++ symbols (e.g. tc_malloc)
-      "-Wl,--allow-multiple-definition",
-
-      "-Wl,--whole-archive",
-      "-l:libstdc++.a",
-      "-l:libgcc.a",
-      "-Wl,--no-whole-archive",
-    ]
-
-    # Despite including libstdc++/libgcc archives, we still need to specify
-    # static linking for them in order to prevent the executable from having a
-    # dynamic dependency on them.
-    configs += [ ":static_config" ]
-  }
-}
-
-# Shared libraries should not have RPATH or RUNPATH set. This allows the
-# shared libs to inherit RPATH from the parent executable that is loading
-# the shared library. (See internal b/37514052 for more details.)
-config("shared_library_config") {
-  if (current_cpu == "arm") {
-    configs = [ ":static_config" ]
-  }
-}
diff --git a/build/config/chromecast_build.gni b/build/config/chromecast_build.gni
deleted file mode 100644
index d4869d7..0000000
--- a/build/config/chromecast_build.gni
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# The args declared in this file should be referenced by components outside of
-# //chromecast. Args needed only in //chromecast should be declared in
-# //chromecast/chromecast.gni.
-declare_args() {
-  # Set this true for a Chromecast build. Chromecast builds are supported on
-  # Linux and Android.
-  is_chromecast = false
-
-  # Set this true for an audio-only Chromecast build.
-  is_cast_audio_only = false
-}
-
-# Note(slan): This arg depends on the value of is_chromecast, and thus must be
-# declared in a separate block. These blocks can be combined when/if
-# crbug.com/542846 is resolved.
-declare_args() {
-  # True if the Chromecast build is targeted at desktop Linux. This type of
-  # build is useful for testing and development, but currently supports only a
-  # subset of Cast functionality. Though this defaults to true for x86 Linux
-  # devices, it should be overridden manually for an embedded x86 build.
-  # TODO(slan): Remove instances of this when x86 is a fully supported platform.
-  is_cast_desktop_build = is_chromecast && target_os == "linux" &&
-                          (target_cpu == "x86" || target_cpu == "x64")
-}
-
-# Assert that Chromecast is being built for a supported platform.
-assert(is_linux || is_android || is_fuchsia || !is_chromecast,
-       "Chromecast builds are not supported on $target_os")
-
-# Assert that is_cast_audio_only and is_cast_desktop_build are both false on a
-# non-Chromecast build.
-assert(is_chromecast || (!is_cast_audio_only && !is_cast_desktop_build))
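
As the Note(slan) comment above says, an arg whose default depends on another arg has to live in a later declare_args() block so the earlier block can be evaluated (and overridden) first. A minimal sketch of that two-block pattern with hypothetical arg names:

  declare_args() {
    # Primary switch, overridable from args.gn.
    enable_foo = false
  }

  declare_args() {
    # Derived default; must be in a second block because it reads enable_foo.
    enable_foo_logging = enable_foo && is_debug
  }
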
diff --git a/build/config/chromeos/rules.gni b/build/config/chromeos/rules.gni
deleted file mode 100644
index 5f1ece8..0000000
--- a/build/config/chromeos/rules.gni
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-assert(is_chromeos)
-
-declare_args() {
-  cros_board = getenv("SDK_BOARD")
-  cros_sdk_version = getenv("SDK_VERSION")
-}
-
-template("generate_vm_runner_script") {
-  _cache_path_prefix =
-      "//build/cros_cache/chrome-sdk/tarballs/${cros_board}+${cros_sdk_version}"
-  _vm_image_path = "${_cache_path_prefix}+chromiumos_qemu_image.tar.xz/"
-  _qemu_dir = "${_cache_path_prefix}+app-emulation/"
-
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "testonly",
-                             "need_toolchain",
-                           ])
-
-    script = "//build/chromeos/create_vm_test_script.py"
-
-    outputs = [
-      invoker.generated_script,
-    ]
-
-    data = [
-      # We use android test-runner's results libs to construct gtest output
-      # json.
-      "//build/android/pylib/__init__.py",
-      "//build/android/pylib/base/",
-      "//build/android/pylib/results/",
-      invoker.generated_script,
-      "//build/chromeos/",
-      "//build/cros_cache/chrome-sdk/misc/",
-
-      # The LKGM file controls what version of the VM image to download. Add it
-      # as data here so that changes to it will trigger analyze.
-      "//chromeos/CHROMEOS_LKGM",
-      "//third_party/chromite/",
-      _vm_image_path,
-      _qemu_dir,
-    ]
-    if (defined(need_toolchain) && need_toolchain) {
-      data += [ "${_cache_path_prefix}+target_toolchain/" ]
-    }
-
-    # Required arguments used at build time by the runner script generator.
-    args = [
-      "--script-output-path",
-      rebase_path(invoker.generated_script, root_build_dir),
-      "--cros-cache",
-      rebase_path("//build/cros_cache/", root_build_dir),
-      "--board",
-      cros_board,
-    ]
-
-    # When --test-exe is specified, run_vm_test will push the exe to the VM and
-    # execute it. Otherwise it wraps a host-side command and just takes care of
-    # launching & tearing down the VM.
-    if (defined(invoker.test_exe)) {
-      args += [
-        "--test-exe",
-        rebase_path(invoker.test_exe, root_build_dir),
-        "--output-directory",
-        rebase_path(root_out_dir, root_build_dir),
-      ]
-      if (defined(invoker.runtime_deps_file)) {
-        args += [
-          "--runtime-deps-path",
-          rebase_path(invoker.runtime_deps_file, root_build_dir),
-        ]
-      }
-    }
-  }
-}
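
generate_vm_runner_script above is an instance of the common template-over-action() pattern: the template forwards variables from the invoker, declares the generated file as an output, and passes build-time arguments to a script. A stripped-down sketch of the same pattern, with a hypothetical helper script and target name:

  template("generate_runner_script") {
    action(target_name) {
      forward_variables_from(invoker, [ "testonly" ])
      script = "//tools/make_runner.py"  # hypothetical generator script
      outputs = [ invoker.generated_script ]
      args = [
        "--output",
        rebase_path(invoker.generated_script, root_build_dir),
      ]
    }
  }

  generate_runner_script("demo_runner") {
    generated_script = "$root_build_dir/bin/run_demo"
  }
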
diff --git a/build/config/clang/BUILD.gn b/build/config/clang/BUILD.gn
deleted file mode 100644
index 11dba35..0000000
--- a/build/config/clang/BUILD.gn
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("clang.gni")
-
-config("find_bad_constructs") {
-  if (clang_use_chrome_plugins) {
-    cflags = []
-
-    # On Windows, the plugin is built directly into clang, so there's
-    # no need to load it dynamically.
-    if (host_os == "mac") {
-      cflags += [
-        "-Xclang",
-        "-load",
-        "-Xclang",
-        rebase_path("${clang_base_path}/lib/libFindBadConstructs.dylib",
-                    root_build_dir),
-      ]
-    } else if (host_os == "linux") {
-      cflags += [
-        "-Xclang",
-        "-load",
-        "-Xclang",
-        rebase_path("${clang_base_path}/lib/libFindBadConstructs.so",
-                    root_build_dir),
-      ]
-    }
-
-    cflags += [
-      "-Xclang",
-      "-add-plugin",
-      "-Xclang",
-      "find-bad-constructs",
-    ]
-
-    cflags += [
-      "-Xclang",
-      "-plugin-arg-find-bad-constructs",
-      "-Xclang",
-      "enforce-in-thirdparty-webkit",
-    ]
-
-    # TODO(dcheng): remove this once the plugin is updated and rolled again.
-    cflags += [
-      "-Xclang",
-      "-plugin-arg-find-bad-constructs",
-      "-Xclang",
-      "check-enum-max-value",
-    ]
-
-    if (is_linux || is_android || is_fuchsia) {
-      cflags += [
-        "-Xclang",
-        "-plugin-arg-find-bad-constructs",
-        "-Xclang",
-        "check-ipc",
-      ]
-    }
-  }
-}
-
-# Enables some extra Clang-specific warnings. Some third-party code won't
-# compile with these, so such targets may want to remove this config.
-config("extra_warnings") {
-  cflags = [
-    "-Wheader-hygiene",
-
-    # Warns when a const char[] is converted to bool.
-    "-Wstring-conversion",
-
-    "-Wtautological-overlap-compare",
-  ]
-}
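
The find_bad_constructs config above threads every plugin option through -Xclang so the driver hands it to the frontend. A reduced sketch of loading a plugin only on Linux hosts, with a hypothetical plugin library and name:

  config("my_plugin") {
    cflags = []
    if (host_os == "linux") {
      cflags += [
        "-Xclang",
        "-load",
        "-Xclang",
        rebase_path("//third_party/plugins/libMyPlugin.so", root_build_dir),
        "-Xclang",
        "-add-plugin",
        "-Xclang",
        "my-plugin",
      ]
    }
  }
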
diff --git a/build/config/clang/clang.gni b/build/config/clang/clang.gni
deleted file mode 100644
index 2c2d76f..0000000
--- a/build/config/clang/clang.gni
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/toolchain/toolchain.gni")
-
-default_clang_base_path = "//third_party/llvm-build/Release+Asserts"
-
-declare_args() {
-  # Indicates if the build should use the Chrome-specific plugins for enforcing
-  # coding guidelines, etc. Only used when compiling with Clang.
-  clang_use_chrome_plugins = is_clang && !is_nacl && !use_xcode_clang
-
-  clang_base_path = default_clang_base_path
-}
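
Because clang_use_chrome_plugins and clang_base_path are declared with declare_args(), they can be overridden per build directory. A hedged example of what an args.gn override might look like, with a hypothetical local toolchain path:

  # out/mybuild/args.gn (hypothetical)
  is_clang = true
  clang_base_path = "/opt/llvm"      # local toolchain, assumed to exist
  clang_use_chrome_plugins = false   # the plugin is not built for it
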
diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn
deleted file mode 100644
index 83337d5..0000000
--- a/build/config/compiler/BUILD.gn
+++ /dev/null
@@ -1,2229 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/android/config.gni")
-import("//build/config/c++/c++.gni")
-import("//build/config/chrome_build.gni")
-import("//build/config/chromecast_build.gni")
-import("//build/config/clang/clang.gni")
-import("//build/config/compiler/compiler.gni")
-import("//build/config/coverage/coverage.gni")
-import("//build/config/host_byteorder.gni")
-import("//build/config/ui.gni")
-import("//build/toolchain/cc_wrapper.gni")
-import("//build/toolchain/toolchain.gni")
-import("//build_overrides/build.gni")
-
-if (current_cpu == "arm" || current_cpu == "arm64") {
-  import("//build/config/arm.gni")
-}
-if (current_cpu == "mipsel" || current_cpu == "mips64el" ||
-    current_cpu == "mips" || current_cpu == "mips64") {
-  import("//build/config/mips.gni")
-}
-if (is_mac) {
-  import("//build/config/mac/symbols.gni")
-}
-if (is_ios) {
-  import("//build/config/ios/ios_sdk.gni")
-}
-if (is_nacl) {
-  # To keep NaCl variables out of builds that don't include NaCl, all
-  # variables defined in nacl/config.gni referenced here should be protected by
-  # is_nacl conditions.
-  import("//build/config/nacl/config.gni")
-}
-
-declare_args() {
-  # Default to warnings as errors for the default workflow, where we catch
-  # warnings with known toolchains. Allow overriding this e.g. for Chromium
-  # builds on Linux that could use a different version of the compiler.
-  # With GCC, warnings in non-Chromium code are never treated as errors.
-  treat_warnings_as_errors = true
-
-  # Normally, Android builds are lightly optimized, even for debug builds, to
-  # keep binary size down. Setting this flag to true disables such optimization.
-  android_full_debug = false
-
-  # Whether to use the binary binutils checked into third_party/binutils.
-  # These are not multi-arch so cannot be used except on x86 and x86-64 (the
-  # only two architectures that are currently checked in). Turn this off when
-  # you are using a custom toolchain and need to control -B in cflags.
-  linux_use_bundled_binutils =
-      linux_use_bundled_binutils_override && is_linux &&
-      (current_cpu == "x64" || current_cpu == "x86")
-  binutils_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
-                              root_build_dir)
-
-  # Compile in such a way as to make it possible for the profiler to unwind full
-  # stack frames. Setting this flag has a larger effect on the performance of
-  # the generated code than just setting profiling, but gives the profiler more
-  # information to analyze.
-  # Requires profiling to be set to true.
-  enable_full_stack_frames_for_profiling = false
-
-  # When we are going to use gold we need to find it.
-  # This is initialized below, after use_gold might have been overridden.
-  gold_path = false
-
-  if (is_win) {
-    # Whether the VS xtree header has been patched to disable warning 4702. If
-    # it has, then we don't need to disable 4702 (unreachable code warning).
-    # The patch is preapplied to the internal toolchain and hence all bots.
-    msvs_xtree_patched = false
-  }
-
-  # Enable fatal linker warnings. Building Chromium with certain versions
-  # of binutils can cause linker warnings.
-  # See: https://bugs.chromium.org/p/chromium/issues/detail?id=457359
-  fatal_linker_warnings = true
-
-  # Build with C++ RTTI enabled. Chromium builds without RTTI by default,
-  # but some sanitizers are known to require it, like CFI diagnostics
-  # and UBsan variants.
-  use_rtti = use_cfi_diag || is_ubsan_vptr || is_ubsan_security
-
-  # AFDO (Automatic Feedback Directed Optimizer) is a form of profile-guided
-  # optimization that GCC supports. It is used by ChromeOS in its official
-  # builds. To use it, set auto_profile_path to the path to a file containing
-  # the needed gcov profiling data.
-  auto_profile_path = ""
-
-  # Optimize for coverage guided fuzzing (balance between speed and number of
-  # branches)
-  optimize_for_fuzzing = false
-
-  # Optimize symbol files for maximizing goma cache hit rate. This is on by
-  # default only when goma is enabled on Linux because setting this to true may
-  # make it harder to debug binaries.
-  # See below reference for detail.
-  # https://chromium.googlesource.com/chromium/src/+/master/docs/linux_debugging.md#Source-level-debug-with-fdebug-prefix-map
-  strip_absolute_paths_from_debug_symbols = is_linux && use_goma
-
-  # Allow projects that wish to stay on C++11 to override Chromium's default.
-  use_cxx11 = false
-
-  # Strip the debug info from the symbol files in lib.unstripped to reduce size.
-  strip_debug_info = false
-
-  # Path to an AFDO profile to use while building with clang, if any. Empty
-  # implies none.
-  clang_sample_profile_path = ""
-
-  # Some configurations have default sample profiles. If this is true and
-  # clang_sample_profile_path is empty, we'll fall back to the default.
-  #
-  # We currently only have default profiles for Chromium in-tree, so we disable
-  # this by default for all downstream projects, since these profiles are likely
-  # nonsensical for said projects.
-  clang_use_default_sample_profile = build_with_chromium && is_official_build &&
-                                     (is_android || is_desktop_linux)
-
-  # Turn this on to have the compiler output extra timing information.
-  compiler_timing = false
-
-  # Set to true to pass --no-rosegment to lld. This is a workaround
-  # for a known issue in Valgrind,
-  # https://bugs.kde.org/show_bug.cgi?id=384727
-  ro_segment_workaround_for_valgrind = false
-
-  # Turn this on to use the ghash feature of lld for faster debug links on Windows.
-  # http://blog.llvm.org/2018/01/improving-link-time-on-windows-with.html
-  use_ghash = false
-}
-
-declare_args() {
-  # C++11 may not be an option if Android test infrastructure is used.
-  use_cxx11_on_android = use_cxx11
-}
-
-declare_args() {
-  # Set to true to use icf, Identical Code Folding.
-  #
-  # icf=all is broken in older golds, see
-  # https://sourceware.org/bugzilla/show_bug.cgi?id=17704
-  # See also https://crbug.com/663886
-  # `linux_use_bundled_binutils` is to avoid breaking Linux distros which may
-  # still have a buggy gold.
-  # chromeos binutils has been patched with the fix, so always use icf there.
-  # The bug only affects x86 and x64, so we can still use ICF when targeting
-  # other architectures.
-  #
-  # lld doesn't have the bug.
-  use_icf = (is_posix || is_fuchsia) && !using_sanitizer &&
-            !(is_android && use_order_profiling) &&
-            (use_lld ||
-             (use_gold &&
-              ((!is_android && linux_use_bundled_binutils) || is_chromeos ||
-               !(current_cpu == "x86" || current_cpu == "x64"))))
-}
-
-# Apply the default logic for these values if they were not set explicitly.
-if (gold_path == false) {
-  if (use_gold) {
-    gold_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
-                            root_build_dir)
-  } else {
-    gold_path = ""
-  }
-}
-
-if (use_debug_fission == "default") {
-  use_debug_fission = is_debug && !is_android && !is_win &&
-                      (use_gold || use_lld) && cc_wrapper == ""
-}
-
-# default_include_dirs ---------------------------------------------------------
-#
-# This is a separate config so that third_party code (which would not use the
-# source root and might have conflicting versions of some headers) can remove
-# this and specify their own include paths.
-config("default_include_dirs") {
-  include_dirs = [
-    "//",
-    root_gen_dir,
-  ]
-}
-
-# compiler ---------------------------------------------------------------------
-#
-# Base compiler configuration.
-#
-# See also "runtime_library" below for related stuff and a discussion about
-# where stuff should go. Put warning related stuff in the "warnings" config.
-
-config("compiler") {
-  asmflags = []
-  cflags = []
-  cflags_c = []
-  cflags_cc = []
-  cflags_objc = []
-  cflags_objcc = []
-  ldflags = []
-  defines = []
-  configs = []
-  inputs = []
-
-  # System-specific flags. If your compiler flags apply to one of the
-  # categories here, add it to the associated file to keep this shared config
-  # smaller.
-  if (is_win) {
-    configs += [ "//build/config/win:compiler" ]
-  } else if (is_android) {
-    configs += [ "//build/config/android:compiler" ]
-  } else if (is_linux) {
-    configs += [ "//build/config/linux:compiler" ]
-  } else if (is_nacl) {
-    configs += [ "//build/config/nacl:compiler" ]
-  } else if (is_mac) {
-    configs += [ "//build/config/mac:compiler" ]
-  } else if (is_ios) {
-    configs += [ "//build/config/ios:compiler" ]
-  } else if (is_fuchsia) {
-    configs += [ "//build/config/fuchsia:compiler" ]
-  } else if (current_os == "aix") {
-    configs += [ "//build/config/aix:compiler" ]
-  }
-
-  configs += [
-    # See the definitions below.
-    ":clang_revision",
-    ":compiler_cpu_abi",
-    ":compiler_codegen",
-  ]
-
-  # In general, Windows is totally different, but all the other builds share
-  # some common GCC configuration.
-  if (!is_win) {
-    # Common POSIX compiler flags setup.
-    # --------------------------------
-    cflags += [ "-fno-strict-aliasing" ]  # See http://crbug.com/32204
-
-    # Stack protection.
-    if (is_mac) {
-      # The strong variant of the stack protector significantly increases
-      # binary size, so only enable it in debug mode.
-      if (is_debug) {
-        cflags += [ "-fstack-protector-strong" ]
-      } else {
-        cflags += [ "-fstack-protector" ]
-      }
-    } else if ((is_posix && !is_chromeos && !is_nacl) || is_fuchsia) {
-      # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc supports it.
-      # See also https://crbug.com/533294
-      cflags += [ "--param=ssp-buffer-size=4" ]
-
-      # The x86 toolchain currently has problems with stack-protector.
-      if (is_android && current_cpu == "x86") {
-        cflags += [ "-fno-stack-protector" ]
-      } else if (current_os != "aix") {
-        # Not available on aix.
-        cflags += [ "-fstack-protector" ]
-      }
-    }
-
-    # Linker warnings.
-    if (fatal_linker_warnings && !(is_chromeos && current_cpu == "arm") &&
-        !(is_android && use_order_profiling) && !is_mac && !is_ios &&
-        current_os != "aix") {
-      # TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
-      # TODO(lizeb,pasko): Fix link errors when linking with order_profiling=1
-      # crbug.com/485542
-      ldflags += [ "-Wl,--fatal-warnings" ]
-    }
-  } else {
-    cflags += [
-      # Assume UTF-8 by default to avoid code page dependencies.
-      "/utf-8",
-    ]
-    if (is_clang) {
-      # Don't look for includes in %INCLUDE%.
-      cflags += [ "/X" ]
-    }
-  }
-
-  # Eliminate build metadata (__DATE__, __TIME__ and __TIMESTAMP__) for
-  # deterministic builds. See https://crbug.com/314403
-  if (!is_official_build) {
-    if (is_win && !is_clang) {
-      cflags += [
-        "/wd4117",  # Trying to define or undefine a predefined macro.
-        "/D__DATE__=",
-        "/D__TIME__=",
-        "/D__TIMESTAMP__=",
-      ]
-    } else {
-      cflags += [
-        "-Wno-builtin-macro-redefined",
-        "-D__DATE__=",
-        "-D__TIME__=",
-        "-D__TIMESTAMP__=",
-      ]
-    }
-  }
-
-  if (is_clang && is_debug) {
-    # Allow comparing the address of references and 'this' against 0
-    # in debug builds. Technically, these can never be null in
-    # well-defined C/C++ and Clang can optimize such checks away in
-    # release builds, but they may be used in asserts in debug builds.
-    cflags_cc += [
-      "-Wno-undefined-bool-conversion",
-      "-Wno-tautological-undefined-compare",
-    ]
-  }
-
-  # Non-Mac Posix and Fuchsia compiler flags setup.
-  # -----------------------------------
-  if ((is_posix && !(is_mac || is_ios)) || is_fuchsia) {
-    if (enable_profiling) {
-      if (!is_debug) {
-        cflags += [ "-g" ]
-
-        if (enable_full_stack_frames_for_profiling) {
-          cflags += [
-            "-fno-inline",
-            "-fno-optimize-sibling-calls",
-          ]
-        }
-      }
-    }
-
-    if (is_official_build) {
-      # Explicitly pass --build-id to ld. Compilers used to always pass this
-      # implicitly but don't any more (in particular clang when built without
-      # ENABLE_LINKER_BUILD_ID=ON). The crash infrastructure does need a build
-      # id, so explicitly enable it in official builds. It's not needed in
-      # unofficial builds and computing it does slow down the link, so go with
-      # faster links in unofficial builds.
-      ldflags += [ "-Wl,--build-id=sha1" ]
-    }
-
-    if (!is_android) {
-      defines += [
-        # _FILE_OFFSET_BITS=64 should not be set on Android in order to maintain
-        # the behavior of the Android NDK from earlier versions.
-        # See https://android-developers.googleblog.com/2017/09/introducing-android-native-development.html
-        "_FILE_OFFSET_BITS=64",
-        "_LARGEFILE_SOURCE",
-        "_LARGEFILE64_SOURCE",
-      ]
-    }
-
-    if (!is_nacl) {
-      if (exclude_unwind_tables) {
-        cflags += [
-          "-fno-unwind-tables",
-          "-fno-asynchronous-unwind-tables",
-        ]
-        defines += [ "NO_UNWIND_TABLES" ]
-      } else {
-        cflags += [ "-funwind-tables" ]
-      }
-    }
-  }
-
-  # Linux/Android/Fuchsia common flags setup.
-  # ---------------------------------
-  if (is_linux || is_android || is_fuchsia) {
-    if (use_pic) {
-      cflags += [ "-fPIC" ]
-      ldflags += [ "-fPIC" ]
-    }
-
-    # Use pipes for communicating between sub-processes. Faster.
-    cflags += [ "-pipe" ]
-
-    ldflags += [
-      "-Wl,-z,noexecstack",
-      "-Wl,-z,now",
-      "-Wl,-z,relro",
-    ]
-    if (!using_sanitizer) {
-      ldflags += [
-        "-Wl,-z,defs",
-        "-Wl,--as-needed",
-      ]
-    }
-  }
-
-  # Linux-specific compiler flags setup.
-  # ------------------------------------
-  if (is_android && is_clang) {
-    _rebased_android_toolchain_root =
-        rebase_path(android_toolchain_root, root_build_dir)
-
-    # Let clang find the linker in the NDK.
-    ldflags += [ "--gcc-toolchain=$_rebased_android_toolchain_root" ]
-  }
-
-  if (((is_posix || is_fuchsia) && use_lld) ||
-      (target_os == "chromeos" && is_android)) {
-    # NOTE: Some Chrome OS builds globally disable LLD, but they also build some
-    # targets against Android toolchains which should use LLD. Therefore we
-    # explicitly select LLD in these cases.
-    #
-    # TODO(https://crbug.com/837095): This should be cleaned up if/when LLD can
-    # work properly for Chrome OS builds.
-    ldflags += [ "-fuse-ld=lld" ]
-    if (current_cpu == "arm64") {
-      # Reduce the page size from 65536 in order to reduce binary size slightly
-      # by shrinking the alignment gap between segments. This also causes all
-      # segments to be mapped adjacently, which breakpad relies on.
-      ldflags += [ "-Wl,-z,max-page-size=4096" ]
-    }
-  } else if (use_gold) {
-    ldflags += [ "-fuse-ld=gold" ]
-    if (!is_android) {
-      # On Android, this isn't needed.  gcc in the NDK knows to look next to
-      # it with -fuse-ld=gold, and clang gets a --gcc-toolchain flag passed
-      # above.
-      ldflags += [ "-B$gold_path" ]
-
-      if (linux_use_bundled_binutils) {
-        ldflags += [
-          # Experimentation found that using four linking threads
-          # saved ~20% of link time.
-          # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
-          # Only apply this to the target linker, since the host
-          # linker might not be gold, but isn't used much anyway.
-          "-Wl,--threads",
-          "-Wl,--thread-count=4",
-        ]
-      }
-    }
-
-    # TODO(thestig): Make this flag work with GN.
-    #if (!is_official_build && !is_chromeos && !(is_asan || is_lsan || is_tsan || is_msan)) {
-    #  ldflags += [
-    #    "-Wl,--detect-odr-violations",
-    #  ]
-    #}
-  } else if (linux_use_bundled_binutils) {
-    # Gold is the default linker for the bundled binutils so we explicitly
-    # enable the bfd linker when use_gold is not set.
-    ldflags += [ "-fuse-ld=bfd" ]
-  }
-
-  if (use_icf) {
-    ldflags += [ "-Wl,--icf=all" ]
-  }
-
-  if (linux_use_bundled_binutils) {
-    cflags += [ "-B$binutils_path" ]
-  }
-
-  if (is_linux) {
-    cflags += [ "-pthread" ]
-    # Do not use the -pthread ldflag here since it becomes a no-op
-    # when using -nodefaultlibs, which would cause an unused argument
-    # error.  "-lpthread" is added in //build/config:default_libs.
-  }
-
-  # Clang-specific compiler flags setup.
-  # ------------------------------------
-  if (is_clang) {
-    cflags += [ "-fcolor-diagnostics" ]
-
-    # Enable -fmerge-all-constants. This used to be the default in clang
-    # for over a decade. It makes clang non-conforming, but is fairly safe
-    # in practice and saves some binary size. We might want to consider
-    # disabling this (https://bugs.llvm.org/show_bug.cgi?id=18538#c13),
-    # but for now it looks like our build might rely on it
-    # (https://crbug.com/829795).
-    cflags += [ "-fmerge-all-constants" ]
-  }
-
-  if (use_lld) {
-    # TODO(thakis): Make the driver pass --color-diagnostics to the linker
-    # if -fcolor-diagnostics is passed to it, and pass -fcolor-diagnostics
-    # in ldflags instead.
-    if (is_win) {
-      # On Windows, we call the linker directly, instead of calling it through
-      # the driver.
-      ldflags += [ "--color-diagnostics" ]
-    } else {
-      ldflags += [ "-Wl,--color-diagnostics" ]
-    }
-  }
-
-  if (is_clang && !is_nacl && current_toolchain == host_toolchain &&
-      target_os != "chromeos") {
-    cflags += [
-      # TODO(hans): Remove this once Clang generates better optimized debug info
-      # by default. https://crbug.com/765793
-      "-Xclang",
-      "-mllvm",
-      "-Xclang",
-      "-instcombine-lower-dbg-declare=0",
-    ]
-  }
-
-  # Print absolute paths in diagnostics. There is no precedent for doing this
-  # on Linux/Mac (GCC doesn't support it), but MSVC does this with /FC and
-  # Windows developers rely on it (crbug.com/636109) so only do this on Windows.
-  if (msvc_use_absolute_paths && is_clang && is_win) {
-    cflags += [ "-fdiagnostics-absolute-paths" ]
-  }
-
-  # Makes builds independent of absolute file path.
-  # clang-cl (used if is_win) doesn't expose this flag.
-  # Currently disabled for nacl since its toolchain lacks this flag (too old).
-  # TODO(zforman): Once nacl's toolchain is updated, remove check.
-  if (is_clang && !is_nacl && !is_win && !is_mac && !is_ios &&
-      strip_absolute_paths_from_debug_symbols) {
-    # This is resolved to a path like "$HOME/chromium/src/out/Release".
-    # If the debug option is given, clang includes $cwd in the debug info.
-    # With this flag, builds produce reproducible obj files even when build
-    # directories like "out/feature_a" and "out/feature_b" are used, as long
-    # as the same files are built with the same compile flags otherwise.
-    # Other paths are already given as relative paths, so there is no need to
-    # normalize them with -fdebug-prefix-map.
-    absolute_path = rebase_path(root_out_dir)
-
-    cflags += [ "-fdebug-prefix-map=$absolute_path=." ]
-  }
-
-  # Tells the compiler not to use absolute paths when passing the default
-  # paths to the tools it invokes. We don't want this because we don't
-  # really need it and it can mess up the goma cache entries. It would
-  # be nice if it was on by default in clang, but it isn't.
-  #
-  # TODO(thakis): Figure out if this should be the default, and expose in
-  # clang-cl if not.
-  if (is_clang && !is_win && !is_nacl) {
-    cflags += [ "-no-canonical-prefixes" ]
-  }
-
-  # C11/C++11 compiler flags setup.
-  # ---------------------------
-  if (is_linux || is_android || (is_nacl && is_clang) || current_os == "aix") {
-    if (target_os == "android") {
-      cxx11_override = use_cxx11_on_android
-    } else {
-      cxx11_override = use_cxx11
-    }
-
-    # gnu11/gnu++11 instead of c11/c++11 is needed because some code uses typeof()
-    # (a GNU extension).
-    # TODO(thakis): Eventually switch this to c++11 instead,
-    # http://crbug.com/427584
-    cflags_c += [ "-std=gnu11" ]
-    if (cxx11_override) {
-      # Override Chromium's default for projects that wish to stay on C++11.
-      cflags_cc += [ "-std=gnu++11" ]
-    } else {
-      cflags_cc += [ "-std=gnu++14" ]
-    }
-  } else if (!is_win && !is_nacl) {
-    if (target_os == "android") {
-      cxx11_override = use_cxx11_on_android
-    } else {
-      cxx11_override = use_cxx11
-    }
-
-    # TODO(mcgrathr) - the NaCl GCC toolchain doesn't support either gnu11/gnu++11
-    # or c11/c++11; we technically don't need this toolchain any more, but there
-    # are still a few buildbots using it, so until those are turned off
-    # we need the !is_nacl clause and the (is_nacl && is_clang) clause, above.
-    cflags_c += [ "-std=c11" ]
-    if (cxx11_override) {
-      cflags_cc += [ "-std=c++11" ]
-    } else {
-      cflags_cc += [ "-std=c++14" ]
-    }
-  }
-
-  if (is_mac) {
-    # The system libc++ on Mac doesn't have aligned allocation in C++17.
-    defines += [ "_LIBCPP_HAS_NO_ALIGNED_ALLOCATION" ]
-    cflags_cc += [ "-stdlib=libc++" ]
-    ldflags += [ "-stdlib=libc++" ]
-  }
-
-  # Add flags for link-time optimization. These flags enable
-  # optimizations/transformations that require whole-program visibility at link
-  # time, so they need to be applied to all translation units, and we may end up
-  # with miscompiles if only part of the program is compiled with LTO flags. For
-  # that reason, we cannot allow targets to enable or disable these flags, for
-  # example by disabling the optimize configuration.
-  # TODO(pcc): Make this conditional on is_official_build rather than on gn
-  # flags for specific features.
-  if (!is_debug && use_thin_lto && current_toolchain == default_toolchain) {
-    assert(use_lld || target_os == "chromeos",
-           "gold plugin only supported with ChromeOS")
-
-    cflags += [ "-flto=thin" ]
-
-    # Limit the size of the ThinLTO cache to the lesser of 10% of available disk
-    # space, 10GB and 100000 files.
-    if (use_lld) {
-      cache_policy =
-          "cache_size=10%:cache_size_bytes=10g:cache_size_files=100000"
-    }
-    if (is_win) {
-      # This is a straight translation of the non-Windows flags below.
-      ldflags += [
-        "/opt:lldlto=0",
-        "/opt:lldltojobs=8",
-        "/lldltocache:" +
-            rebase_path("$root_out_dir/thinlto-cache", root_build_dir),
-        "/lldltocachepolicy:$cache_policy",
-      ]
-    } else {
-      ldflags += [ "-flto=thin" ]
-
-      # Limit the parallelism to avoid overly aggressive competition between
-      # linker jobs. This is still suboptimal compared to a potential dynamic
-      # resource allocation scheme, but it should be good enough.
-      if (use_lld) {
-        ldflags += [
-          "-Wl,--thinlto-jobs=8",
-          "-Wl,--thinlto-cache-dir=" +
-              rebase_path("$root_out_dir/thinlto-cache", root_build_dir),
-          "-Wl,--thinlto-cache-policy,$cache_policy",
-        ]
-      } else {
-        ldflags += [ "-Wl,-plugin-opt,jobs=8" ]
-        not_needed([ "cache_policy" ])
-      }
-    }
-
-    # Disable these optimizations for now because they increase binary size
-    # too much.
-    if (use_lld && (is_android || (is_linux && !is_chromeos))) {
-      ldflags += [ "-Wl,--lto-O0" ]
-    }
-
-    cflags += [ "-fwhole-program-vtables" ]
-    if (!is_win) {
-      ldflags += [ "-fwhole-program-vtables" ]
-    }
-
-    # Work-around for http://openradar.appspot.com/20356002
-    if (is_mac) {
-      ldflags += [ "-Wl,-all_load" ]
-    }
-
-    # This flag causes LTO to create an .ARM.attributes section with the correct
-    # architecture. This is necessary because LLD will refuse to link a program
-    # unless the architecture revision in .ARM.attributes is sufficiently new.
-    # TODO(pcc): The contents of .ARM.attributes should be based on the
-    # -march flag passed at compile time (see llvm.org/pr36291).
-    if (current_cpu == "arm") {
-      ldflags += [ "-march=$arm_arch" ]
-    }
-  }
-
-  if (compiler_timing) {
-    if (is_clang) {
-      if (is_win) {
-        cflags += [ "-Xclang" ]
-      }
-      cflags += [ "-ftime-report" ]
-    } else if (is_win) {
-      cflags += [
-        # "Documented" here:
-        # http://aras-p.info/blog/2017/10/23/Best-unknown-MSVC-flag-d2cgsummary/
-        "/d2cgsummary",
-      ]
-    }
-  }
-
-  # Pass flag to LLD to work around issue in Valgrind related to
-  # location of debug symbols.
-  if (use_lld && ro_segment_workaround_for_valgrind) {
-    ldflags += [ "-Wl,--no-rosegment" ]
-  }
-
-  # Pass the same C/C++ flags to the objective C/C++ compiler.
-  cflags_objc += cflags_c
-  cflags_objcc += cflags_cc
-
-  # Assign any flags set for the C compiler to asmflags so that they are sent
-  # to the assembler. The Windows assembler takes different types of flags
-  # so only do so for posix platforms.
-  if (is_posix || is_fuchsia) {
-    asmflags += cflags
-    asmflags += cflags_c
-  }
-}
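
Most of the branches in the compiler config above are selected purely by gn args. As an illustration, an args.gn along these lines would exercise the ThinLTO/lld path on a non-debug build (values are illustrative, not a recommendation):

  # out/lto/args.gn (hypothetical)
  is_debug = false
  use_lld = true
  use_thin_lto = true  # adds -flto=thin plus the lld cache flags above
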
-
-# This provides the basic options to select the target CPU and ABI.
-# It is factored out of "compiler" so that special cases can use this
-# without using everything that "compiler" brings in.  Options that
-# tweak code generation for a particular CPU do not belong here!
-# See "compiler_codegen", below.
-config("compiler_cpu_abi") {
-  cflags = []
-  ldflags = []
-  defines = []
-
-  if ((is_posix && !(is_mac || is_ios)) || is_fuchsia) {
-    # CPU architecture. We may or may not be doing a cross compile now, so for
-    # simplicity we always explicitly set the architecture.
-    if (current_cpu == "x64") {
-      cflags += [
-        "-m64",
-        "-march=x86-64",
-      ]
-      ldflags += [ "-m64" ]
-    } else if (current_cpu == "x86") {
-      cflags += [ "-m32" ]
-      ldflags += [ "-m32" ]
-      if (!is_nacl) {
-        cflags += [
-          "-msse2",
-          "-mfpmath=sse",
-          "-mmmx",
-        ]
-      }
-    } else if (current_cpu == "arm") {
-      if (is_clang && !is_android && !is_nacl) {
-        cflags += [ "--target=arm-linux-gnueabihf" ]
-        ldflags += [ "--target=arm-linux-gnueabihf" ]
-      }
-      if (!is_nacl) {
-        cflags += [
-          "-march=$arm_arch",
-          "-mfloat-abi=$arm_float_abi",
-        ]
-      }
-      if (arm_tune != "") {
-        cflags += [ "-mtune=$arm_tune" ]
-      }
-    } else if (current_cpu == "arm64") {
-      if (is_clang && !is_android && !is_nacl && !is_fuchsia) {
-        cflags += [ "--target=aarch64-linux-gnu" ]
-        ldflags += [ "--target=aarch64-linux-gnu" ]
-      }
-    } else if (current_cpu == "mipsel" && !is_nacl) {
-      if (custom_toolchain == "") {
-        if (is_clang) {
-          if (is_android) {
-            cflags += [ "--target=mipsel-linux-android" ]
-            ldflags += [ "--target=mipsel-linux-android" ]
-          } else {
-            cflags += [ "--target=mipsel-linux-gnu" ]
-            ldflags += [ "--target=mipsel-linux-gnu" ]
-          }
-        } else {
-          cflags += [ "-EL" ]
-          ldflags += [ "-EL" ]
-        }
-      }
-
-      if (mips_arch_variant == "r6") {
-        cflags += [ "-mno-odd-spreg" ]
-        ldflags += [ "-mips32r6" ]
-        if (is_clang) {
-          cflags += [
-            "-march=mipsel",
-            "-mcpu=mips32r6",
-          ]
-        } else {
-          cflags += [
-            "-mips32r6",
-            "-Wa,-mips32r6",
-          ]
-          if (is_android) {
-            ldflags += [ "-Wl,-melf32ltsmip" ]
-          }
-        }
-        if (mips_use_msa == true) {
-          cflags += [
-            "-mmsa",
-            "-mfp64",
-          ]
-        }
-      } else if (mips_arch_variant == "r2") {
-        ldflags += [ "-mips32r2" ]
-        if (is_clang) {
-          cflags += [
-            "-march=mipsel",
-            "-mcpu=mips32r2",
-          ]
-        } else {
-          cflags += [
-            "-mips32r2",
-            "-Wa,-mips32r2",
-          ]
-          if (mips_float_abi == "hard" && mips_fpu_mode != "") {
-            cflags += [ "-m$mips_fpu_mode" ]
-          }
-        }
-      } else if (mips_arch_variant == "r1") {
-        ldflags += [ "-mips32" ]
-        if (is_clang) {
-          cflags += [
-            "-march=mipsel",
-            "-mcpu=mips32",
-          ]
-        } else {
-          cflags += [
-            "-mips32",
-            "-Wa,-mips32",
-          ]
-        }
-      } else if (mips_arch_variant == "loongson3") {
-        defines += [ "_MIPS_ARCH_LOONGSON" ]
-        cflags += [
-          "-march=loongson3a",
-          "-mno-branch-likely",
-          "-Wa,-march=loongson3a",
-        ]
-      }
-
-      if (mips_dsp_rev == 1) {
-        cflags += [ "-mdsp" ]
-      } else if (mips_dsp_rev == 2) {
-        cflags += [ "-mdspr2" ]
-      }
-
-      cflags += [ "-m${mips_float_abi}-float" ]
-    } else if (current_cpu == "mips" && !is_nacl) {
-      if (custom_toolchain == "") {
-        if (is_clang) {
-          cflags += [ "--target=mips-linux-gnu" ]
-          ldflags += [ "--target=mips-linux-gnu" ]
-        } else {
-          cflags += [ "-EB" ]
-          ldflags += [ "-EB" ]
-        }
-      }
-
-      if (mips_arch_variant == "r6") {
-        cflags += [
-          "-mips32r6",
-          "-Wa,-mips32r6",
-        ]
-        if (mips_use_msa == true) {
-          cflags += [
-            "-mmsa",
-            "-mfp64",
-          ]
-        }
-      } else if (mips_arch_variant == "r2") {
-        cflags += [
-          "-mips32r2",
-          "-Wa,-mips32r2",
-        ]
-        if (mips_float_abi == "hard" && mips_fpu_mode != "") {
-          cflags += [ "-m$mips_fpu_mode" ]
-        }
-      } else if (mips_arch_variant == "r1") {
-        cflags += [
-          "-mips32",
-          "-Wa,-mips32",
-        ]
-      }
-
-      if (mips_dsp_rev == 1) {
-        cflags += [ "-mdsp" ]
-      } else if (mips_dsp_rev == 2) {
-        cflags += [ "-mdspr2" ]
-      }
-
-      cflags += [ "-m${mips_float_abi}-float" ]
-    } else if (current_cpu == "mips64el") {
-      if (custom_toolchain == "") {
-        if (is_clang) {
-          if (is_android) {
-            cflags += [ "--target=mips64el-linux-android" ]
-            ldflags += [ "--target=mips64el-linux-android" ]
-          } else {
-            cflags += [ "--target=mips64el-linux-gnuabi64" ]
-            ldflags += [ "--target=mips64el-linux-gnuabi64" ]
-          }
-        } else {
-          cflags += [
-            "-EL",
-            "-mabi=64",
-          ]
-          ldflags += [
-            "-EL",
-            "-mabi=64",
-          ]
-        }
-      }
-
-      if (mips_arch_variant == "r6") {
-        if (is_clang) {
-          cflags += [
-            "-march=mips64el",
-            "-mcpu=mips64r6",
-          ]
-        } else {
-          cflags += [
-            "-mips64r6",
-            "-Wa,-mips64r6",
-          ]
-          ldflags += [ "-mips64r6" ]
-        }
-        if (mips_use_msa == true) {
-          cflags += [
-            "-mmsa",
-            "-mfp64",
-          ]
-        }
-      } else if (mips_arch_variant == "r2") {
-        ldflags += [ "-mips64r2" ]
-        if (is_clang) {
-          cflags += [
-            "-march=mips64el",
-            "-mcpu=mips64r2",
-          ]
-        } else {
-          cflags += [
-            "-mips64r2",
-            "-Wa,-mips64r2",
-          ]
-        }
-      } else if (mips_arch_variant == "loongson3") {
-        defines += [ "_MIPS_ARCH_LOONGSON" ]
-        cflags += [
-          "-march=loongson3a",
-          "-mno-branch-likely",
-          "-Wa,-march=loongson3a",
-        ]
-      }
-    } else if (current_cpu == "mips64") {
-      if (custom_toolchain == "") {
-        if (is_clang) {
-          cflags += [ "--target=mips64-linux-gnuabi64" ]
-          ldflags += [ "--target=mips64-linux-gnuabi64" ]
-        } else {
-          cflags += [
-            "-EB",
-            "-mabi=64",
-          ]
-          ldflags += [
-            "-EB",
-            "-mabi=64",
-          ]
-        }
-      }
-
-      if (mips_arch_variant == "r6") {
-        cflags += [
-          "-mips64r6",
-          "-Wa,-mips64r6",
-        ]
-        ldflags += [ "-mips64r6" ]
-
-        if (mips_use_msa == true) {
-          cflags += [
-            "-mmsa",
-            "-mfp64",
-          ]
-        }
-      } else if (mips_arch_variant == "r2") {
-        cflags += [
-          "-mips64r2",
-          "-Wa,-mips64r2",
-        ]
-        ldflags += [ "-mips64r2" ]
-      }
-    } else if (current_cpu == "pnacl" && is_nacl_nonsfi) {
-      if (target_cpu == "x86" || target_cpu == "x64") {
-        cflags += [
-          "-arch",
-          "x86-32-nonsfi",
-          "--pnacl-bias=x86-32-nonsfi",
-          "--target=i686-unknown-nacl",
-        ]
-        ldflags += [
-          "-arch",
-          "x86-32-nonsfi",
-          "--target=i686-unknown-nacl",
-        ]
-      } else if (target_cpu == "arm") {
-        cflags += [
-          "-arch",
-          "arm-nonsfi",
-          "-mfloat-abi=hard",
-          "--pnacl-bias=arm-nonsfi",
-          "--target=armv7-unknown-nacl-gnueabihf",
-        ]
-        ldflags += [
-          "-arch",
-          "arm-nonsfi",
-          "--target=armv7-unknown-nacl-gnueabihf",
-        ]
-      }
-    } else if (current_cpu == "ppc64") {
-      if (v8_current_cpu == "ppc") {
-        cflags += [ "-m32" ]
-        ldflags += [ "-m32" ]
-      } else if (v8_current_cpu == "ppc64") {
-        if (current_os == "aix") {
-          cflags += [ "-maix64" ]
-          ldflags += [ "-maix64" ]
-        } else {
-          cflags += [ "-m64" ]
-          ldflags += [ "-m64" ]
-        }
-      }
-    } else if (current_cpu == "s390x") {
-      if (v8_current_cpu == "s390" && host_byteorder == "little") {
-        cflags += [ "-m32" ]
-        ldflags += [ "-m32" ]
-      } else if (v8_current_cpu == "s390") {
-        cflags += [ "-m31" ]
-        ldflags += [ "-m31" ]
-      } else if (v8_current_cpu == "s390x") {
-        cflags += [ "-m64" ]
-        ldflags += [ "-m64" ]
-      }
-    }
-  }
-
-  asmflags = cflags
-}
-
-# This provides options to tweak code generation that are necessary
-# for particular Chromium code or for working around particular
-# compiler bugs (or the combination of the two).
-config("compiler_codegen") {
-  configs = []
-  cflags = []
-
-  if (is_nacl) {
-    configs += [ "//build/config/nacl:compiler_codegen" ]
-  } else if (is_posix && !is_mac && !is_ios) {
-    if (current_cpu == "x86") {
-      if (is_clang) {
-        cflags += [
-          # Otherwise building libyuv triggers issues in clang's register allocator,
-          # see llvm.org/PR15798 / crbug.com/233709
-          "-momit-leaf-frame-pointer",
-        ]
-      }
-    } else if (current_cpu == "arm") {
-      if (is_android && !is_clang) {
-        # Clang doesn't support these flags.
-        cflags += [
-          # The tree-sra optimization (scalar replacement for
-          # aggregates enabling subsequent optimizations) leads to
-          # invalid code generation when using the Android NDK's
-          # compiler (r5-r7). This can be verified using
-          # webkit_unit_tests' WTF.Checked_int8_t test.
-          "-fno-tree-sra",
-
-          # The following option is disabled to improve binary
-          # size and performance in gcc 4.9.
-          "-fno-caller-saves",
-        ]
-      }
-    }
-  }
-
-  asmflags = cflags
-}
-
-# This is separate from :compiler_codegen (and not even a sub-config there)
-# so that some targets can remove it from the list with:
-#   configs -= [ "//build/config/compiler:clang_stackrealign" ]
-# See https://crbug.com/556393 for details of where it must be avoided.
-config("clang_stackrealign") {
-  if (is_clang && current_cpu == "x86" && is_linux) {
-    cflags = [
-      # Align the stack on 16-byte boundaries, http://crbug.com/418554.
-      "-mstack-alignment=16",
-      "-mstackrealign",
-    ]
-  }
-}
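
The comment above describes opting a target out of this config with configs -=. A minimal sketch of how that looks in a target's BUILD.gn (target name hypothetical):

  source_set("legacy_asm") {
    sources = [ "legacy.cc" ]

    # Per https://crbug.com/556393, this code cannot tolerate stack realignment.
    configs -= [ "//build/config/compiler:clang_stackrealign" ]
  }
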
-
-config("clang_revision") {
-  if (is_clang && clang_base_path == default_clang_base_path) {
-    update_args = [
-      "--print-revision",
-      "--verify-version=$clang_version",
-    ]
-    if (llvm_force_head_revision) {
-      update_args += [ "--llvm-force-head-revision" ]
-    }
-    clang_revision = exec_script("//tools/clang/scripts/update.py",
-                                 update_args,
-                                 "trim string")
-
-    # This is here so that all files get recompiled after a clang roll and
-    # when turning clang on or off. (defines are passed via the command line,
-    # and build systems rebuild things when their command line changes). Nothing
-    # should ever read this define.
-    defines = [ "CR_CLANG_REVISION=\"$clang_revision\"" ]
-  }
-}
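
clang_revision above leans on exec_script() with the "trim string" input conversion, then bakes the result into a define so that command lines change when the value does. A self-contained sketch of that mechanism, using a hypothetical script:

  # //tools/print_rev.py is assumed to print a version string to stdout.
  rev = exec_script("//tools/print_rev.py", [ "--print-revision" ], "trim string")

  config("rev_define") {
    # A changed revision changes the command line, forcing a rebuild.
    defines = [ "MY_TOOL_REVISION=\"$rev\"" ]
  }
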
-
-config("compiler_arm_fpu") {
-  if (current_cpu == "arm" && !is_ios && !is_nacl) {
-    cflags = [ "-mfpu=$arm_fpu" ]
-    asmflags = cflags
-  }
-}
-
-config("compiler_arm_thumb") {
-  if (current_cpu == "arm" && arm_use_thumb && is_posix &&
-      !(is_mac || is_ios || is_nacl)) {
-    cflags = [ "-mthumb" ]
-    if (is_android && !is_clang) {
-      # Clang doesn't support this option.
-      cflags += [ "-mthumb-interwork" ]
-    }
-  }
-}
-
-config("compiler_arm") {
-  if (current_cpu == "arm" && is_chromeos) {
-    # arm is normally the default mode for clang, but on chromeos a wrapper
-    # is used to pass -mthumb, and therefore changes the default.
-    cflags = [ "-marm" ]
-  }
-}
-
-# runtime_library -------------------------------------------------------------
-#
-# Sets the runtime library and associated options.
-#
-# How do you determine what should go in here vs. "compiler" above? Consider if
-# a target might choose to use a different runtime library (ignore for a moment
-# if this is possible or reasonable on your system). If such a target would want
-# to change or remove your option, put it in the runtime_library config. If a
-# target wants the option regardless, put it in the compiler config.
-
-config("runtime_library") {
-  defines = []
-  configs = []
-
-  # TODO(crbug.com/830987): Come up with a better name for the POSIX + Fuchsia
-  # configuration.
-  #
-  # The order of this config is important: it must appear before
-  # android:runtime_library.  This is to ensure libc++ appears before
-  # libandroid_support in the -isystem include order.  Otherwise, there will be
-  # build errors related to symbols declared in math.h.
-  if (is_posix || is_fuchsia) {
-    configs += [ "//build/config/posix:runtime_library" ]
-  }
-
-  # System-specific flags. If your compiler flags apply to one of the
-  # categories here, add it to the associated file to keep this shared config
-  # smaller.
-  if (is_win) {
-    configs += [ "//build/config/win:runtime_library" ]
-  } else if (is_linux) {
-    configs += [ "//build/config/linux:runtime_library" ]
-  } else if (is_ios) {
-    configs += [ "//build/config/ios:runtime_library" ]
-  } else if (is_mac) {
-    configs += [ "//build/config/mac:runtime_library" ]
-  } else if (is_android) {
-    configs += [ "//build/config/android:runtime_library" ]
-  }
-
-  if (is_component_build) {
-    defines += [ "COMPONENT_BUILD" ]
-  }
-}
-
-# default_warnings ------------------------------------------------------------
-#
-# Collects all warning flags that are used by default.  This is used as a
-# subconfig of both chromium_code and no_chromium_code.  This way these
-# flags are guaranteed to appear on the compile command line after -Wall.
-config("default_warnings") {
-  cflags = []
-  cflags_cc = []
-  ldflags = []
-
-  if (is_win) {
-    if (treat_warnings_as_errors) {
-      cflags += [ "/WX" ]
-    }
-    if (fatal_linker_warnings) {
-      ldflags += [ "/WX" ]
-    }
-
-    cflags += [
-      # Warnings permanently disabled:
-
-      # C4091: 'typedef ': ignored on left of 'X' when no variable is
-      #                    declared.
-      # This happens in a number of Windows headers. Dumb.
-      "/wd4091",
-
-      # C4127: conditional expression is constant
-      # This warning can in theory catch dead code and other problems, but
-      # triggers in far too many desirable cases where the conditional
-      # expression is either set by macros or corresponds some legitimate
-      # compile-time constant expression (due to constant template args,
-      # conditionals comparing the sizes of different types, etc.).  Some of
-      # these can be worked around, but it's not worth it.
-      "/wd4127",
-
-      # C4251: 'identifier' : class 'type' needs to have dll-interface to be
-      #        used by clients of class 'type2'
-      # This is necessary for the shared library build.
-      "/wd4251",
-
-      # C4275:  non dll-interface class used as base for dll-interface class
-      # This points out a potential (but rare) problem with referencing static
-      # fields of a non-exported base, through the base's non-exported inline
-      # functions, or directly. The warning is subtle enough that people just
-      # suppressed it when they saw it, so it's not worth it.
-      "/wd4275",
-
-      # C4312 is a VS 2015 64-bit warning for integer to larger pointer.
-      # TODO(brucedawson): fix warnings, crbug.com/554200
-      "/wd4312",
-
-      # C4324 warns when padding is added to fulfill alignas requirements,
-      # but can trigger in benign cases that are difficult to individually
-      # suppress.
-      "/wd4324",
-
-      # C4351: new behavior: elements of array 'array' will be default
-      #        initialized
-      # This is a silly "warning" that basically just alerts you that the
-      # compiler is going to actually follow the language spec like it's
-      # supposed to, instead of not following it like old buggy versions did.
-      # There's absolutely no reason to turn this on.
-      "/wd4351",
-
-      # C4355: 'this': used in base member initializer list
-      # It's commonly useful to pass |this| to objects in a class' initializer
-      # list.  While this warning can catch real bugs, most of the time the
-      # constructors in question don't attempt to call methods on the passed-in
-      # pointer (until later), and annotating every legit usage of this is
-      # simply more hassle than the warning is worth.
-      "/wd4355",
-
-      # C4503: 'identifier': decorated name length exceeded, name was
-      #        truncated
-      # This only means that some long error messages might have truncated
-      # identifiers in the presence of lots of templates.  It has no effect on
-      # program correctness and there's no real reason to waste time trying to
-      # prevent it.
-      "/wd4503",
-
-      # Warning C4589 says: "Constructor of abstract class ignores
-      # initializer for virtual base class." Disable this warning because it
-      # is flaky in VS 2015 RTM. It triggers on compiler generated
-      # copy-constructors in some cases.
-      "/wd4589",
-
-      # C4611: interaction between 'function' and C++ object destruction is
-      #        non-portable
-      # This warning is unavoidable when using e.g. setjmp/longjmp.  MSDN
-      # suggests using exceptions instead of setjmp/longjmp for C++, but
-      # Chromium code compiles without exception support.  We therefore have to
-      # use setjmp/longjmp for e.g. JPEG decode error handling, which means we
-      # have to turn off this warning (and be careful about how object
-      # destruction happens in such cases).
-      "/wd4611",
-
-      # Warnings to evaluate and possibly fix/reenable later:
-
-      "/wd4100",  # Unreferenced formal function parameter.
-      "/wd4121",  # Alignment of a member was sensitive to packing.
-      "/wd4244",  # Conversion: possible loss of data.
-      "/wd4505",  # Unreferenced local function has been removed.
-      "/wd4510",  # Default constructor could not be generated.
-      "/wd4512",  # Assignment operator could not be generated.
-      "/wd4610",  # Class can never be instantiated, constructor required.
-      "/wd4838",  # Narrowing conversion. Doesn't seem to be very useful.
-      "/wd4995",  # 'X': name was marked as #pragma deprecated
-      "/wd4996",  # Deprecated function warning.
-
-      # These are variable shadowing warnings that are new in VS2015. We
-      # should work through these at some point -- they may be removed from
-      # the RTM release in the /W4 set.
-      "/wd4456",
-      "/wd4457",
-      "/wd4458",
-      "/wd4459",
-    ]
-
-    cflags_cc += [
-      # Allow "noexcept" annotations even though we compile with exceptions
-      # disabled.
-      "/wd4577",
-    ]
-
-    if (current_cpu == "x86") {
-      cflags += [
-        # VC++ 2015 changes 32-bit size_t truncation warnings from 4244 to
-        # 4267. Example: short TruncTest(size_t x) { return x; }
-        # Since we disable 4244 we need to disable 4267 during migration.
-        # TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
-        "/wd4267",
-      ]
-    }
-
-    # VS xtree header file needs to be patched or 4702 (unreachable code
-    # warning) is reported if _HAS_EXCEPTIONS=0. Disable the warning if xtree is
-    # not patched.
-    if (!msvs_xtree_patched &&
-        exec_script("../../win_is_xtree_patched.py", [], "value") == 0) {
-      cflags += [ "/wd4702" ]  # Unreachable code.
-    }
-  } else {
-    if ((is_mac || is_ios) && !is_nacl) {
-      # When compiling Objective-C, warns if a method is used whose
-      # availability is newer than the deployment target.
-      cflags += [ "-Wunguarded-availability" ]
-    }
-
-    if (is_ios) {
-      # When compiling Objective-C, warns if a selector named via @selector has
-      # not been defined in any visible interface.
-      cflags += [ "-Wundeclared-selector" ]
-    }
-
-    # Suppress warnings about ABI changes on ARM (Clang doesn't give this
-    # warning).
-    if (current_cpu == "arm" && !is_clang) {
-      cflags += [ "-Wno-psabi" ]
-    }
-
-    if (!is_clang) {
-      cflags_cc += [
-        # See comment for -Wno-c++11-narrowing.
-        "-Wno-narrowing",
-      ]
-
-      # -Wunused-local-typedefs is broken in gcc,
-      # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=63872
-      cflags += [ "-Wno-unused-local-typedefs" ]
-
-      # Don't warn about "maybe" uninitialized. Clang doesn't include this
-      # in -Wall but gcc does, and it gives false positives.
-      cflags += [ "-Wno-maybe-uninitialized" ]
-      cflags += [ "-Wno-deprecated-declarations" ]
-
-      # GCC assumes 'this' is never nullptr and optimizes away code
-      # like "if (this == nullptr) ...": [1].  However, some Chromium
-      # code relies on these types of null pointer checks [2], so
-      # disable this optimization.
-      # [1] https://gcc.gnu.org/gcc-6/porting_to.html#this-cannot-be-null
-      # [2] https://crbug.com/784492#c13
-      cflags += [ "-fno-delete-null-pointer-checks" ]
-
-      # -Wcomment gives too many false positives in the case where a
-      # backslash-ended comment line is followed by a new line of
-      # comments.
-      # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=61638
-      cflags += [ "-Wno-comments" ]
-    }
-  }
-
-  # Common Clang and GCC warning setup.
-  if (!is_win || is_clang) {
-    cflags += [
-      # Disables.
-      "-Wno-missing-field-initializers",  # "struct foo f = {0};"
-      "-Wno-unused-parameter",  # Unused function parameters.
-    ]
-  }
-
-  if (is_clang) {
-    cflags += [
-      # TODO(thakis): Consider -Wloop-analysis (turns on
-      # -Wrange-loop-analysis too).
-
-      # This warns on using ints as initializers for floats in
-      # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
-      # which happens in several places in chrome code. Not sure if
-      # this is worth fixing.
-      "-Wno-c++11-narrowing",
-
-      # Warns on switches on enums that cover all enum values but
-      # also contain a default: branch. Chrome is full of that.
-      "-Wno-covered-switch-default",
-
-      # TODO(thakis): This used to be implied by -Wno-unused-function,
-      # which we no longer use. Check if it makes sense to remove
-      # this as well. http://crbug.com/316352
-      "-Wno-unneeded-internal-declaration",
-    ]
-
-    # use_xcode_clang only refers to the iOS toolchain; host binaries always
-    # use chromium's clang.
-    if (!is_nacl && (!use_xcode_clang || current_toolchain == host_toolchain)) {
-      # Flags NaCl (Clang 3.7) and Xcode 7.3 (Clang clang-703.0.31) do not
-      # recognize.
-      cflags += [
-        # TODO(thakis): https://crbug.com/604888
-        "-Wno-undefined-var-template",
-
-        # TODO(thakis): https://crbug.com/617318
-        "-Wno-nonportable-include-path",
-
-        # TODO(hans): https://crbug.com/637306
-        "-Wno-address-of-packed-member",
-
-        # TODO(hans): https://crbug.com/681136
-        "-Wno-unused-lambda-capture",
-
-        # TODO(thakis): https://crbug.com/683349
-        "-Wno-user-defined-warnings",
-
-        # TODO(thakis): https://crbug.com/753973
-        "-Wno-enum-compare-switch",
-
-        # TODO(hans): https://crbug.com/766891
-        "-Wno-null-pointer-arithmetic",
-
-        # Ignore warnings about MSVC optimization pragmas.
-        # TODO(thakis): Only for no_chromium_code? http://crbug.com/505314
-        "-Wno-ignored-pragma-optimize",
-      ]
-    } else if (use_xcode_clang) {
-      cflags += [
-        # TODO(thakis): https://crbug.com/604888
-        "-Wno-undefined-var-template",
-
-        # TODO(hans): https://crbug.com/637306
-        "-Wno-address-of-packed-member",
-      ]
-    }
-  }
-}
-
-# chromium_code ---------------------------------------------------------------
-#
-# Toggles between higher and lower warnings for code that is (or isn't)
-# part of Chromium.
-
-config("chromium_code") {
-  if (is_win) {
-    cflags = [ "/W4" ]  # Warning level 4.
-  } else {
-    cflags = [ "-Wall" ]
-    if (treat_warnings_as_errors) {
-      cflags += [ "-Werror" ]
-
-      # The compiler driver can sometimes (rarely) emit warnings before calling
-      # the actual linker.  Make sure these warnings are treated as errors as
-      # well.
-      ldflags = [ "-Werror" ]
-    }
-    if (is_clang) {
-      # Enable extra warnings for chromium_code when we control the compiler.
-      cflags += [ "-Wextra" ]
-    }
-
-    # In Chromium code, we define __STDC_foo_MACROS in order to get the
-    # C99 macros on Mac and Linux.
-    defines = [
-      "__STDC_CONSTANT_MACROS",
-      "__STDC_FORMAT_MACROS",
-    ]
-
-    if (!is_debug && !using_sanitizer &&
-        (!is_linux || !is_clang || is_official_build) &&
-        current_cpu != "s390x" && current_cpu != "s390" &&
-        current_cpu != "ppc64" && current_cpu != "ppc64" &&
-        current_cpu != "mips" && current_cpu != "mips64") {
-      # _FORTIFY_SOURCE isn't really supported by Clang now, see
-      # http://llvm.org/bugs/show_bug.cgi?id=16821.
-      # It seems to work fine with Ubuntu 12 headers though, so use it in
-      # official builds.
-      #
-      # Non-chromium code is not guaranteed to compile cleanly with
-      # _FORTIFY_SOURCE. Also, fortified build may fail when optimizations are
-      # disabled, so only do that for Release build.
-      defines += [ "_FORTIFY_SOURCE=2" ]
-    }
-
-    if (is_mac || is_ios) {
-      cflags_objc = [ "-Wobjc-missing-property-synthesis" ]
-      cflags_objcc = [ "-Wobjc-missing-property-synthesis" ]
-    }
-  }
-
-  if (is_clang) {
-    cflags += [
-      # Warn on missing break statements at the end of switch cases.
-      # For intentional fallthrough, use FALLTHROUGH; from
-      # base/compiler_specific.h
-      "-Wimplicit-fallthrough",
-
-      # Thread safety analysis. See base/thread_annotations.h and
-      # https://clang.llvm.org/docs/ThreadSafetyAnalysis.html
-      "-Wthread-safety",
-    ]
-  }
-
-  configs = [ ":default_warnings" ]
-}
-
-config("no_chromium_code") {
-  cflags = []
-  cflags_cc = []
-  defines = []
-
-  if (is_win) {
-    cflags += [
-      "/W3",  # Warning level 3.
-      "/wd4800",  # Disable warning when forcing value to bool.
-      "/wd4267",  # TODO(jschuh): size_t to int.
-      "/wd4996",  # Deprecated function warning.
-    ]
-    defines += [
-      "_CRT_NONSTDC_NO_WARNINGS",
-      "_CRT_NONSTDC_NO_DEPRECATE",
-    ]
-  } else {
-    # GCC may emit unsuppressible warnings, so don't add -Werror for
-    # non-Chromium code. crbug.com/589724
-    if (treat_warnings_as_errors && is_clang) {
-      cflags += [ "-Werror" ]
-      ldflags = [ "-Werror" ]
-    }
-    if (is_clang && !is_nacl) {
-      # TODO(thakis): Remove !is_nacl once
-      # https://codereview.webrtc.org/1552863002/ made its way into chromium.
-      cflags += [ "-Wall" ]
-    }
-  }
-
-  if (is_clang) {
-    cflags += [
-      # Lots of third-party libraries have unused variables. Instead of
-      # suppressing them individually, we just blanket suppress them here.
-      "-Wno-unused-variable",
-    ]
-  }
-
-  configs = [ ":default_warnings" ]
-}
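
As a hedged illustration of how these two configs are typically used (the
target and file names below are hypothetical, not part of this change), a
target containing non-Chromium code would swap the default warning config:

  # Hypothetical third-party target opting out of the Chromium warning set.
  source_set("third_party_lib") {
    sources = [ "lib.c" ]
    configs -= [ "//build/config/compiler:chromium_code" ]
    configs += [ "//build/config/compiler:no_chromium_code" ]
  }
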
-
-# noshadowing -----------------------------------------------------------------
-#
-# Allows turning -Wshadow on.
-
-config("noshadowing") {
-  # This flag has to be disabled for nacl because the nacl compiler is too
-  # strict about shadowing.
-  if (is_clang && !is_nacl) {
-    cflags = [ "-Wshadow" ]
-  }
-}
-
-# rtti ------------------------------------------------------------------------
-#
-# Allows turning Run-Time Type Identification on or off.
-
-config("rtti") {
-  if (is_win) {
-    cflags_cc = [ "/GR" ]
-  } else {
-    cflags_cc = [ "-frtti" ]
-  }
-}
-
-config("no_rtti") {
-  # Some sanitizer configs may require RTTI to be left enabled globally
-  if (!use_rtti) {
-    if (is_win) {
-      cflags_cc = [ "/GR-" ]
-    } else {
-      cflags_cc = [ "-fno-rtti" ]
-      cflags_objcc = cflags_cc
-    }
-  }
-}
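
A minimal sketch of a target that needs RTTI, assuming no_rtti is among the
target's default configs; the target and source names are hypothetical:

  # Hypothetical target whose code uses dynamic_cast/typeid.
  source_set("needs_rtti") {
    sources = [ "uses_dynamic_cast.cc" ]
    configs -= [ "//build/config/compiler:no_rtti" ]
    configs += [ "//build/config/compiler:rtti" ]
  }
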
-
-# thin_archive -----------------------------------------------------------------
-#
-# Enables thin archives on posix.  Regular archives directly include the object
-# files used to generate them.  Thin archives merely reference the object files.
-# This makes building them faster since it requires less disk IO, but it is
-# inappropriate if you wish to redistribute your static library.
-# This config is added to the global config, so thin archives should already be
-# enabled.  If you want to make a distributable static library, you need to do
-# two things (see the sketch after this config):
-# 1. Set complete_static_lib so that all dependencies of the library make it
-#    into the library. See `gn help complete_static_lib` for details.
-# 2. Remove the thin_archive config, so that the .a file actually contains all
-#    .o files, instead of just references to .o files in the build directory.
-config("thin_archive") {
-  # Mac and iOS use the mac-specific "libtool" command, not ar, which doesn't
-  # have a "thin archive" mode (it does accept -T, but it means truncating
-  # archive names to 16 characters, which is not what we want).
-  if ((is_posix && !is_nacl && !is_mac && !is_ios) || is_fuchsia) {
-    arflags = [ "-T" ]
-  }
-}
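
Following the two steps in the comment above, a distributable static library
might look roughly like this (a sketch; the target name is hypothetical):

  # Hypothetical redistributable library: complete archive, no thin-archive
  # references.
  static_library("redistributable_lib") {
    sources = [ "lib.cc" ]
    complete_static_lib = true
    configs -= [ "//build/config/compiler:thin_archive" ]
  }
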
-
-# exceptions -------------------------------------------------------------------
-#
-# Allows turning Exceptions on or off.
-# Note: exceptions are disallowed in Google code.
-
-config("exceptions") {
-  if (is_win) {
-    # Enables exceptions in the STL.
-    if (!use_custom_libcxx) {
-      defines = [ "_HAS_EXCEPTIONS=1" ]
-    }
-    cflags_cc = [ "/EHsc" ]
-  } else {
-    cflags_cc = [ "-fexceptions" ]
-    cflags_objcc = cflags_cc
-  }
-}
-
-config("no_exceptions") {
-  if (is_win) {
-    # Disables exceptions in the STL.
-    # libc++ uses the __has_feature macro to control whether to use exceptions,
-    # so defining this macro is unnecessary. Defining _HAS_EXCEPTIONS to 0 also
-    # breaks libc++ because it depends on MSVC headers that only provide certain
-    # declarations if _HAS_EXCEPTIONS is 1. Those MSVC headers do not use
-    # exceptions, despite being conditional on _HAS_EXCEPTIONS.
-    if (!use_custom_libcxx) {
-      defines = [ "_HAS_EXCEPTIONS=0" ]
-    }
-  } else {
-    cflags_cc = [ "-fno-exceptions" ]
-    cflags_objcc = cflags_cc
-  }
-}
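
For the rare target that must build with exceptions (for example, third-party
code that throws), a hedged sketch of the usual config swap, assuming
no_exceptions is among the target's default configs; names are hypothetical:

  source_set("throwing_third_party") {
    sources = [ "throws.cc" ]  # Hypothetical source.
    configs -= [ "//build/config/compiler:no_exceptions" ]
    configs += [ "//build/config/compiler:exceptions" ]
  }
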
-
-# Warnings ---------------------------------------------------------------------
-
-# This will generate warnings when using Clang if code generates exit-time
-# destructors, which will slow down closing the program.
-# TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
-config("wexit_time_destructors") {
-  # TODO: Enable on Windows too, http://crbug.com/404525
-  if (is_clang && !is_win) {
-    cflags = [ "-Wexit-time-destructors" ]
-  }
-}
-
-# On Windows, when compiling for x64, VC will issue a warning when converting
-# size_t to int because doing so truncates the value. Our code should not
-# produce these warnings; use a static_cast or a checked_cast for the
-# conversion, depending on the case. However, a lot of code still needs to be
-# fixed. Apply this config to such targets to disable the warning.
-#
-# Note that this can be applied regardless of platform and architecture to
-# clean up the call sites. This will only apply the flag when necessary.
-#
-# TODO(jschuh): crbug.com/167187 fix this and delete this config.
-config("no_size_t_to_int_warning") {
-  if (is_win && current_cpu == "x64") {
-    cflags = [ "/wd4267" ]
-  }
-}
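
Per the comment above, affected targets simply append the config; since the
flag is only added on 64-bit Windows, applying it elsewhere is harmless. A
sketch with a hypothetical target:

  executable("legacy_tool") {  # Hypothetical target still being cleaned up.
    sources = [ "main.cc" ]
    configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
  }
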
-
-# Some code presumes that pointers to structures/objects are compatible
-# regardless of whether what they point to is already known to be valid.
-# gcc 4.9 and earlier had no way of suppressing this warning without
-# suppressing the rest of them.  Here we centralize the identification of
-# the gcc 4.9 toolchains.
-config("no_incompatible_pointer_warnings") {
-  cflags = []
-  if (is_clang) {
-    cflags += [ "-Wno-incompatible-pointer-types" ]
-  } else if (current_cpu == "mipsel" || current_cpu == "mips64el") {
-    cflags += [ "-w" ]
-  } else if (is_chromeos && current_cpu == "arm") {
-    cflags += [ "-w" ]
-  }
-}
-
-# Optimization -----------------------------------------------------------------
-#
-# The BUILDCONFIG file sets the "default_optimization" config on targets by
-# default. It will be equivalent to either "optimize" (release) or
-# "no_optimize" (debug) optimization configs.
-#
-# You can override the optimization level on a per-target basis by removing the
-# default config and then adding the named one you want:
-#
-#   configs -= [ "//build/config/compiler:default_optimization" ]
-#   configs += [ "//build/config/compiler:optimize_max" ]
-
-# Shared settings for both "optimize" and "optimize_max" configs.
-# IMPORTANT: On Windows "/O1" and "/O2" must go before the common flags.
-if (is_win) {
-  common_optimize_on_cflags = [
-    "/Ob2",  # Both explicit and auto inlining.
-    "/Oy-",  # Disable omitting frame pointers, must be after /O2.
-    "/Zc:inline",  # Remove unreferenced COMDAT (faster links).
-  ]
-  if (!is_asan) {
-    common_optimize_on_cflags += [
-      # Put data in separate COMDATs. This allows the linker
-      # to put bit-identical constants at the same address even if
-      # they're unrelated constants, which saves binary size.
-      # This optimization can't be used when ASan is enabled because
-      # it is not compatible with the ASan ODR checker.
-      "/Gw",
-    ]
-  }
-  common_optimize_on_ldflags = []
-
-  # /OPT:ICF is not desirable in Debug builds, since code-folding can result in
-  # misleading symbols in stack traces. It is also incompatible with
-  # incremental linking, which we enable for both Debug and component builds.
-  if (!is_debug && !is_component_build) {
-    common_optimize_on_ldflags += [ "/OPT:ICF" ]  # Redundant COMDAT folding.
-  }
-
-  if (is_official_build) {
-    common_optimize_on_ldflags += [ "/OPT:REF" ]  # Remove unreferenced data.
-
-    # TODO(thakis): Remove is_clang here, https://crbug.com/598772
-    if (!use_lld && !is_clang) {
-      common_optimize_on_ldflags += [
-        # Set the number of LTCG code-gen threads to eight. The default is four.
-        # This gives a 5-10% link speedup.
-        "/cgthreads:8",
-      ]
-      if (use_incremental_wpo) {
-        # Incremental Link-time code generation.
-        common_optimize_on_ldflags += [ "/LTCG:INCREMENTAL" ]
-      } else {
-        common_optimize_on_ldflags += [ "/LTCG" ]  # Link-time code generation.
-      }
-      if (full_wpo_on_official) {
-        if (use_incremental_wpo) {
-          arflags = [ "/LTCG:INCREMENTAL" ]
-        } else {
-          arflags = [ "/LTCG" ]
-        }
-      }
-    }
-  }
-} else {
-  common_optimize_on_cflags = []
-  common_optimize_on_ldflags = []
-
-  if (is_android) {
-    # TODO(jdduke) Re-enable on mips after resolving linking
-    # issues with libc++ (crbug.com/456380).
-    if (current_cpu != "mipsel" && current_cpu != "mips64el") {
-      common_optimize_on_ldflags += [
-        # Warn in case of text relocations.
-        "-Wl,--warn-shared-textrel",
-      ]
-    }
-  }
-
-  if (is_mac || is_ios) {
-    if (symbol_level == 2) {
-      # Mac dead code stripping requires symbols.
-      common_optimize_on_ldflags += [ "-Wl,-dead_strip" ]
-    }
-  } else if (current_os != "aix") {
-    # Non-Mac Posix flags.
-    # Aix does not support these.
-
-    common_optimize_on_cflags += [
-      # Don't emit the GCC version ident directives, they just end up in the
-      # .comment section taking up binary size.
-      "-fno-ident",
-
-      # Put data and code in their own sections, so that unused symbols
-      # can be removed at link time with --gc-sections.
-      "-fdata-sections",
-      "-ffunction-sections",
-    ]
-
-    common_optimize_on_ldflags += [
-      # Specifically tell the linker to perform optimizations.
-      # See http://lwn.net/Articles/192624/ .
-      # -O2 enables string tail merge optimization in gold and lld.
-      "-Wl,-O2",
-      "-Wl,--gc-sections",
-    ]
-  }
-}
-
-config("default_stack_frames") {
-  if (is_posix || is_fuchsia) {
-    if (enable_frame_pointers) {
-      cflags = [ "-fno-omit-frame-pointer" ]
-    } else {
-      cflags = [ "-fomit-frame-pointer" ]
-    }
-  }
-  # On Windows, the flag to enable frame pointers, "/Oy-", must always come
-  # after the optimization flag (e.g. "/O2"). The optimization flag is set by
-  # one of the "optimize" configs; see the rest of this file. The order in
-  # which cflags are applied is well-defined by the GN spec, but there is no
-  # way to ensure that cflags set by "default_stack_frames" are applied after
-  # those set by an "optimize" config. Similarly, there is no way to propagate
-  # state from this config into the "optimize" config. We always apply "/Oy-"
-  # in the common_optimize_on_cflags definition, even though this may not be
-  # correct.
-}
-
-# Default "optimization on" config.
-config("optimize") {
-  if (is_win) {
-    # TODO(thakis): Remove is_clang here, https://crbug.com/598772
-    if (is_official_build && full_wpo_on_official && !is_clang) {
-      common_optimize_on_cflags += [
-        "/GL",  # Whole program optimization.
-
-        # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
-        # Probably anything that this would catch that wouldn't be caught in a
-        # normal build isn't going to actually be a bug, so the incremental
-        # value of C4702 for PGO builds is likely very small.
-        "/wd4702",
-      ]
-    }
-
-    # Favor size over speed, /O1 must be before the common flags. The GYP
-    # build also specifies /Os and /GF but these are implied by /O1.
-    cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
-  } else if (optimize_for_size && !is_nacl) {
-    # Favor size over speed.
-    # TODO(crbug.com/718650): Fix -Os in PNaCl compiler and remove the is_nacl
-    # guard above.
-    if (is_clang) {
-      cflags = [ "-Oz" ] + common_optimize_on_cflags
-    } else {
-      cflags = [ "-Os" ] + common_optimize_on_cflags
-    }
-  } else {
-    cflags = [ "-O2" ] + common_optimize_on_cflags
-  }
-  ldflags = common_optimize_on_ldflags
-}
-
-# Same config as 'optimize' but without the WPO flag.
-config("optimize_no_wpo") {
-  if (is_win) {
-    # Favor size over speed, /O1 must be before the common flags. The GYP
-    # build also specifies /Os and /GF but these are implied by /O1.
-    cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
-  } else if (optimize_for_size && !is_nacl) {
-    # Favor size over speed.
-    # TODO(crbug.com/718650): Fix -Os in PNaCl compiler and remove the is_nacl
-    # guard above.
-    if (is_clang) {
-      cflags = [ "-Oz" ] + common_optimize_on_cflags
-    } else {
-      cflags = [ "-Os" ] + common_optimize_on_cflags
-    }
-  } else if (optimize_for_fuzzing) {
-    cflags = [ "-O1" ] + common_optimize_on_cflags
-  } else {
-    cflags = [ "-O2" ] + common_optimize_on_cflags
-  }
-  ldflags = common_optimize_on_ldflags
-}
-
-# Turn off optimizations.
-config("no_optimize") {
-  if (is_win) {
-    cflags = [
-      "/Od",  # Disable optimization.
-      "/Ob0",  # Disable all inlining (on by default).
-      "/GF",  # Enable string pooling (off by default).
-    ]
-  } else if (is_android && !android_full_debug) {
-    # On Android, even when optimization is disabled, we apply some light
-    # optimizations that don't affect debugging much in order to keep the
-    # binary size down.
-    if (is_clang) {
-      cflags = [ "-Oz" ] + common_optimize_on_cflags
-    } else {
-      cflags = [ "-Os" ] + common_optimize_on_cflags
-    }
-  } else {
-    cflags = [ "-O0" ]
-    ldflags = []
-  }
-}
-
-# Turns up the optimization level. On Windows, this implies whole program
-# optimization and link-time code generation, which are very expensive and
-# should be used sparingly.
-config("optimize_max") {
-  if (is_nacl && is_nacl_irt) {
-    # The NaCl IRT is a special case and always wants its own config.
-    # Various components do:
-    #   if (!is_debug) {
-    #     configs -= [ "//build/config/compiler:default_optimization" ]
-    #     configs += [ "//build/config/compiler:optimize_max" ]
-    #   }
-    # So this config has to have the selection logic just like
-    # "default_optimization", below.
-    configs = [ "//build/config/nacl:irt_optimize" ]
-  } else {
-    ldflags = common_optimize_on_ldflags
-    if (is_win) {
-      # Favor speed over size, /O2 must be before the common flags. The GYP
-      # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
-      cflags = [ "/O2" ] + common_optimize_on_cflags
-
-      if (is_official_build) {
-        if (!is_clang) {
-          cflags += [
-            "/GL",  # Whole program optimization.
-
-            # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
-            # Probably anything that this would catch that wouldn't be caught
-            # in a normal build isn't going to actually be a bug, so the
-            # incremental value of C4702 for PGO builds is likely very small.
-            "/wd4702",
-          ]
-        }
-        # TODO(crbug.com/598772): Enable -flto for Clang.
-      }
-    } else if (optimize_for_fuzzing) {
-      cflags = [ "-O1" ] + common_optimize_on_cflags
-    } else {
-      cflags = [ "-O2" ] + common_optimize_on_cflags
-    }
-  }
-}
-
-# This config can be used to override the default settings for per-component
-# and whole-program optimization, optimizing the particular target for speed
-# instead of code size. This config is exactly the same as "optimize_max"
-# except that we use -O3 instead of -O2 on non-win, non-IRT platforms.
-#
-# TODO(crbug.com/621335) - rework how all of these configs are related
-# so that we don't need this disclaimer.
-config("optimize_speed") {
-  if (is_nacl && is_nacl_irt) {
-    # The NaCl IRT is a special case and always wants its own config.
-    # Various components do:
-    #   if (!is_debug) {
-    #     configs -= [ "//build/config/compiler:default_optimization" ]
-    #     configs += [ "//build/config/compiler:optimize_max" ]
-    #   }
-    # So this config has to have the selection logic just like
-    # "default_optimization", below.
-    configs = [ "//build/config/nacl:irt_optimize" ]
-  } else {
-    ldflags = common_optimize_on_ldflags
-    if (is_win) {
-      # Favor speed over size, /O2 must be before the common flags. The GYP
-      # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
-      cflags = [ "/O2" ] + common_optimize_on_cflags
-
-      # TODO(thakis): Remove is_clang here, https://crbug.com/598772
-      if (is_official_build && !is_clang) {
-        cflags += [
-          "/GL",  # Whole program optimization.
-
-          # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
-          # Probably anything that this would catch that wouldn't be caught in a
-          # normal build isn't going to actually be a bug, so the incremental
-          # value of C4702 for PGO builds is likely very small.
-          "/wd4702",
-        ]
-      }
-    } else if (optimize_for_fuzzing) {
-      cflags = [ "-O1" ] + common_optimize_on_cflags
-    } else {
-      cflags = [ "-O3" ] + common_optimize_on_cflags
-    }
-  }
-}
-
-config("optimize_fuzzing") {
-  cflags = [ "-O1" ] + common_optimize_on_cflags
-  ldflags = common_optimize_on_ldflags
-  visibility = [ ":default_optimization" ]
-}
-
-# The default optimization applied to all targets. This will be equivalent to
-# either "optimize" or "no_optimize", depending on the build flags.
-config("default_optimization") {
-  if (is_nacl && is_nacl_irt) {
-    # The NaCl IRT is a special case and always wants its own config.
-    # It gets optimized the same way regardless of the type of build.
-    configs = [ "//build/config/nacl:irt_optimize" ]
-  } else if (is_debug) {
-    configs = [ ":no_optimize" ]
-  } else if (optimize_for_fuzzing) {
-    assert(!is_win, "Fuzzing optimize level not supported on Windows")
-
-    # Coverage build is quite slow. Using "optimize_for_fuzzing" makes it even
-    # slower as it uses "-O1" instead of "-O3". Prevent that from happening.
-    assert(!use_clang_coverage,
-           "optimize_for_fuzzing=true should not be used with " +
-               "use_clang_coverage=true.")
-    configs = [ ":optimize_fuzzing" ]
-  } else {
-    configs = [ ":optimize" ]
-  }
-}
-
-_clang_sample_profile = ""
-if (is_clang && current_toolchain == default_toolchain) {
-  if (clang_sample_profile_path != "") {
-    _clang_sample_profile = clang_sample_profile_path
-  } else if (clang_use_default_sample_profile) {
-    assert(build_with_chromium,
-           "Our default profiles currently only apply to Chromium")
-    assert(is_android || is_desktop_linux,
-           "The current platform has no default profile")
-    _clang_sample_profile = rebase_path("//chrome/android/profiles/afdo.prof")
-  }
-}
-
-# Clang offers a way to assert that AFDO profiles are accurate, which causes it
-# to optimize functions not represented in a profile more aggressively for size.
-# This config can be toggled in cases where shaving off binary size hurts
-# performance too much.
-config("afdo_optimize_size") {
-  if (_clang_sample_profile != "" && sample_profile_is_accurate) {
-    cflags = [ "-fprofile-sample-accurate" ]
-  }
-}
-
-# GCC and clang support a form of profile-guided optimization called AFDO.
-# There are some targeted places that AFDO regresses (and an icky interaction
-# between //base/allocator:tcmalloc and AFDO on GCC), so we provide a separate
-# config to allow AFDO to be disabled per-target.
-config("afdo") {
-  if (is_clang) {
-    if (_clang_sample_profile != "") {
-      cflags = [ "-fprofile-sample-use=${_clang_sample_profile}" ]
-      inputs = [
-        _clang_sample_profile,
-      ]
-    }
-  } else if (auto_profile_path != "" && current_toolchain == default_toolchain) {
-    cflags = [ "-fauto-profile=${auto_profile_path}" ]
-    inputs = [
-      auto_profile_path,
-    ]
-  }
-}
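
A hedged args.gn sketch for enabling AFDO with a local clang profile; the path
is a placeholder, and clang_sample_profile_path is assumed to be the build
argument declared elsewhere in the tree that this file reads:

  # args.gn (hypothetical developer configuration)
  is_clang = true
  clang_sample_profile_path = "//path/to/local/afdo.prof"  # Placeholder path.
  sample_profile_is_accurate = true
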
-
-# Symbols ----------------------------------------------------------------------
-
-# The BUILDCONFIG file sets the "default_symbols" config on targets by
-# default. It will be equivalent to one of the three specific symbol levels.
-#
-# You can override the symbol level on a per-target basis by removing the
-# default config and then adding the named one you want:
-#
-#   configs -= [ "//build/config/compiler:default_symbols" ]
-#   configs += [ "//build/config/compiler:symbols" ]
-
-# Full symbols.
-config("symbols") {
-  if (is_win) {
-    if (use_goma || is_clang) {
-      # Note that with VC++ this requires is_win_fastlink, enforced elsewhere.
-      cflags = [ "/Z7" ]  # Debug information in the .obj files.
-    } else {
-      cflags = [ "/Zi" ]  # Produce PDB file, no edit and continue.
-    }
-
-    if (is_win_fastlink && !use_lld) {
-      # TODO(hans): is_win_fastlink=true is incompatible with use_lld. However,
-      # some developers might have enabled it manually, so to ease the
-      # transition to lld, just ignore it rather than asserting. Eventually we
-      # want to assert instead.
-
-      # Tell VS 2015+ to create a PDB that references debug
-      # information in .obj and .lib files instead of copying
-      # it all. This flag is incompatible with /PROFILE.
-      ldflags = [ "/DEBUG:FASTLINK" ]
-    } else if (is_clang && use_lld && use_ghash) {
-      cflags += [
-        "-mllvm",
-        "-emit-codeview-ghash-section",
-      ]
-      ldflags = [ "/DEBUG:GHASH" ]
-    } else {
-      ldflags = [ "/DEBUG" ]
-    }
-
-    if (is_clang) {
-      # /DEBUG:FASTLINK requires every object file to have standalone debug
-      # information.
-      if (is_win_fastlink && !use_lld) {
-        cflags += [ "-fstandalone-debug" ]
-      } else {
-        cflags += [ "-fno-standalone-debug" ]
-      }
-    }
-  } else {
-    if (is_mac || is_ios) {
-      cflags = [ "-gdwarf-2" ]
-      if (is_mac && enable_dsyms) {
-        # If generating dSYMs, specify -fno-standalone-debug. This was
-        # originally specified for https://crbug.com/479841 because dsymutil
-        # could not handle a 4GB dSYM file. But dsymutil from Xcodes prior to
-        # version 7 also produces debug data that is incompatible with Breakpad
-        # dump_syms, so this is still required (https://crbug.com/622406).
-        cflags += [ "-fno-standalone-debug" ]
-      }
-    } else {
-      cflags = []
-      if (!use_debug_fission && target_cpu == "arm") {
-        # dump_syms has issues with dwarf4 on arm, https://crbug.com/744956
-        # TODO(thakis): Remove this again once dump_syms is fixed.
-        #
-        # debug fission needs DWARF DIEs to be emitted at version 4.
-        # Chrome OS emits Debug Frame in DWARF1 to make breakpad happy. [1]
-        # Unless Android needs debug fission, DWARF3 is the simplest solution.
-        #
-        # [1] crrev.com/a81d5ade0b043208e06ad71a38bcf9c348a1a52f
-        cflags += [ "-gdwarf-3" ]
-      }
-      cflags += [ "-g2" ]
-    }
-    if (use_debug_fission && !is_nacl && !is_android) {
-      # NOTE: Some Chrome OS builds globally set |use_debug_fission| to true,
-      # but they also build some targets against Android toolchains which aren't
-      # compatible with it.
-      #
-      # TODO(https://crbug.com/837032): See if we can clean this up by e.g. not
-      # setting use_debug_fission globally.
-      cflags += [ "-gsplit-dwarf" ]
-    }
-    asmflags = cflags
-    ldflags = []
-
-    # TODO(thakis): Figure out if there's a way to make this go for 32-bit,
-    # currently we get "warning:
-    # obj/native_client/src/trusted/service_runtime/sel_asm/nacl_switch_32.o:
-    # DWARF info may be corrupt; offsets in a range list entry are in different
-    # sections" there.  Maybe just a bug in nacl_switch_32.S.
-    if (!is_mac && !is_ios && !is_nacl && target_cpu != "x86" &&
-        (use_gold || use_lld)) {
-      if (is_clang) {
-        # This flag enables the GNU-format pubnames and pubtypes sections,
-        # which lld needs in order to generate a correct GDB index.
-        # TODO(pcc): Try to make lld understand non-GNU-format pubnames
-        # sections (llvm.org/PR34820).
-        cflags += [ "-ggnu-pubnames" ]
-      }
-      ldflags += [ "-Wl,--gdb-index" ]
-    }
-  }
-}
-
-# Minimal symbols.
-# This config guarantees that the symbols needed for stack traces are kept;
-# these traces are shown to the user when a crash happens in unit tests running
-# on the buildbot.
-config("minimal_symbols") {
-  if (is_win) {
-    # Linker symbols for backtraces only.
-    cflags = []
-    ldflags = [ "/DEBUG" ]
-  } else {
-    cflags = []
-    if (target_cpu == "arm") {
-      # dump_syms has issues with dwarf4 on arm, https://crbug.com/744956
-      # TODO(thakis): Remove this again once dump_syms is fixed.
-      cflags += [ "-gdwarf-3" ]
-    }
-    cflags += [ "-g1" ]
-    ldflags = []
-    if (is_android && is_clang) {
-      # Android defaults to symbol_level=1 builds in production builds
-      # (https://crbug.com/648948), but clang, unlike gcc, doesn't emit
-      # DW_AT_linkage_name in -g1 builds. -fdebug-info-for-profiling enables
-      # that (and a bunch of other things we don't need), so that we get
-      # qualified names in stacks.
-      # TODO(thakis): Consider making clang emit DW_AT_linkage_name in -g1 mode;
-      #               failing that consider doing this on non-Android too.
-      cflags += [ "-fdebug-info-for-profiling" ]
-    }
-
-    # Note: -gsplit-dwarf implicitly turns on -g2 with clang, so don't pass it.
-    asmflags = cflags
-  }
-}
-
-# No symbols.
-config("no_symbols") {
-  if (!is_win) {
-    cflags = [ "-g0" ]
-    asmflags = cflags
-  }
-}
-
-# Default symbols.
-config("default_symbols") {
-  if (symbol_level == 0) {
-    configs = [ ":no_symbols" ]
-  } else if (symbol_level == 1) {
-    configs = [ ":minimal_symbols" ]
-  } else if (symbol_level == 2) {
-    configs = [ ":symbols" ]
-  } else {
-    assert(false)
-  }
-
-  # This config is removed by the base unittests APK.
-  if (is_android && is_clang && strip_debug_info) {
-    configs += [ ":strip_debug" ]
-  }
-}
-
-config("strip_debug") {
-  if (!defined(ldflags)) {
-    ldflags = []
-  }
-  ldflags += [ "-Wl,--strip-debug" ]
-}
-
-if (is_ios || is_mac) {
-  # On Mac and iOS, this enables support for ARC (automatic ref-counting).
-  # See http://clang.llvm.org/docs/AutomaticReferenceCounting.html.
-  config("enable_arc") {
-    common_flags = [ "-fobjc-arc" ]
-    cflags_objc = common_flags
-    cflags_objcc = common_flags
-  }
-}
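
A sketch of how an Objective-C target might opt into ARC via this config
(target and source names are hypothetical):

  source_set("ios_ui") {
    sources = [ "view_controller.mm" ]  # Hypothetical Objective-C++ source.
    configs += [ "//build/config/compiler:enable_arc" ]
  }
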
diff --git a/build/config/compiler/compiler.gni b/build/config/compiler/compiler.gni
deleted file mode 100644
index 0b13302..0000000
--- a/build/config/compiler/compiler.gni
+++ /dev/null
@@ -1,235 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-if (is_android) {
-  import("//build/config/android/abi.gni")
-}
-import("//build/config/arm.gni")
-import("//build/config/chrome_build.gni")
-import("//build/config/chromecast_build.gni")
-import("//build/config/compiler/pgo/pgo.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/toolchain/goma.gni")
-import("//build/toolchain/toolchain.gni")
-import("//build_overrides/build.gni")
-
-declare_args() {
-  # How many symbols to include in the build. This affects the performance of
-  # the build since the symbols are large and dealing with them is slow.
-  #   2 means regular build with symbols.
-  #   1 means minimal symbols, usually enough for backtraces only. Symbols
-  #     with internal linkage (static functions or those in anonymous
-  #     namespaces) may not appear when using this level.
-  #   0 means no symbols.
-  #   -1 means auto-set according to debug/release and platform.
-  symbol_level = -1
-
-  # Compile in such a way as to enable profiling of the generated code. For
-  # example, don't omit the frame pointer and leave in symbols.
-  enable_profiling = false
-
-  # use_debug_fission: whether to use split DWARF debug info
-  # files. This can reduce link time significantly, but is incompatible
-  # with some utilities such as icecc and ccache. Requires gold and
-  # gcc >= 4.8 or clang.
-  # http://gcc.gnu.org/wiki/DebugFission
-  #
-  # This is a placeholder value indicating that the code below should set
-  # the default.  This is necessary to delay the evaluation of the default
-  # value expression until after its input values such as use_gold have
-  # been set, e.g. by a toolchain_args() block.
-  use_debug_fission = "default"
-
-  # Enables support for ThinLTO, which links 3x-10x faster than full LTO. See
-  # also http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html
-  use_thin_lto = is_cfi
-
-  # Tell VS to create a PDB that references information in .obj files rather
-  # than copying it all. This should improve linker performance. mspdbcmf.exe
-  # can be used to convert a fastlink pdb to a normal one.
-  is_win_fastlink = false
-
-  # Whether or not we should turn on incremental WPO. Only affects the VS
-  # Windows build.
-  use_incremental_wpo = false
-
-  # Whether or not we should use position independent code.
-  use_pic = true
-
-  # Whether we're using a sample profile collected on an architecture different
-  # than the one we're compiling for.
-  #
-  # It's currently not possible to collect AFDO profiles on anything but
-  # x86{,_64}.
-  using_mismatched_sample_profile = current_cpu != "x64" && current_cpu != "x86"
-}
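
These arguments are normally set in the output directory's args.gn (edited
with `gn args <outdir>`). A minimal sketch for a developer build, assuming a
gold or clang toolchain so that split DWARF applies, per the comment above:

  # args.gn (hypothetical developer configuration)
  symbol_level = 1          # Minimal symbols, enough for backtraces.
  use_debug_fission = true  # Split DWARF to speed up linking.
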
-
-assert(!is_cfi || use_thin_lto, "CFI requires ThinLTO")
-
-# Exclude unwind tables for official builds as unwinding can be done from stack
-# dumps produced by Crashpad at a later time "offline" in the crash server.
-# For unofficial (e.g. development) builds and non-Chrome branded (e.g. Cronet
-# which doesn't use Crashpad, crbug.com/479283) builds it's useful to be able
-# to unwind at runtime.
-exclude_unwind_tables =
-    (is_chrome_branded && is_official_build) ||
-    (is_chromecast && !is_cast_desktop_build && !is_debug && !is_fuchsia)
-
-# If true, optimize for size. Does not affect windows builds.
-# Linux & Mac favor speed over size.
-# TODO(brettw) it's weird that Mac and desktop Linux are different. We should
-# explore favoring size over speed in this case as well.
-optimize_for_size = is_android || is_ios
-
-declare_args() {
-  # Whether we should consider the profile we're using to be accurate. Accurate
-  # profiles have the benefit of (potentially substantial) binary size
-  # reductions, by instructing the compiler to optimize cold and uncovered
-  # functions heavily for size. This often comes at the cost of performance.
-  sample_profile_is_accurate = optimize_for_size
-}
-
-# Determine whether to enable or disable frame pointers, based on the platform
-# and build arguments.
-if (is_mac || is_ios || is_linux) {
-  enable_frame_pointers = true
-} else if (is_win) {
-  # 64-bit Windows ABI doesn't support frame pointers.
-  if (current_cpu == "x64") {
-    enable_frame_pointers = false
-  } else {
-    enable_frame_pointers = true
-  }
-} else if (is_chromeos) {
-  # ChromeOS generally prefers frame pointers, to support CWP.
-  # However, Clang does not currently generate usable frame pointers in ARM
-  # 32-bit builds (https://bugs.llvm.org/show_bug.cgi?id=18505) so disable them
-  # there to avoid the unnecessary overhead.
-  enable_frame_pointers = current_cpu != "arm"
-} else if (is_android) {
-  enable_frame_pointers =
-      enable_profiling ||
-      # Ensure that stacks from arm64 crash dumps are usable (crbug.com/391706).
-      current_cpu == "arm64" ||
-      # For x86 Android, unwind tables are huge without frame pointers
-      # (crbug.com/762629). Enabling frame pointers grows the code size slightly
-      # but overall shrinks binaries considerably by avoiding huge unwind
-      # tables.
-      (current_cpu == "x86" && !exclude_unwind_tables && optimize_for_size) ||
-      using_sanitizer
-} else {
-  # Explicitly ask for frame pointers, otherwise:
-  # * Stacks may be missing for sanitizer and profiling builds.
-  # * Debug tcmalloc can crash (crbug.com/636489).
-  enable_frame_pointers = using_sanitizer || enable_profiling || is_debug
-}
-
-# In general assume that if we have frame pointers then we can use them to
-# unwind the stack. However, this requires that they are enabled by default for
-# most translation units, that they are emitted correctly, and that the
-# compiler or platform provides a way to access them.
-can_unwind_with_frame_pointers = enable_frame_pointers
-if (current_cpu == "arm" && arm_use_thumb) {
-  # We cannot currently unwind ARM Thumb frame pointers correctly.
-  # See https://bugs.llvm.org/show_bug.cgi?id=18505
-  can_unwind_with_frame_pointers = false
-} else if (is_win) {
-  # Windows 32-bit does provide frame pointers, but the compiler does not
-  # provide intrinsics to access them, so we don't use them.
-  can_unwind_with_frame_pointers = false
-}
-
-assert(!can_unwind_with_frame_pointers || enable_frame_pointers)
-
-# Unwinding with a CFI table is only possible in static library builds and is
-# required only when frame pointers are not enabled.
-can_unwind_with_cfi_table = is_android && !is_component_build &&
-                            !enable_frame_pointers && current_cpu == "arm"
-
-declare_args() {
-  # Whether or not the official builds should be built with full WPO. Enabled by
-  # default for the PGO and the x64 builds.
-  if (chrome_pgo_phase > 0) {
-    full_wpo_on_official = true
-  } else {
-    full_wpo_on_official = false
-  }
-}
-
-declare_args() {
-  # Set to true to use lld, the LLVM linker. This flag may be used on Windows,
-  # Linux or Fuchsia.
-  # TODO(pcc): Enable lld on more architectures on Linux. E.g. we probably need
-  # to fix some of crbug.com/742655 to enable it on ARM.
-  use_lld =
-      is_clang &&
-      (is_win || is_fuchsia || (use_thin_lto && target_os != "chromeos") ||
-       (is_linux && current_cpu == "x64" && target_os != "chromeos") ||
-       (is_android && (current_cpu != "arm" || arm_version >= 7) &&
-        current_cpu != "mipsel" && current_cpu != "mips64el"))
-}
-
-declare_args() {
-  # Whether to use the gold linker from binutils instead of lld or bfd.
-  use_gold = !use_lld && !(is_chromecast && is_linux &&
-                           (current_cpu == "arm" || current_cpu == "mipsel")) &&
-             ((is_linux && (current_cpu == "x64" || current_cpu == "x86" ||
-                            current_cpu == "arm" || current_cpu == "mipsel" ||
-                            current_cpu == "mips64el")) ||
-              (is_android && (current_cpu == "x86" || current_cpu == "x64" ||
-                              current_cpu == "arm" || current_cpu == "arm64")))
-}
-
-# If it wasn't manually set, set to an appropriate default.
-assert(symbol_level >= -1 && symbol_level <= 2, "Invalid symbol_level")
-if (symbol_level == -1) {
-  if (is_android && use_order_profiling) {
-    # With instrumentation enabled, debug info puts libchrome.so over 4gb, which
-    # causes the linker to produce an invalid ELF. http://crbug.com/574476
-    symbol_level = 0
-  } else if (is_android && !is_component_build &&
-             !(android_64bit_target_cpu && !build_apk_secondary_abi)) {
-    # Reduce symbol level when it will cause invalid elf files to be created
-    # (due to file size). https://crbug.com/648948.
-    symbol_level = 1
-  } else if (is_win && use_goma && !is_clang) {
-    # goma doesn't support PDB files, so we disable symbols during goma
-    # compilation because otherwise the redundant debug information generated
-    # by visual studio (repeated in every .obj file) makes linker
-    # memory consumption and link times unsustainable (crbug.com/630074).
-    # Clang on windows does not have this issue.
-    # If you use is_win_fastlink = true or lld or clang then you can set
-    # symbol_level = 2 when using goma.
-    symbol_level = 1
-  } else if ((!is_nacl && !is_linux && !is_fuchsia) || is_debug ||
-             is_official_build || is_chromecast) {
-    # Linux builds are slower when symbols are part of the target binary,
-    # whereas Mac and Windows keep them separate, so in release Linux builds
-    # default them off, but keep them on for official builds and Chromecast
-    # builds.
-    symbol_level = 2
-  } else if (using_sanitizer) {
-    # Sanitizers require symbols for filename suppressions to work.
-    symbol_level = 1
-  } else {
-    symbol_level = 0
-  }
-} else if (symbol_level == 2) {
-  if (is_win) {
-    # See crbug.com/630074
-    assert(is_win_fastlink || use_lld || !use_goma || is_clang,
-           "VC++ goma builds that use symbol_level 2 must use " +
-               "is_win_fastlink or use_lld.")
-  }
-}
-
-# Assert that the configuration isn't going to hit https://crbug.com/648948.
-# An exception is made when target_os == "chromeos" as we only use the Android
-# toolchain there to build relatively small binaries.
-assert(ignore_elf32_limitations || !is_android || target_os == "chromeos" ||
-           (android_64bit_target_cpu && !build_apk_secondary_abi) ||
-           is_component_build || symbol_level < 2,
-       "Android 32-bit non-component builds cannot have symbol_level=2 " +
-           "due to 4GiB file size limit, see https://crbug.com/648948. " +
-           "If you really want to try this out, " +
-           "set ignore_elf32_limitations=true.")
diff --git a/build/config/compiler/pgo/BUILD.gn b/build/config/compiler/pgo/BUILD.gn
deleted file mode 100644
index bb147a6..0000000
--- a/build/config/compiler/pgo/BUILD.gn
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/clang/clang.gni")
-import("//build/config/compiler/compiler.gni")
-import("//build/config/compiler/pgo/pgo.gni")
-
-# Configuration that enables PGO instrumentation.
-config("pgo_instrumentation_flags") {
-  visibility = [ ":default_pgo_flags" ]
-  cflags = []
-  ldflags = []
-
-  # Only add flags when chrome_pgo_phase == 1, so that variables we would use
-  # are not required to be defined when we're not actually using PGO.
-  if (chrome_pgo_phase == 1) {
-    if (is_clang) {
-      cflags = [ "-fprofile-instr-generate" ]
-      if (is_win) {
-        # Normally, we pass -fprofile-instr-generate to the compiler and it
-        # automatically passes the right flags to the linker.
-        # However, on Windows, we call the linker directly, without going
-        # through the compiler driver. This means we need to pass the right
-        # flags ourselves.
-        _clang_rt_base_path =
-            "$clang_base_path/lib/clang/$clang_version/lib/windows"
-        if (target_cpu == "x86") {
-          _clang_rt_suffix = "-i386.lib"
-        } else if (target_cpu == "x64") {
-          _clang_rt_suffix = "-x86_64.lib"
-        }
-        assert(_clang_rt_suffix != "", "target CPU $target_cpu not supported")
-        ldflags += [ "$_clang_rt_base_path/clang_rt.profile$_clang_rt_suffix" ]
-      } else {
-        ldflags += [ "-fprofile-instr-generate" ]
-      }
-    } else if (is_win) {
-      ldflags = [
-        # In MSVC, we must use /LTCG when using PGO.
-        "/LTCG",
-
-        # Make sure that enough memory gets allocated for the PGO profiling
-        # buffers, and also cap this memory. Usually a PGI-instrumented build
-        # of chrome_child.dll requires ~55MB of memory for storing its
-        # counters, etc. Normally the linker should automatically choose an
-        # appropriate amount of memory, but it doesn't always make a good
-        # estimate and sometimes allocates too little or too much (so the
-        # instrumented image fails to start). Making sure that the buffer has
-        # a size in the [128 MB, 512 MB] range should prevent this from
-        # happening.
-        "/GENPROFILE:MEMMIN=134217728",
-        "/GENPROFILE:MEMMAX=536870912",
-        "/PogoSafeMode",
-      ]
-    }
-  }
-}
-
-# Configuration that enables optimization using profile data.
-config("pgo_optimization_flags") {
-  visibility = [ ":default_pgo_flags" ]
-  cflags = []
-  ldflags = []
-
-  # Only add flags when chrome_pgo_phase == 2, so that variables we would use
-  # are not required to be defined when we're not actually using PGO.
-  if (chrome_pgo_phase == 2) {
-    if (is_clang) {
-      assert(pgo_data_path != "",
-             "Please set pgo_data_path to point at the profile data")
-      cflags += [
-        "-fprofile-instr-use=$pgo_data_path",
-
-        # It's possible to have some profile data legitimately missing,
-        # and at least some profile data always ends up being considered
-        # out of date, so make sure we don't error for those cases.
-        "-Wno-profile-instr-unprofiled",
-        "-Wno-error=profile-instr-out-of-date",
-      ]
-    } else if (is_win) {
-      ldflags += [
-        # In MSVC, we must use /LTCG when using PGO.
-        "/LTCG",
-        "/USEPROFILE",
-      ]
-    }
-  }
-}
-
-# Applies flags necessary when profile-guided optimization is used.
-# Flags are only added if PGO is enabled, so that this config is safe to
-# include by default.
-config("default_pgo_flags") {
-  if (chrome_pgo_phase == 0) {
-    # Nothing. This config should be a no-op when chrome_pgo_phase == 0.
-  } else if (chrome_pgo_phase == 1) {
-    configs = [ ":pgo_instrumentation_flags" ]
-  } else if (chrome_pgo_phase == 2) {
-    configs = [ ":pgo_optimization_flags" ]
-  }
-}
diff --git a/build/config/compiler/pgo/pgo.gni b/build/config/compiler/pgo/pgo.gni
deleted file mode 100644
index 5da004d..0000000
--- a/build/config/compiler/pgo/pgo.gni
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-declare_args() {
-  # Specify the current PGO phase.
-  # Here are the different values that can be used:
-  #     0 : Means that PGO is turned off.
-  #     1 : Used during the PGI (instrumentation) phase.
-  #     2 : Used during the PGO (optimization) phase.
-  #
-  # TODO(sebmarchand): Add support for the PGU (update) phase.
-  chrome_pgo_phase = 0
-
-  # When using chrome_pgo_phase = 2, read profile data from this path.
-  pgo_data_path = ""
-}
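
A hedged args.gn sketch for the two phases described above; these are two
alternative configurations, and the profile path is a placeholder, not a real
file in this change:

  # args.gn for the instrumentation phase (hypothetical configuration):
  chrome_pgo_phase = 1

  # args.gn for the optimization phase (placeholder profile path):
  chrome_pgo_phase = 2
  pgo_data_path = "//path/to/profile.profdata"
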
diff --git a/build/config/coverage/BUILD.gn b/build/config/coverage/BUILD.gn
deleted file mode 100644
index 6a6ec88..0000000
--- a/build/config/coverage/BUILD.gn
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/coverage/coverage.gni")
-
-config("default_coverage") {
-  if (use_clang_coverage) {
-    cflags = [
-      "-fprofile-instr-generate",
-      "-fcoverage-mapping",
-      "-fno-use-cxa-atexit",
-
-      # The following experimental flag removes unused header functions from
-      # the coverage mapping data embedded in the test binaries; the resulting
-      # reduction in binary size makes it possible to build Chrome's large
-      # unit test targets on macOS. Please refer to crbug.com/796290 for more
-      # details.
-      "-mllvm",
-      "-limited-coverage-experimental=true",
-    ]
-    ldflags = [ "-fprofile-instr-generate" ]
-  }
-}
diff --git a/build/config/coverage/OWNERS b/build/config/coverage/OWNERS
deleted file mode 100644
index 70d27b6..0000000
--- a/build/config/coverage/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-inferno@chromium.org
-mmoroz@chromium.org
-ochang@chromium.org
diff --git a/build/config/coverage/coverage.gni b/build/config/coverage/coverage.gni
deleted file mode 100644
index 61716dd..0000000
--- a/build/config/coverage/coverage.gni
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/toolchain/toolchain.gni")
-
-declare_args() {
-  # Enable Clang's Source-based Code Coverage.
-  use_clang_coverage = false
-}
-
-assert(!use_clang_coverage || is_clang,
-       "Clang Source-based Code Coverage requires clang.")
diff --git a/build/config/crypto.gni b/build/config/crypto.gni
deleted file mode 100644
index a3d52de..0000000
--- a/build/config/crypto.gni
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file declares build flags for the SSL library configuration.
-#
-# TODO(brettw) this should probably be moved to src/crypto or somewhere, and
-# the global build dependency on it should be removed.
-#
-# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't
-# required. See the declare_args block of BUILDCONFIG.gn for advice on how
-# to set up feature flags.
-
-# True if NSS is used for certificate handling.
-use_nss_certs = is_linux
diff --git a/build/config/dcheck_always_on.gni b/build/config/dcheck_always_on.gni
deleted file mode 100644
index e58cfce..0000000
--- a/build/config/dcheck_always_on.gni
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-declare_args() {
-  # Enables DCHECKs to be built-in, but to default to being non-fatal/log-only.
-  # DCHECKS can then be set as fatal/non-fatal via the DCheckIsFatal feature.
-  # See crbug.com/596231 for details on how this is used.
-  dcheck_is_configurable = false
-}
-
-declare_args() {
-  # Set to true to enable dcheck in Release builds.
-  dcheck_always_on = dcheck_is_configurable
-}
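
A minimal args.gn sketch enabling DCHECKs in a release build, using the
arguments declared above:

  # args.gn (hypothetical configuration)
  is_debug = false
  dcheck_always_on = true
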
diff --git a/build/config/features.gni b/build/config/features.gni
deleted file mode 100644
index 09cddfd..0000000
--- a/build/config/features.gni
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# =============================================
-#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
-# =============================================
-#
-# These flags are effectively global. Your feature flag should go near the
-# code it controls. Most of these items are here now because they control
-# legacy global #defines passed to the compiler (now replaced with generated
-# buildflag headers -- see //build/buildflag_header.gni).
-#
-# There is more advice on where to put build flags in the "Build flag" section
-# of //build/config/BUILDCONFIG.gn.
-
-import("//build/config/chrome_build.gni")
-import("//build/config/chromecast_build.gni")
-if (is_android) {
-  import("//build/config/android/config.gni")
-}
-
-declare_args() {
-  # Enables proprietary codecs and demuxers; e.g. H264, AAC, MP3, and MP4.
-  # We always build Google Chrome and Chromecast with proprietary codecs.
-  #
-  # Note: this flag is used by WebRTC which is DEPSed into Chrome. Moving it
-  # out of //build will require using the build_overrides directory.
-  proprietary_codecs = is_chrome_branded || is_chromecast
-
-  # The safe_browsing_mode variable controls the build-time configuration of
-  # the safe browsing feature. Safe browsing can be compiled at 3 different
-  # levels:
-  # 0 disables it, 1 enables it fully, and 2 enables mobile protection via an
-  # external API.
-  if (is_ios || is_chromecast) {
-    safe_browsing_mode = 0
-  } else if (is_android) {
-    safe_browsing_mode = 2
-  } else {
-    safe_browsing_mode = 1
-  }
-
-  # Set to true to make a build that disables activation of field trial tests
-  # specified in testing/variations/fieldtrial_testing_config_*.json.
-  # Note: this setting is ignored if is_chrome_branded.
-  fieldtrial_testing_like_official_build = is_chrome_branded
-
-  # libudev usage. This currently only affects the content layer.
-  use_udev = is_linux && !is_chromecast
-
-  use_dbus = is_linux && !is_chromecast
-
-  use_gio = is_linux && !is_chromeos && !is_chromecast
-}
-#
-# =============================================
-#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
-# =============================================
-#
-# See comment at the top.
diff --git a/build/config/freetype/BUILD.gn b/build/config/freetype/BUILD.gn
deleted file mode 100644
index 0ba9027..0000000
--- a/build/config/freetype/BUILD.gn
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/features.gni")
-import("//build/config/freetype/freetype.gni")
-
-group("freetype") {
-  if (use_system_freetype) {
-    public_configs = [ "//build/linux:freetype_from_pkgconfig" ]
-  } else {
-    public_deps = [
-      "//third_party:freetype_harfbuzz",
-    ]
-  }
-}
diff --git a/build/config/freetype/OWNERS b/build/config/freetype/OWNERS
deleted file mode 100644
index 3277f87..0000000
--- a/build/config/freetype/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-bungeman@chromium.org
-drott@chromium.org
diff --git a/build/config/freetype/freetype.gni b/build/config/freetype/freetype.gni
deleted file mode 100644
index b4eced2..0000000
--- a/build/config/freetype/freetype.gni
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-declare_args() {
-  # Blink needs a recent and properly build-configured FreeType version to
-  # support OpenType variations, color emoji and avoid security bugs. By default
-  # we ship and link such a version as part of Chrome. For distributions that
-  # prefer to keep linking to the version from the system, FreeType must be
-  # newer than version 2.7.1 and have color bitmap support compiled in. WARNING:
-  # System FreeType configurations other than as described WILL INTRODUCE TEXT
-  # RENDERING AND SECURITY REGRESSIONS.
-  use_system_freetype = false
-}
diff --git a/build/config/fuchsia/BUILD.gn b/build/config/fuchsia/BUILD.gn
deleted file mode 100644
index a861ea8..0000000
--- a/build/config/fuchsia/BUILD.gn
+++ /dev/null
@@ -1,139 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/fuchsia/config.gni")
-import("//build/config/fuchsia/rules.gni")
-import("//build/config/sysroot.gni")
-
-assert(is_fuchsia)
-assert(!is_posix)
-
-config("compiler") {
-  sdk_version_file = rebase_path("$fuchsia_sdk/.hash")
-  sdk_version = read_file(sdk_version_file, "trim string")
-  defines = [
-    # To force full builds after SDK updates in case of ABI changes.
-    "FUCHSIA_SDK_VERSION=$sdk_version",
-  ]
-  cflags = []
-  ldflags = []
-  if (current_cpu == "arm64") {
-    cflags += [ "--target=aarch64-fuchsia" ]
-    ldflags += [ "--target=aarch64-fuchsia" ]
-  } else if (current_cpu == "x64") {
-    cflags += [ "--target=x86_64-fuchsia" ]
-    ldflags += [ "--target=x86_64-fuchsia" ]
-  } else {
-    assert(false, "Unsupported architecture")
-  }
-  asmflags = cflags
-
-  ldflags += [
-    # TODO(thakis): Once Fuchsia's libclang_rt.builtin no longer has upstream
-    # patches, we might want to make tools/clang/scripts/update.py build it
-    # and bundle it with the clang package instead of using the library from
-    # the SDK, https://crbug.com/724204
-    # Note: Intentionally 7.0.0 instead of $clang_version because the clang
-    # version of the toolchain_libs directory in the Fuchsia SDK can be
-    # different from the version of Chromium's clang.
-    "-resource-dir",
-    rebase_path(fuchsia_sdk, root_build_dir) + "/toolchain_libs/clang/7.0.0",
-
-    # The stack defaults to 256k on Fuchsia (see
-    # https://fuchsia.googlesource.com/zircon/+/master/system/private/zircon/stack.h#9),
-    # but on other platforms it's much higher, so a variety of code assumes more
-    # will be available. Raise to 8M which matches e.g. macOS.
-    "-Wl,-z,stack-size=0x800000",
-
-    # We always want fdio or else e.g. stdio wouldn't be initialized if fdio
-    # happens to not be directly referenced. The common POSIX-y compiler setup
-    # uses -Wl,--as-needed which drops it if it's simply "-lfdio" from a libs
-    # setting. Disable --as-needed, add fdio, and then set back to --as-needed.
-    # https://crbug.com/731217.
-    "-Wl,--no-as-needed",
-    "-lfdio",
-    "-Wl,--as-needed",
-  ]
-
-  # Add SDK lib dir for -lfdio above.
-  lib_dirs = [ rebase_path("${fuchsia_sdk}/arch/${current_cpu}/lib") ]
-
-  # TODO(crbug.com/821951): Clang enables SafeStack by default when targeting
-  # Fuchsia, but it breaks some tests, notably in V8.
-  cflags += [ "-fno-sanitize=safe-stack" ]
-
-  libs = [ "zircon" ]
-}
-
-# Writes an extended version of fvm.blk to fvm.extended.blk.
-blobstore_extended_path = "$root_out_dir/fvm.extended.blk"
-action("blobstore_extended_fvm") {
-  # The file is grown by 1GB, which should be large enough to hold packaged
-  # binaries and assets. The value should be increased if the size becomes a
-  # limitation in the future.
-  _extend_size = "1073741824"  # 1GB
-
-  if (current_cpu == "arm64") {
-    _target_dir = "//third_party/fuchsia-sdk/sdk/target/aarch64"
-  } else if (current_cpu == "x64") {
-    _target_dir = "//third_party/fuchsia-sdk/sdk/target/x86_64"
-  }
-
-  script = "//build/config/fuchsia/extend_fvm.py"
-
-  inputs = [
-    "${_target_dir}/fvm.blk",
-    "${_target_dir}/bootdata-blob.bin",
-    "${_target_dir}/zircon.bin",
-  ]
-  outputs = [
-    blobstore_extended_path,
-  ]
-
-  args = [
-    rebase_path("${fuchsia_sdk}/tools/fvm"),
-    rebase_path("${_target_dir}/fvm.blk"),
-    rebase_path(blobstore_extended_path),
-    _extend_size,
-  ]
-}
-
-#  _________________________________________
-# / Create a compressed copy-on-write (COW) \
-# \ image based on fvm.blk.                 /
-#  -----------------------------------------
-#         \   ^__^
-#          \  (oo)\_______
-#             (__)\       )\/\
-#                 ||----w |
-#                 ||     ||
-action("blobstore_extended_qcow2") {
-  script = "//build/gn_run_binary.py"
-
-  deps = [
-    ":blobstore_extended_fvm",
-  ]
-  inputs = [
-    blobstore_extended_path,
-  ]
-  outputs = [
-    blobstore_qcow_path,
-  ]
-  data = [
-    blobstore_qcow_path,
-  ]
-
-  args = [
-    rebase_path("//third_party/fuchsia-sdk/sdk/qemu/bin/qemu-img",
-                root_build_dir),
-    "convert",
-    "-f",
-    "raw",
-    "-O",
-    "qcow2",
-    "-c",
-    rebase_path(blobstore_extended_path),
-    rebase_path(blobstore_qcow_path),
-  ]
-}
diff --git a/build/config/fuchsia/OWNERS b/build/config/fuchsia/OWNERS
deleted file mode 100644
index e7034ea..0000000
--- a/build/config/fuchsia/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-file://build/fuchsia/OWNERS
diff --git a/build/config/fuchsia/build_manifest.py b/build/config/fuchsia/build_manifest.py
deleted file mode 100644
index 0fc8b74..0000000
--- a/build/config/fuchsia/build_manifest.py
+++ /dev/null
@@ -1,228 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Creates a archive manifest used for Fuchsia package generation.
-
-Arguments:
-  root_dir: The absolute path to the Chromium source tree root.
-
-  out_dir: The absolute path to the Chromium build directory.
-
-  app_name: The filename of the package's executable target.
-
-  runtime_deps: The path to the GN runtime deps file.
-
-  output_path: The path of the manifest file which will be written.
-"""
-
-import json
-import os
-import re
-import subprocess
-import sys
-import tempfile
-
-
-def ReadDynamicLibDeps(paths):
-  """Returns a list of NEEDED libraries read from a binary's ELF header."""
-
-  LIBRARY_RE = re.compile(r'.*\(NEEDED\)\s+Shared library: \[(?P<lib>.*)\]')
-  elfinfo = subprocess.check_output(['readelf', '-d'] + paths,
-                                    stderr=open(os.devnull, 'w'))
-  libs = []
-  for line in elfinfo.split('\n'):
-    match = LIBRARY_RE.match(line.rstrip())
-    if match:
-      lib = match.group('lib')
-
-      # Skip libzircon.so, as it is supplied by the OS loader.
-      if lib != 'libzircon.so':
-        libs.append(match.group('lib'))
-
-  return libs
-
-
-def ComputeTransitiveLibDeps(executable_path, available_libs):
-  """Returns a set representing the library dependencies of |executable_path|,
-  the dependencies of its dependencies, and so on.
-
-  A list of candidate library filesystem paths is passed using |available_libs|
-  to help with resolving full paths from the short ELF header filenames."""
-
-  # Stack of binaries (libraries, executables) awaiting traversal.
-  to_visit = [executable_path]
-
-  # The computed set of visited transitive dependencies.
-  deps = set()
-
-  while to_visit:
-    deps = deps.union(to_visit)
-
-    # Resolve the full paths for all of |cur_path|'s NEEDED libraries.
-    dep_paths = {available_libs[dep]
-                 for dep in ReadDynamicLibDeps(list(to_visit))}
-
-    # Add newly discovered dependencies to the pending traversal stack.
-    to_visit = dep_paths.difference(deps)
-
-  return deps
-
-
-def EnumerateDirectoryFiles(path):
-  """Returns a flattened list of all files contained under |path|."""
-
-  output = set()
-  for dirname, _, files in os.walk(path):
-    output = output.union({os.path.join(dirname, f) for f in files})
-  return output
-
-
-def MakePackagePath(file_path, roots):
-  """Computes a path for |file_path| that is relative to one of the directory
-  paths in |roots|.
-
-  file_path: The absolute file path to relativize.
-  roots: A list of absolute directory paths which may serve as a relative root
-         for |file_path|. At least one path must contain |file_path|.
-         Overlapping roots are permitted; the deepest matching root will be
-         chosen.
-
-  Examples:
-
-  >>> MakePackagePath('/foo/bar.txt', ['/foo/'])
-  'bar.txt'
-
-  >>> MakePackagePath('/foo/dir/bar.txt', ['/foo/'])
-  'dir/bar.txt'
-
-  >>> MakePackagePath('/foo/out/Debug/bar.exe', ['/foo/', '/foo/out/Debug/'])
-  'bar.exe'
-  """
-
-  # Prevents greedily matching against a shallow path when a deeper, better
-  # matching path exists.
-  roots.sort(key=len, reverse=True)
-
-  for next_root in roots:
-    if not next_root.endswith(os.sep):
-      next_root += os.sep
-
-    if file_path.startswith(next_root):
-      relative_path = file_path[len(next_root):]
-
-      # Move all dynamic libraries (ending in .so or .so.<number>) to lib/.
-      if re.search('.*\.so(\.\d+)?$', file_path):
-        relative_path = 'lib/' + os.path.basename(relative_path)
-
-      return relative_path
-
-  raise Exception('Error: no matching root paths found for \'%s\'.' % file_path)
-
-
-def _GetStrippedPath(bin_path):
-  """Finds the stripped version of the binary |bin_path| in the build
-  output directory."""
-
-  # Skip the resolution step for binaries that don't have stripped counterparts,
-  # like system libraries or other libraries built outside the Chromium build.
-  if not '.unstripped' in bin_path:
-    return bin_path
-
-  return os.path.normpath(os.path.join(bin_path,
-                                       os.path.pardir,
-                                       os.path.pardir,
-                                       os.path.basename(bin_path)))
-
-
-def _IsBinary(path):
-  """Checks if the file at |path| is an ELF executable by inspecting its FourCC
-  header."""
-
-  with open(path, 'rb') as f:
-    file_tag = f.read(4)
-  return file_tag == '\x7fELF'
-
-
-def BuildManifest(root_dir, out_dir, app_name, app_filename,
-                  sandbox_policy_path, runtime_deps_file, depfile_path,
-                  dynlib_paths, output_path):
-  with open(output_path, 'w') as manifest, open(depfile_path, 'w') as depfile:
-    # Process the runtime deps file for file paths, recursively walking
-    # directories as needed. File paths are stored in absolute form,
-    # so that MakePackagePath() may relativize to either the source root or
-    # output directory.
-    # runtime_deps may contain duplicate paths, so use a set for
-    # de-duplication.
-    expanded_files = set()
-    for next_path in open(runtime_deps_file, 'r'):
-      next_path = next_path.strip()
-      if os.path.isdir(next_path):
-        for root, _, files in os.walk(next_path):
-          for current_file in files:
-            if current_file.startswith('.'):
-              continue
-            expanded_files.add(os.path.abspath(
-                os.path.join(root, current_file)))
-      else:
-        expanded_files.add(os.path.abspath(next_path))
-
-    # Get set of dist libraries available for dynamic linking.
-    dist_libs = set()
-    for next_dir in dynlib_paths.split(','):
-      dist_libs = dist_libs.union(EnumerateDirectoryFiles(next_dir))
-
-    # Compute the set of dynamic libraries used by the application or its
-    # transitive dependencies (dist libs and components), and merge the result
-    # with |expanded_files| so that they are included in the manifest.
-    expanded_files = expanded_files.union(
-        ComputeTransitiveLibDeps(
-            app_filename,
-            {os.path.basename(f): f for f in expanded_files.union(dist_libs)}))
-
-    # Format and write out the manifest contents.
-    app_found = False
-    for current_file in expanded_files:
-      if _IsBinary(current_file):
-        current_file = _GetStrippedPath(current_file)
-
-      in_package_path = MakePackagePath(os.path.join(out_dir, current_file),
-                                        [root_dir, out_dir])
-      if in_package_path == app_filename:
-        in_package_path = 'bin/app'
-        app_found = True
-
-      # The source path is relativized so that it can be used on multiple
-      # environments with differing parent directory structures,
-      # e.g. builder bots and swarming clients.
-      manifest.write('%s=%s\n' % (in_package_path,
-                                  os.path.relpath(current_file, out_dir)))
-
-      # Use libc.so's dynamic linker by aliasing libc.so to ld.so.1.
-      # Fuchsia always looks for the linker implementation in ld.so.1.
-      if os.path.basename(in_package_path) == 'libc.so':
-        manifest.write(
-            '%s=%s\n' % (os.path.dirname(in_package_path) + '/ld.so.1',
-                         os.path.relpath(current_file, out_dir)))
-
-    if not app_found:
-      raise Exception('Could not locate executable inside runtime_deps.')
-
-    with open(os.path.join(os.path.dirname(output_path), 'package'), 'w') \
-        as package_json:
-      json.dump({'version': '0', 'name': app_name}, package_json)
-      manifest.write('meta/package=%s\n' %
-                   os.path.relpath(package_json.name, out_dir))
-
-    manifest.write('meta/sandbox=%s\n' %
-                 os.path.relpath(os.path.join(root_dir, sandbox_policy_path),
-                                 out_dir))
-    depfile.write(
-        "%s: %s" % (os.path.relpath(output_path, out_dir),
-                    " ".join([os.path.relpath(f, out_dir)
-                              for f in expanded_files])))
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(BuildManifest(*sys.argv[1:]))
diff --git a/build/config/fuchsia/config.gni b/build/config/fuchsia/config.gni
deleted file mode 100644
index 52ca5e0..0000000
--- a/build/config/fuchsia/config.gni
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-assert(current_os == "fuchsia")
-
-declare_args() {
-  # Path to Fuchsia SDK.
-  fuchsia_sdk = "//third_party/fuchsia-sdk/sdk"
-}
-
-# Compute the arch-specific path to packages' dynamic library dependencies.
-if (current_cpu == "arm64") {
-  dist_libroot = fuchsia_sdk + "/arch/arm64/dist/"
-} else if (current_cpu == "x64") {
-  dist_libroot = fuchsia_sdk + "/arch/x64/dist/"
-} else {
-  assert(false, "No libraries available for architecture: $current_cpu")
-}
diff --git a/build/config/fuchsia/extend_fvm.py b/build/config/fuchsia/extend_fvm.py
deleted file mode 100644
index 44e5ee3..0000000
--- a/build/config/fuchsia/extend_fvm.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Copies a FVM file and extends it by a specified amount.
-
-Arg #1: path to 'fvm'.
-    #2: the path to the source fvm.blk.
-    #3: the path that the extended FVM file will be written to.
-    #4: the additional number of bytes to grow fvm.blk by."""
-
-import os
-import shutil
-import subprocess
-import sys
-
-def ExtendFVM(fvm_tool_path, src_path, dest_path, delta):
-  old_size = os.path.getsize(src_path)
-  new_size = old_size + int(delta)
-  shutil.copyfile(src_path, dest_path)
-  subprocess.check_call([fvm_tool_path, dest_path, 'extend', '--length',
-                         str(new_size)])
-  return 0
-
-if __name__ == '__main__':
-  sys.exit(ExtendFVM(*sys.argv[1:]))
diff --git a/build/config/fuchsia/package.gni b/build/config/fuchsia/package.gni
deleted file mode 100644
index 8afd31a..0000000
--- a/build/config/fuchsia/package.gni
+++ /dev/null
@@ -1,182 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/fuchsia/config.gni")
-import("//build/config/sysroot.gni")
-
-# Creates a Fuchsia .far package file.
-#
-# Parameters are:
-# package_name: The name of the package to build.
-# binary: The name of the executable which should be launched by the package.
-#         Will be renamed as "bin/app" in the package contents.
-# sandbox_policy: A path to the sandbox_policy applied to this package.
-# deps: A list of targets whose output will be included in the package.
-template("package") {
-  pkg = {
-    package_name = target_name
-    forward_variables_from(invoker, "*")
-  }
-  assert(defined(pkg.binary))
-
-  _pm_tool_path = "${fuchsia_sdk}/tools/pm"
-
-  _pkg_out_dir = "$root_out_dir/gen/" + get_label_info(pkg.package_name, "dir")
-  _runtime_deps_file = "$_pkg_out_dir/${pkg.package_name}.runtime_deps"
-  _manifest_file = "$_pkg_out_dir/${pkg.package_name}.archive_manifest"
-  _key_file = "$_pkg_out_dir/signing-key"
-  _meta_far_file = "$_pkg_out_dir/meta.far"
-  _combined_far_file = "$_pkg_out_dir/${pkg.package_name}-0.far"
-  _final_far_file = "$_pkg_out_dir/${pkg.package_name}.far"
-
-  _write_manifest_target = "${pkg.package_name}__write_manifest"
-  _generate_key_target = "${pkg.package_name}__genkey"
-  _package_target = "${pkg.package_name}__pkg"
-  _bundle_target = "${pkg.package_name}__bundle"
-
-  # Generates a manifest file based on the GN runtime deps
-  # suitable for "pm" tool consumption.
-  action(_write_manifest_target) {
-    _depfile = "${target_gen_dir}/${target_name}_stamp.d"
-
-    forward_variables_from(invoker,
-                           [
-                             "deps",
-                             "public_deps",
-                             "testonly",
-                           ])
-
-    script = "//build/config/fuchsia/build_manifest.py"
-
-    inputs = [
-      _runtime_deps_file,
-      "//build/config/fuchsia/sandbox_policy",
-    ]
-
-    outputs = [
-      _manifest_file,
-    ]
-
-    data_deps = pkg.deps
-
-    # Use a depfile to trigger package rebuilds if any of the files (static
-    # assets, shared libraries, etc.) included by the package have changed.
-    depfile = _depfile
-
-    args = [
-      rebase_path("//"),
-      rebase_path(root_out_dir),
-      pkg.package_name,
-      pkg.binary,
-      rebase_path(pkg.sandbox_policy),
-      rebase_path(_runtime_deps_file),
-      rebase_path(_depfile),
-      rebase_path(dist_libroot) + "," + rebase_path("${sysroot}/dist"),
-      rebase_path(_manifest_file),
-    ]
-
-    write_runtime_deps = _runtime_deps_file
-  }
-
-  # Generates a signing key to use for building the package.
-  action(_generate_key_target) {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    script = "//build/gn_run_binary.py"
-
-    outputs = [
-      _key_file,
-    ]
-
-    args = [
-      rebase_path(_pm_tool_path, root_build_dir),
-      "-k",
-      rebase_path(_key_file),
-      "genkey",
-    ]
-  }
-
-  # Creates a signed Fuchsia metadata package.
-  action(_package_target) {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    script = "//build/gn_run_binary.py"
-
-    deps = [
-      ":$_generate_key_target",
-      ":$_write_manifest_target",
-    ]
-
-    inputs = [
-      _key_file,
-    ]
-
-    outputs = [
-      _meta_far_file,
-    ]
-
-    args = [
-      rebase_path(_pm_tool_path, root_build_dir),
-      "-o",
-      rebase_path(_pkg_out_dir),
-      "-k",
-      rebase_path(_key_file),
-      "-m",
-      rebase_path(_manifest_file),
-      "build",
-    ]
-  }
-
-  # Creates a package containing the metadata archive and blob data.
-  action(_bundle_target) {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    script = "//build/gn_run_binary.py"
-
-    deps = [
-      ":$_package_target",
-      ":$_write_manifest_target",
-    ]
-
-    inputs = [
-      _meta_far_file,
-      _manifest_file,
-    ]
-
-    outputs = [
-      _combined_far_file,
-    ]
-
-    args = [
-      rebase_path(_pm_tool_path, root_build_dir),
-      "-o",
-      rebase_path(_pkg_out_dir),
-      "-m",
-      rebase_path(_manifest_file),
-      "archive",
-    ]
-  }
-
-  # Copies the archive to a well-known path.
-  # TODO(kmarshall): Use a 'pm' output flag to write directly to the desired
-  # file path instead.
-  copy(target_name) {
-    forward_variables_from(invoker, [ "testonly" ])
-
-    deps = [
-      ":$_bundle_target",
-    ]
-
-    data = [
-      _final_far_file,
-    ]
-
-    sources = [
-      _combined_far_file,
-    ]
-    outputs = [
-      _final_far_file,
-    ]
-  }
-}
diff --git a/build/config/fuchsia/rules.gni b/build/config/fuchsia/rules.gni
deleted file mode 100644
index d307195..0000000
--- a/build/config/fuchsia/rules.gni
+++ /dev/null
@@ -1,143 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-assert(is_fuchsia)
-
-import("//build/config/chromecast_build.gni")
-import("//build/config/fuchsia/config.gni")
-import("//build/config/fuchsia/package.gni")
-import("//build/config/sysroot.gni")
-
-blobstore_qcow_path = "$root_out_dir/fvm.blk.qcow2"
-
-template("generate_runner_script") {
-  _pkg_dir = "$root_out_dir/gen/" + get_label_info(invoker.package_name, "dir")
-  _manifest_path = "$_pkg_dir/${invoker.package_name}.archive_manifest"
-  _package_path = "$_pkg_dir/${invoker.package_name}.far"
-
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "runner_script",
-                             "target",
-                             "testonly",
-                           ])
-
-    deps = [
-      "//build/config/fuchsia:blobstore_extended_qcow2",
-      "//testing/buildbot/filters:fuchsia_filters",
-    ]
-
-    _generated_script = "${invoker.generated_script}"
-
-    script = "//build/fuchsia/create_runner_script.py"
-
-    outputs = [
-      _generated_script,
-    ]
-
-    data = [
-      _generated_script,
-      _manifest_path,
-      "//build/fuchsia/",
-      "//build/util/lib/",
-      "${fuchsia_sdk}/",
-    ]
-
-    # Arguments used at build time by the runner script generator.
-    args = [
-      "--script-output-path",
-      rebase_path(_generated_script, root_build_dir, root_out_dir),
-    ]
-
-    if (defined(invoker.use_test_server) && invoker.use_test_server) {
-      args += [ "--enable-test-server" ]
-    }
-
-    # Arguments used at runtime by the test runner.
-    args += [
-      "--runner-script",
-      runner_script,
-      "--output-directory",
-      rebase_path(root_build_dir, root_build_dir),
-      "--target-cpu",
-      target_cpu,
-      "--package",
-      rebase_path(_package_path, root_out_dir, root_build_dir),
-      "--package-name",
-      invoker.package_name,
-      "--package-manifest",
-      rebase_path(_manifest_path),
-    ]
-  }
-}
-
-# This template is used to generate a runner script for test binaries into the
-# build dir for Fuchsia. It's generally used from the "test" template.
-template("test_runner_script") {
-  generate_runner_script(target_name) {
-    testonly = true
-    runner_script = "test_runner.py"
-    generated_script =
-        "$root_build_dir/bin/run_" + get_label_info(invoker.test_name, "name")
-    forward_variables_from(invoker, "*")
-  }
-}
-
-# This template is used to generate a runner script for arbitrary executables
-# into the build dir for Fuchsia. The executable is specified as a target
-# passed to the "exe_target" attribute.
-template("fuchsia_executable_runner") {
-  forward_variables_from(invoker, [ "exe_target" ])
-
-  _pkg_target = "${target_name}_pkg"
-  _gen_runner_target = "${target_name}_runner"
-  _archive_target = "${target_name}_archive"
-  _exe_name = get_label_info(exe_target, "name")
-
-  # Define the target dependencies as the union of the executable target
-  # and the invoker's deps.
-  if (defined(invoker.deps)) {
-    _combined_deps = invoker.deps + [ exe_target ]
-  } else {
-    _combined_deps = [ exe_target ]
-  }
-
-  package(_pkg_target) {
-    forward_variables_from(invoker, [ "testonly" ])
-    package_name = _exe_name
-    sandbox_policy = "//build/config/fuchsia/sandbox_policy"
-    binary = _exe_name
-    deps = _combined_deps
-  }
-
-  generate_runner_script(_gen_runner_target) {
-    forward_variables_from(invoker, [ "testonly" ])
-    runner_script = "exe_runner.py"
-    generated_script = "$root_build_dir/bin/run_${_exe_name}"
-    package_name = _exe_name
-  }
-
-  group(target_name) {
-    forward_variables_from(invoker, [ "testonly" ])
-    deps = [
-      ":${_archive_target}",
-      ":${_gen_runner_target}",
-      ":${_pkg_target}",
-    ]
-
-    # Disable packaging for Chromecast builds. (https://crbug.com/810069)
-    if (is_chromecast) {
-      deps -= [ ":${_pkg_target}" ]
-    }
-  }
-
-  generate_runner_script(_archive_target) {
-    forward_variables_from(invoker, [ "testonly" ])
-    runner_script = "archive_builder.py"
-    generated_script =
-        "$root_build_dir/bin/archive_" + get_label_info(exe_target, "name")
-    package_name = _exe_name
-  }
-}
diff --git a/build/config/fuchsia/sandbox_policy b/build/config/fuchsia/sandbox_policy
deleted file mode 100644
index 9419bdd..0000000
--- a/build/config/fuchsia/sandbox_policy
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-  "features": [ "persistent-storage", "root-ssl-certificates", "system-temp" ]
-}
-
diff --git a/build/config/fuchsia/testing_sandbox_policy b/build/config/fuchsia/testing_sandbox_policy
deleted file mode 100644
index 9d4db8f..0000000
--- a/build/config/fuchsia/testing_sandbox_policy
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-  "features": [ "persistent-storage", "root-ssl-certificates", "system-temp"],
-  "dev": ["null", "zero"]
-}
-
diff --git a/build/config/gcc/BUILD.gn b/build/config/gcc/BUILD.gn
deleted file mode 100644
index b6f4f5f..0000000
--- a/build/config/gcc/BUILD.gn
+++ /dev/null
@@ -1,140 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/c++/c++.gni")
-import("//build/config/compiler/compiler.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/config/sysroot.gni")
-import("//build/toolchain/toolchain.gni")
-
-declare_args() {
-  # When non empty, overrides the target rpath value. This allows a user to
-  # make a Chromium build where binaries and shared libraries are meant to be
-  # installed into separate directories, like /usr/bin/chromium and
-  # /usr/lib/chromium for instance. It is useful when a build system that
-  # generates a whole target root filesystem (like Yocto) is used on top of gn,
-  # especially when cross-compiling.
-  # Note: this gn arg is similar to gyp target_rpath generator flag.
-  gcc_target_rpath = ""
-  ldso_path = ""
-}
-
-# This config causes functions not to be automatically exported from shared
-# libraries. By default, all symbols are exported but this means there are
-# lots of exports that slow everything down. In general we explicitly mark
-# which functions we want to export from components.
-#
-# Some third_party code assumes all functions are exported so this is separated
-# into its own config so such libraries can remove this config to make symbols
-# public again.
-#
-# See http://gcc.gnu.org/wiki/Visibility
-config("symbol_visibility_hidden") {
-  cflags = [ "-fvisibility=hidden" ]
-
-  # Visibility attribute is not supported on AIX.
-  if (current_os != "aix") {
-    cflags_cc = [ "-fvisibility-inlines-hidden" ]
-    cflags_objcc = cflags_cc
-  }
-}
-
-# This config is usually set when :symbol_visibility_hidden is removed.
-# It's often a good idea to set visibility explicitly, as there are flags
-# which would error out otherwise (e.g. -fsanitize=cfi-unrelated-cast)
-config("symbol_visibility_default") {
-  cflags = [ "-fvisibility=default" ]
-}
-
-# The rpath is the dynamic library search path. Setting this config on a link
-# step will put the directory where the build generates shared libraries into
-# the rpath.
-#
-# It's important that this *not* be used for release builds we push out.
-# Chrome uses some setuid binaries, and hard links preserve setuid bits. An
-# unprivileged user could gain root privileges by hardlinking a setuid
-# executable and then adding in whatever binaries they want to run into the lib
-# directory.
-#
-# Example bug: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=520126
-#
-# This is required for component builds since the build generates many shared
-# libraries in the build directory that we expect to be automatically loaded.
-# It will be automatically applied in this case by :executable_ldconfig.
-#
-# In non-component builds, certain test binaries may expect to load dynamic
-# libraries from the current directory. As long as these aren't distributed,
-# this is OK. For these cases use something like this:
-#
-#  if (is_linux && !is_component_build) {
-#    configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
-#  }
-config("rpath_for_built_shared_libraries") {
-  if (!is_android) {
-    # Note: Android doesn't support rpath.
-    if (shlib_subdir != ".") {
-      rpath_link = "${shlib_subdir}/"
-    } else {
-      rpath_link = "."
-    }
-    if (current_toolchain != default_toolchain || gcc_target_rpath == "") {
-      ldflags = [
-        # Want to pass "\$". GN will re-escape as required for ninja.
-        "-Wl,-rpath=\$ORIGIN/${rpath_link}",
-        "-Wl,-rpath-link=${rpath_link}",
-      ]
-    } else {
-      ldflags = [
-        "-Wl,-rpath=${gcc_target_rpath}",
-        "-Wl,-rpath-link=${rpath_link}",
-      ]
-    }
-    if (current_toolchain == default_toolchain && ldso_path != "") {
-      ldflags += [ "-Wl,--dynamic-linker=${ldso_path}" ]
-    }
-  }
-}
-
-# Settings for executables.
-config("executable_ldconfig") {
-  # WARNING! //sandbox/linux:chrome_sandbox will not pick up this
-  # config, because it is a setuid binary that needs special flags.
-  # If you add things to this config, make sure you check to see
-  # if they should be added to that target as well.
-  ldflags = []
-  if (is_android) {
-    ldflags += [
-      "-Bdynamic",
-      "-Wl,-z,nocopyreloc",
-    ]
-  } else {
-    # See the rpath_for... config above for why this is necessary for component
-    # builds. Sanitizers use a custom libc++ where this is also necessary.
-    if (is_component_build || using_sanitizer || !libcpp_is_static) {
-      configs = [ ":rpath_for_built_shared_libraries" ]
-    }
-    if (current_cpu == "mipsel" || current_cpu == "mips64el") {
-      ldflags += [ "-pie" ]
-    }
-  }
-
-  if (!is_android && current_os != "aix") {
-    # Find the path containing shared libraries for this toolchain
-    # relative to the build directory. ${root_out_dir} will be a
-    # subdirectory of ${root_build_dir} when cross compiling.
-    _rpath_link = rebase_path(root_out_dir, root_build_dir)
-    if (shlib_subdir != ".") {
-      _rpath_link += "/$shlib_subdir"
-    }
-    ldflags += [
-      "-Wl,-rpath-link=$_rpath_link",
-
-      # TODO(GYP): Do we need a check on the binutils version here?
-      #
-      # Newer binutils don't set DT_RPATH unless you disable "new" dtags
-      # and the new DT_RUNPATH doesn't work without --no-as-needed flag.
-      "-Wl,--disable-new-dtags",
-    ]
-  }
-}
diff --git a/build/config/gclient_args.gni b/build/config/gclient_args.gni
deleted file mode 100644
index 1fbac17..0000000
--- a/build/config/gclient_args.gni
+++ /dev/null
@@ -1,7 +0,0 @@
-# Generated from '.DEPS.git'
-build_with_chromium = true
-checkout_android = true
-checkout_android_native_support = true
-checkout_libaom = true
-checkout_nacl = true
-checkout_oculus_sdk = false
\ No newline at end of file
diff --git a/build/config/get_host_byteorder.py b/build/config/get_host_byteorder.py
deleted file mode 100755
index c8fcf1f..0000000
--- a/build/config/get_host_byteorder.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Get Byteorder of host architecture"""
-
-
-import sys
-
-print sys.byteorder
diff --git a/build/config/host_byteorder.gni b/build/config/host_byteorder.gni
deleted file mode 100644
index 48a1a7f..0000000
--- a/build/config/host_byteorder.gni
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This header file defines the "host_byteorder" variable.
-# Note that this is currently used only for building v8.
-# The chromium code generally assumes little-endianness.
-declare_args() {
-  host_byteorder = "undefined"
-}
-
-# Detect host byteorder
-# ppc64 can be either BE or LE
-if (host_cpu == "ppc64") {
-  if (current_os == "aix") {
-    host_byteorder = "big"
-  } else {
-    # Only use the script when absolutely necessary
-    host_byteorder =
-        exec_script("//build/config/get_host_byteorder.py", [], "trim string")
-  }
-} else if (host_cpu == "ppc" || host_cpu == "s390" || host_cpu == "s390x" ||
-           host_cpu == "mips" || host_cpu == "mips64") {
-  host_byteorder = "big"
-} else {
-  host_byteorder = "little"
-}
diff --git a/build/config/ios/BUILD.gn b/build/config/ios/BUILD.gn
deleted file mode 100644
index 47d36fc..0000000
--- a/build/config/ios/BUILD.gn
+++ /dev/null
@@ -1,130 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/coverage/coverage.gni")
-import("//build/config/ios/ios_sdk.gni")
-import("//build/config/sysroot.gni")
-import("//build/toolchain/toolchain.gni")
-
-declare_args() {
-  # Enabling this option makes clang compile to an intermediate
-  # representation ("bitcode"), and not to native code. This is preferred
-  # when including WebRTC in the apps that will be sent to Apple's App Store
-  # and mandatory for the apps that run on watchOS or tvOS.
-  # The option only works when building with Xcode (use_xcode_clang = true).
-  # Mimicking how Xcode handles it, the production builds (is_debug = false)
-  # get real bitcode sections added, while the debug builds (is_debug = true)
-  # only get bitcode-section "markers" added in them.
-  # NOTE: This option is ignored when building versions for the iOS simulator,
-  # where a part of libvpx is compiled from the assembly code written using
-  # Intel assembly syntax; Yasm / Nasm do not support emitting bitcode parts.
-  # That is not a limitation for now as Xcode mandates the presence of bitcode
-  # only when building bitcode-enabled projects for real devices (ARM CPUs).
-  enable_ios_bitcode = false
-}
-
-# This is included by reference in the //build/config/compiler config that
-# is applied to all targets. It is here to separate out the logic.
-config("compiler") {
-  # These flags are shared between the C compiler and linker.
-  common_ios_flags = []
-
-  # CPU architecture.
-  if (current_cpu == "x64") {
-    common_ios_flags += [
-      "-arch",
-      "x86_64",
-    ]
-  } else if (current_cpu == "x86") {
-    common_ios_flags += [
-      "-arch",
-      "i386",
-    ]
-  } else if (current_cpu == "armv7" || current_cpu == "arm") {
-    common_ios_flags += [
-      "-arch",
-      "armv7",
-    ]
-  } else if (current_cpu == "arm64") {
-    common_ios_flags += [
-      "-arch",
-      "arm64",
-    ]
-  }
-
-  # This is here so that all files get recompiled after an Xcode update.
-  # (defines are passed via the command line, and build system rebuild things
-  # when their commandline changes). Nothing should ever read this define.
-  defines = [ "CR_XCODE_VERSION=$xcode_version" ]
-
-  asmflags = common_ios_flags
-  cflags = common_ios_flags
-
-  # Without this, the constructors and destructors of a C++ object inside
-  # an Objective C struct won't be called, which is very bad.
-  cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
-
-  ldflags = common_ios_flags
-}
-
-# This is included by reference in the //build/config/compiler:runtime_library
-# config that is applied to all targets. It is here to separate out the logic
-# that is iOS-only. Please see that target for advice on what should go in
-# :runtime_library vs. :compiler.
-config("runtime_library") {
-  common_flags = [
-    "-isysroot",
-    sysroot,
-
-    "-stdlib=libc++",
-  ]
-
-  if (use_ios_simulator) {
-    common_flags += [ "-mios-simulator-version-min=$ios_deployment_target" ]
-  } else {
-    common_flags += [ "-miphoneos-version-min=$ios_deployment_target" ]
-  }
-
-  if (use_xcode_clang && enable_ios_bitcode && !use_ios_simulator) {
-    if (is_debug) {
-      common_flags += [ "-fembed-bitcode-marker" ]
-    } else {
-      common_flags += [ "-fembed-bitcode" ]
-    }
-  }
-
-  asmflags = common_flags
-  cflags = common_flags
-  ldflags = common_flags
-
-  if (use_clang_coverage) {
-    configs = [ "//build/config/coverage:default_coverage" ]
-  }
-}
-
-config("ios_executable_flags") {
-}
-
-config("ios_dynamic_flags") {
-  ldflags = [ "-Wl,-ObjC" ]  # Always load Objective-C categories and class.
-}
-
-config("xctest_config") {
-  common_flags = [
-    "-F",
-    "$ios_sdk_platform_path/Developer/Library/Frameworks",
-  ]
-
-  cflags = common_flags
-  ldflags = common_flags
-
-  libs = [
-    "Foundation.framework",
-    "XCTest.framework",
-  ]
-}
-
-group("xctest") {
-  public_configs = [ ":xctest_config" ]
-}
diff --git a/build/config/ios/BuildInfo.plist b/build/config/ios/BuildInfo.plist
deleted file mode 100644
index 3595e5a..0000000
--- a/build/config/ios/BuildInfo.plist
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-  <key>BuildMachineOSBuild</key>
-  <string>${BUILD_MACHINE_OS_BUILD}</string>
-  <key>CFBundleSupportedPlatforms</key>
-  <array>
-    <string>${IOS_SUPPORTED_PLATFORM}</string>
-  </array>
-  <key>DTCompiler</key>
-  <string>${GCC_VERSION}</string>
-  <key>DTPlatformName</key>
-  <string>${IOS_PLATFORM_NAME}</string>
-  <key>DTPlatformVersion</key>
-  <string>${IOS_PLATFORM_VERSION}</string>
-  <key>DTPlatformBuild</key>
-  <string>${IOS_PLATFORM_BUILD}</string>
-  <key>DTSDKBuild</key>
-  <string>${IOS_SDK_BUILD}</string>
-  <key>DTSDKName</key>
-  <string>${IOS_SDK_NAME}</string>
-  <key>MinimumOSVersion</key>
-  <string>${IOS_DEPLOYMENT_TARGET}</string>
-  <key>DTXcode</key>
-  <string>${XCODE_VERSION}</string>
-  <key>DTXcodeBuild</key>
-  <string>${XCODE_BUILD}</string>
-  <key>UIDeviceFamily</key>
-  <array>
-    <integer>1</integer>
-    <integer>2</integer>
-  </array>
-</dict>
-</plist>
diff --git a/build/config/ios/Host-Info.plist b/build/config/ios/Host-Info.plist
deleted file mode 100644
index 9f6f5de..0000000
--- a/build/config/ios/Host-Info.plist
+++ /dev/null
@@ -1,126 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>en</string>
-	<key>CFBundleDisplayName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIdentifier</key>
-	<string>${IOS_BUNDLE_ID_PREFIX}.test.${EXECUTABLE_NAME:rfc1034identifier}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1.0</string>
-	<key>LSRequiresIPhoneOS</key>
-	<true/>
-	<key>NSAppTransportSecurity</key>
-	<dict>
-		<key>NSAllowsArbitraryLoads</key>
-		<true/>
-	</dict>
-	<key>UIRequiredDeviceCapabilities</key>
-	<array>
-		<string>armv7</string>
-	</array>
-	<key>UILaunchImages</key>
-	<array>
-		<dict>
-			<key>UILaunchImageMinimumOSVersion</key>
-			<string>7.0</string>
-			<key>UILaunchImageName</key>
-			<string>Default</string>
-			<key>UILaunchImageOrientation</key>
-			<string>Portrait</string>
-			<key>UILaunchImageSize</key>
-			<string>{320, 480}</string>
-		</dict>
-		<dict>
-			<key>UILaunchImageMinimumOSVersion</key>
-			<string>7.0</string>
-			<key>UILaunchImageName</key>
-			<string>Default</string>
-			<key>UILaunchImageOrientation</key>
-			<string>Portrait</string>
-			<key>UILaunchImageSize</key>
-			<string>{320, 568}</string>
-		</dict>
-		<dict>
-			<key>UILaunchImageMinimumOSVersion</key>
-			<string>8.0</string>
-			<key>UILaunchImageName</key>
-			<string>Default</string>
-			<key>UILaunchImageOrientation</key>
-			<string>Portrait</string>
-			<key>UILaunchImageSize</key>
-			<string>{375, 667}</string>
-		</dict>
-		<dict>
-			<key>UILaunchImageMinimumOSVersion</key>
-			<string>8.0</string>
-			<key>UILaunchImageName</key>
-			<string>Default</string>
-			<key>UILaunchImageOrientation</key>
-			<string>Portrait</string>
-			<key>UILaunchImageSize</key>
-			<string>{414, 736}</string>
-		</dict>
-		<dict>
-			<key>UILaunchImageMinimumOSVersion</key>
-			<string>8.0</string>
-			<key>UILaunchImageName</key>
-			<string>Default</string>
-			<key>UILaunchImageOrientation</key>
-			<string>Landscape</string>
-			<key>UILaunchImageSize</key>
-			<string>{414, 736}</string>
-		</dict>
-	</array>
-	<key>UILaunchImages~ipad</key>
-	<array>
-		<dict>
-			<key>UILaunchImageMinimumOSVersion</key>
-			<string>7.0</string>
-			<key>UILaunchImageName</key>
-			<string>Default</string>
-			<key>UILaunchImageOrientation</key>
-			<string>Portrait</string>
-			<key>UILaunchImageSize</key>
-			<string>{768, 1024}</string>
-		</dict>
-		<dict>
-			<key>UILaunchImageMinimumOSVersion</key>
-			<string>7.0</string>
-			<key>UILaunchImageName</key>
-			<string>Default</string>
-			<key>UILaunchImageOrientation</key>
-			<string>Landscape</string>
-			<key>UILaunchImageSize</key>
-			<string>{768, 1024}</string>
-		</dict>
-	</array>
-	<key>UISupportedInterfaceOrientations</key>
-	<array>
-		<string>UIInterfaceOrientationPortrait</string>
-		<string>UIInterfaceOrientationLandscapeLeft</string>
-		<string>UIInterfaceOrientationLandscapeRight</string>
-	</array>
-	<key>UISupportedInterfaceOrientations~ipad</key>
-	<array>
-		<string>UIInterfaceOrientationPortrait</string>
-		<string>UIInterfaceOrientationPortraitUpsideDown</string>
-		<string>UIInterfaceOrientationLandscapeLeft</string>
-		<string>UIInterfaceOrientationLandscapeRight</string>
-	</array>
-</dict>
-</plist>
diff --git a/build/config/ios/Module-Info.plist b/build/config/ios/Module-Info.plist
deleted file mode 100644
index 13b67c4..0000000
--- a/build/config/ios/Module-Info.plist
+++ /dev/null
@@ -1,24 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-  <key>CFBundleDevelopmentRegion</key>
-  <string>en</string>
-  <key>CFBundleExecutable</key>
-  <string>${EXECUTABLE_NAME}</string>
-  <key>CFBundleIdentifier</key>
-  <string>${IOS_BUNDLE_ID_PREFIX}.${MODULE_BUNDLE_ID:rfc1034identifier}</string>
-  <key>CFBundleInfoDictionaryVersion</key>
-  <string>6.0</string>
-  <key>CFBundleName</key>
-  <string>${PRODUCT_NAME}</string>
-  <key>CFBundlePackageType</key>
-  <string>BNDL</string>
-  <key>CFBundleShortVersionString</key>
-  <string>1.0</string>
-  <key>CFBundleSignature</key>
-  <string>????</string>
-  <key>CFBundleVersion</key>
-  <string>1</string>
-</dict>
-</plist>
diff --git a/build/config/ios/OWNERS b/build/config/ios/OWNERS
deleted file mode 100644
index 0e726c6..0000000
--- a/build/config/ios/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-file://build/config/mac/OWNERS
diff --git a/build/config/ios/asset_catalog.gni b/build/config/ios/asset_catalog.gni
deleted file mode 100644
index 460cd96..0000000
--- a/build/config/ios/asset_catalog.gni
+++ /dev/null
@@ -1,148 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This template declares a bundle_data target that references an asset
-# catalog so that it is compiled to the asset catalog of the generated
-# bundle.
-#
-# The create_bundle target requires that all asset catalogs are part of an
-# .xcasset bundle. This requirement comes from actool that only receives
-# the path to the .xcasset bundle directory and not to the individual
-# .imageset directories.
-#
-# The requirement is a bit problematic as it prevents compiling only a
-# subset of the asset catalogs that are contained in a .xcasset. This template
-# fixes that by instead copying the content of the asset catalog to temporary
-# .xcasset directory (below $root_out_dir) and defining a bundle_data
-# target that refers to those copies (this is efficient as the "copy" is
-# implemented by hardlinking if possible on macOS).
-#
-# Since the create_data target will only refer to the .xcasset directory,
-# an additional "action" target that runs a dummy script is defined. It
-# does nothing but pretends to generate the .xcassets directory (while
-# it is really created as a side-effect of the "copy" step). This allows
-# working around the check in "gn" that all inputs below $root_out_dir have
-# to be outputs of another target with a public dependency path.
-#
-# This template also ensures that the files are only copied once when the
-# build targets multiple architectures at the same time (aka "fat build").
-#
-# Arguments
-#
-#     sources:
-#       required, list of strings, paths to the file contained in the
-#       asset catalog directory; this must contain the Contents.json file
-#       and all the images referenced by it (not enforced by the template).
-#
-#     asset_type:
-#       required, string, type of the asset catalog, that is the extension
-#       of the directory containing the images and the Contents.json file.
-#
-template("asset_catalog") {
-  assert(defined(invoker.sources) && invoker.sources != [],
-         "sources must be defined and not empty for $target_name")
-
-  assert(defined(invoker.asset_type) && invoker.asset_type != "",
-         "asset_type must be defined and not empty for $target_name")
-
-  if (current_toolchain != default_toolchain) {
-    group(target_name) {
-      public_deps = [
-        ":$target_name($default_toolchain)",
-      ]
-    }
-  } else {
-    _copy_target_name = target_name + "__copy"
-    _data_target_name = target_name
-
-    _sources = invoker.sources
-    _outputs = []
-
-    # The compilation of resources into Assets.car is enabled automatically
-    # by the "create_bundle" target if any of the "bundle_data" sources's
-    # path is in a .xcassets directory and matches one of the know asset
-    # catalog type.
-    _xcassets_dir = "$target_gen_dir/${target_name}.xcassets"
-    _output_dir = "$_xcassets_dir/" +
-                  get_path_info(get_path_info(_sources[0], "dir"), "file")
-
-    foreach(_source, invoker.sources) {
-      _dir = get_path_info(_source, "dir")
-      _outputs += [ "$_output_dir/" + get_path_info(_source, "file") ]
-
-      assert(get_path_info(_dir, "extension") == invoker.asset_type,
-             "$_source dirname must have .${invoker.asset_type} extension")
-    }
-
-    action(_copy_target_name) {
-      # Forward "deps", "public_deps" and "testonly" in case some of the
-      # source files are generated.
-      forward_variables_from(invoker,
-                             [
-                               "deps",
-                               "public_deps",
-                               "testonly",
-                             ])
-
-      script = "//build/config/ios/hardlink.py"
-
-      visibility = [ ":$_data_target_name" ]
-      sources = _sources
-      outputs = _outputs + [ _xcassets_dir ]
-
-      args = [
-        rebase_path(get_path_info(_sources[0], "dir"), root_out_dir),
-        rebase_path(_output_dir, root_out_dir),
-      ]
-    }
-
-    bundle_data(_data_target_name) {
-      forward_variables_from(invoker,
-                             "*",
-                             [
-                               "deps",
-                               "outputs",
-                               "public_deps",
-                               "sources",
-                             ])
-
-      sources = _outputs
-      outputs = [
-        "{{bundle_resources_dir}}/{{source_file_part}}",
-      ]
-      public_deps = [
-        ":$_copy_target_name",
-      ]
-    }
-  }
-}
-
-# Those templates are specialisations of the asset_catalog template for known
-# types of asset catalog types (imageset, launchimage, appiconset).
-#
-# Arguments
-#
-#     sources:
-#       required, list of strings, paths to the file contained in the
-#       asset catalog directory; this must contain the Contents.json file
-#       and all the images referenced by it (not enforced by the template).
-#
-template("appiconset") {
-  asset_catalog(target_name) {
-    forward_variables_from(invoker, "*", [ "asset_type" ])
-    asset_type = "appiconset"
-  }
-}
-template("imageset") {
-  asset_catalog(target_name) {
-    forward_variables_from(invoker, "*", [ "asset_type" ])
-    asset_type = "imageset"
-  }
-}
-template("launchimage") {
-  asset_catalog(target_name) {
-    forward_variables_from(invoker, "*", [ "asset_type" ])
-    asset_type = "launchimage"
-  }
-}
diff --git a/build/config/ios/codesign.py b/build/config/ios/codesign.py
deleted file mode 100644
index fba34f6..0000000
--- a/build/config/ios/codesign.py
+++ /dev/null
@@ -1,531 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import datetime
-import fnmatch
-import glob
-import os
-import plistlib
-import shutil
-import subprocess
-import sys
-import tempfile
-
-
-def GetProvisioningProfilesDir():
-  """Returns the location of the installed mobile provisioning profiles.
-
-  Returns:
-    The path to the directory containing the installed mobile provisioning
-    profiles as a string.
-  """
-  return os.path.join(
-      os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
-
-
-def LoadPlistFile(plist_path):
-  """Loads property list file at |plist_path|.
-
-  Args:
-    plist_path: path to the property list file to load.
-
-  Returns:
-    The content of the property list file as a python object.
-  """
-  return plistlib.readPlistFromString(subprocess.check_output([
-      'xcrun', 'plutil', '-convert', 'xml1', '-o', '-', plist_path]))
-
-
-class Bundle(object):
-  """Wraps a bundle."""
-
-  def __init__(self, bundle_path):
-    """Initializes the Bundle object with data from bundle Info.plist file."""
-    self._path = bundle_path
-    self._data = LoadPlistFile(os.path.join(self._path, 'Info.plist'))
-
-  @property
-  def path(self):
-    return self._path
-
-  @property
-  def identifier(self):
-    return self._data['CFBundleIdentifier']
-
-  @property
-  def binary_path(self):
-    return os.path.join(self._path, self._data['CFBundleExecutable'])
-
-  def Validate(self, expected_mappings):
-    """Checks that keys in the bundle have the expected value.
-
-    Args:
-      expected_mappings: a dictionary of string to object, each mapping will
-      be looked up in the bundle data to check it has the same value (missing
-      values will be ignored)
-
-    Returns:
-      A dictionary of the key with a different value between expected_mappings
-      and the content of the bundle (i.e. errors) so that caller can format the
-      error message. The dictionary will be empty if there are no errors.
-    """
-    errors = {}
-    for key, expected_value in expected_mappings.iteritems():
-      if key in self._data:
-        value = self._data[key]
-        if value != expected_value:
-          errors[key] = (value, expected_value)
-    return errors
-
-
-class ProvisioningProfile(object):
-  """Wraps a mobile provisioning profile file."""
-
-  def __init__(self, provisioning_profile_path):
-    """Initializes the ProvisioningProfile with data from profile file."""
-    self._path = provisioning_profile_path
-    self._data = plistlib.readPlistFromString(subprocess.check_output([
-        'xcrun', 'security', 'cms', '-D', '-u', 'certUsageAnyCA',
-        '-i', provisioning_profile_path]))
-
-  @property
-  def path(self):
-    return self._path
-
-  @property
-  def application_identifier_pattern(self):
-    return self._data.get('Entitlements', {}).get('application-identifier', '')
-
-  @property
-  def team_identifier(self):
-    return self._data.get('TeamIdentifier', [''])[0]
-
-  @property
-  def entitlements(self):
-    return self._data.get('Entitlements', {})
-
-  @property
-  def expiration_date(self):
-    return self._data.get('ExpirationDate', datetime.datetime.now())
-
-  def ValidToSignBundle(self, bundle_identifier):
-    """Checks whether the provisioning profile can sign bundle_identifier.
-
-    Args:
-      bundle_identifier: the identifier of the bundle that needs to be signed.
-
-    Returns:
-      True if the mobile provisioning profile can be used to sign a bundle
-      with the corresponding bundle_identifier, False otherwise.
-    """
-    return fnmatch.fnmatch(
-        '%s.%s' % (self.team_identifier, bundle_identifier),
-        self.application_identifier_pattern)
-
-  def Install(self, installation_path):
-    """Copies mobile provisioning profile info to |installation_path|."""
-    shutil.copy2(self.path, installation_path)
-
-
-class Entitlements(object):
-  """Wraps an Entitlement plist file."""
-
-  def __init__(self, entitlements_path):
-    """Initializes Entitlements object from entitlement file."""
-    self._path = entitlements_path
-    self._data = LoadPlistFile(self._path)
-
-  @property
-  def path(self):
-    return self._path
-
-  def ExpandVariables(self, substitutions):
-    self._data = self._ExpandVariables(self._data, substitutions)
-
-  def _ExpandVariables(self, data, substitutions):
-    if isinstance(data, str):
-      for key, substitution in substitutions.iteritems():
-        data = data.replace('$(%s)' % (key,), substitution)
-      return data
-
-    if isinstance(data, dict):
-      for key, value in data.iteritems():
-        data[key] = self._ExpandVariables(value, substitutions)
-      return data
-
-    if isinstance(data, list):
-      for i, value in enumerate(data):
-        data[i] = self._ExpandVariables(value, substitutions)
-
-    return data
-
-  def LoadDefaults(self, defaults):
-    for key, value in defaults.iteritems():
-      if key not in self._data:
-        self._data[key] = value
-
-  def WriteTo(self, target_path):
-    plistlib.writePlist(self._data, target_path)
-
-
-def FindProvisioningProfile(bundle_identifier, required):
-  """Finds mobile provisioning profile to use to sign bundle.
-
-  Args:
-    bundle_identifier: the identifier of the bundle to sign.
-
-  Returns:
-    The ProvisioningProfile object that can be used to sign the Bundle
-    object or None if no matching provisioning profile was found.
-  """
-  provisioning_profile_paths = glob.glob(
-      os.path.join(GetProvisioningProfilesDir(), '*.mobileprovision'))
-
-  # Iterate over all installed mobile provisioning profiles and filter those
-  # that can be used to sign the bundle, ignoring expired ones.
-  now = datetime.datetime.now()
-  valid_provisioning_profiles = []
-  one_hour = datetime.timedelta(0, 3600)
-  for provisioning_profile_path in provisioning_profile_paths:
-    provisioning_profile = ProvisioningProfile(provisioning_profile_path)
-    if provisioning_profile.expiration_date - now < one_hour:
-      sys.stderr.write(
-          'Warning: ignoring expired provisioning profile: %s.\n' %
-          provisioning_profile_path)
-      continue
-    if provisioning_profile.ValidToSignBundle(bundle_identifier):
-      valid_provisioning_profiles.append(provisioning_profile)
-
-  if not valid_provisioning_profiles:
-    if required:
-      sys.stderr.write(
-          'Error: no mobile provisioning profile found for "%s".\n' %
-          bundle_identifier)
-      sys.exit(1)
-    return None
-
-  # Select the most specific mobile provisioning profile, i.e. the one with
-  # the longest application identifier pattern (prefer the one with the latest
-  # expiration date as a secondary criterion).
-  selected_provisioning_profile = max(
-      valid_provisioning_profiles,
-      key=lambda p: (len(p.application_identifier_pattern), p.expiration_date))
-
-  one_week = datetime.timedelta(7)
-  if selected_provisioning_profile.expiration_date - now < 2 * one_week:
-    sys.stderr.write(
-        'Warning: selected provisioning profile will expire soon: %s' %
-        selected_provisioning_profile.path)
-  return selected_provisioning_profile
-
-
-def CodeSignBundle(bundle_path, identity, extra_args):
-  process = subprocess.Popen(['xcrun', 'codesign', '--force', '--sign',
-      identity, '--timestamp=none'] + list(extra_args) + [bundle_path],
-      stderr=subprocess.PIPE)
-  _, stderr = process.communicate()
-  if process.returncode:
-    sys.stderr.write(stderr)
-    sys.exit(process.returncode)
-  for line in stderr.splitlines():
-    if line.endswith(': replacing existing signature'):
-      # Ignore warning about replacing existing signature as this should only
-      # happen when re-signing system frameworks (and then it is expected).
-      continue
-    sys.stderr.write(line)
-    sys.stderr.write('\n')
-
-
-def InstallSystemFramework(framework_path, bundle_path, args):
-  """Install framework from |framework_path| to |bundle| and code-re-sign it."""
-  installed_framework_path = os.path.join(
-      bundle_path, 'Frameworks', os.path.basename(framework_path))
-
-  if os.path.exists(installed_framework_path):
-    shutil.rmtree(installed_framework_path)
-
-  shutil.copytree(framework_path, installed_framework_path)
-  CodeSignBundle(installed_framework_path, args.identity,
-      ['--deep', '--preserve-metadata=identifier,entitlements'])
-
-
-def GenerateEntitlements(path, provisioning_profile, bundle_identifier):
-  """Generates an entitlements file.
-
-  Args:
-    path: path to the entitlements template file
-    provisioning_profile: ProvisioningProfile object to use, may be None
-    bundle_identifier: identifier of the bundle to sign.
-  """
-  entitlements = Entitlements(path)
-  if provisioning_profile:
-    entitlements.LoadDefaults(provisioning_profile.entitlements)
-    app_identifier_prefix = provisioning_profile.team_identifier + '.'
-  else:
-    app_identifier_prefix = '*.'
-  entitlements.ExpandVariables({
-      'CFBundleIdentifier': bundle_identifier,
-      'AppIdentifierPrefix': app_identifier_prefix,
-  })
-  return entitlements
-
-
-def GenerateBundleInfoPlist(bundle_path, plist_compiler, partial_plist):
-  """Generates the bundle Info.plist for a list of partial .plist files.
-
-  Args:
-    bundle_path: path to the bundle
-    plist_compiler: string, path to the Info.plist compiler
-    partial_plist: list of path to partial .plist files to merge
-  """
-
-  # Filter empty partial .plist files (this happens if an application
-  # does not need to compile any asset catalog, in which case
-  # the partial .plist file from the asset catalog compilation step is
-  # just a stamp file).
-  filtered_partial_plist = []
-  for plist in partial_plist:
-    plist_size = os.stat(plist).st_size
-    if plist_size:
-      filtered_partial_plist.append(plist)
-
-  # Invoke the plist_compiler script. It needs to be a python script.
-  subprocess.check_call([
-      'python', plist_compiler, 'merge', '-f', 'binary1',
-      '-o', os.path.join(bundle_path, 'Info.plist'),
-  ] + filtered_partial_plist)
-
-
-class Action(object):
-  """Class implementing one action supported by the script."""
-
-  @classmethod
-  def Register(cls, subparsers):
-    parser = subparsers.add_parser(cls.name, help=cls.help)
-    parser.set_defaults(func=cls._Execute)
-    cls._Register(parser)
-
-
-class CodeSignBundleAction(Action):
-  """Class implementing the code-sign-bundle action."""
-
-  name = 'code-sign-bundle'
-  help = 'perform code signature for a bundle'
-
-  @staticmethod
-  def _Register(parser):
-    parser.add_argument(
-        '--entitlements', '-e', dest='entitlements_path',
-        help='path to the entitlements file to use')
-    parser.add_argument(
-        'path', help='path to the iOS bundle to codesign')
-    parser.add_argument(
-        '--identity', '-i', required=True,
-        help='identity to use to codesign')
-    parser.add_argument(
-        '--binary', '-b', required=True,
-        help='path to the iOS bundle binary')
-    parser.add_argument(
-        '--framework', '-F', action='append', default=[], dest='frameworks',
-        help='install and resign system framework')
-    parser.add_argument(
-        '--disable-code-signature', action='store_true', dest='no_signature',
-        help='disable code signature')
-    parser.add_argument(
-        '--disable-embedded-mobileprovision', action='store_false',
-        default=True, dest='embedded_mobileprovision',
-        help='disable finding and embedding mobileprovision')
-    parser.add_argument(
-        '--platform', '-t', required=True,
-        help='platform the signed bundle is targeting')
-    parser.add_argument(
-        '--partial-info-plist', '-p', action='append', default=[],
-        help='path to partial Info.plist to merge to create bundle Info.plist')
-    parser.add_argument(
-        '--plist-compiler-path', '-P', action='store',
-        help='path to the plist compiler script (for --partial-info-plist)')
-    parser.set_defaults(no_signature=False)
-
-  @staticmethod
-  def _Execute(args):
-    if not args.identity:
-      args.identity = '-'
-
-    if args.partial_info_plist:
-      GenerateBundleInfoPlist(
-          args.path,
-          args.plist_compiler_path,
-          args.partial_info_plist)
-
-    bundle = Bundle(args.path)
-
-    # According to Apple documentation, the application binary must be the same
-    # as the bundle name without the .app suffix. See crbug.com/740476 for more
-    # information on what problem this can cause.
-    #
-    # To prevent this class of error, fail with an error if the binary name is
-    # incorrect in the Info.plist as it is not possible to update the value in
-    # Info.plist at this point (the file has been copied by a different target
-    # and ninja would consider the build dirty if it was updated).
-    #
-    # The bundle name is also checked (an incorrect bundle name does not cause
-    # the build to be considered dirty, but still terminates the script).
-    #
-    # Apple documentation is available at:
-    # https://developer.apple.com/library/content/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html
-    bundle_name = os.path.splitext(os.path.basename(bundle.path))[0]
-    errors = bundle.Validate({
-        'CFBundleName': bundle_name,
-        'CFBundleExecutable': bundle_name,
-    })
-    if errors:
-      for key in sorted(errors):
-        value, expected_value = errors[key]
-        sys.stderr.write('%s: error: %s value incorrect: %s != %s\n' % (
-            bundle.path, key, value, expected_value))
-      sys.stderr.flush()
-      sys.exit(1)
-
-    # Delete existing embedded mobile provisioning.
-    embedded_provisioning_profile = os.path.join(
-        bundle.path, 'embedded.mobileprovision')
-    if os.path.isfile(embedded_provisioning_profile):
-      os.unlink(embedded_provisioning_profile)
-
-    # Delete existing code signature.
-    signature_file = os.path.join(args.path, '_CodeSignature', 'CodeResources')
-    if os.path.isfile(signature_file):
-      shutil.rmtree(os.path.dirname(signature_file))
-
-    # Install system frameworks if requested.
-    for framework_path in args.frameworks:
-      InstallSystemFramework(framework_path, args.path, args)
-
-    # Copy main binary into bundle.
-    if os.path.isfile(bundle.binary_path):
-      os.unlink(bundle.binary_path)
-    shutil.copy(args.binary, bundle.binary_path)
-
-    if args.no_signature:
-      return
-
-    codesign_extra_args = []
-
-    if args.embedded_mobileprovision:
-      # Find the mobile provisioning profile and embed it into the bundle (if a
-      # code signing identity has been provided, fail if no valid mobile
-      # provisioning profile is found).
-      provisioning_profile_required = args.identity != '-'
-      provisioning_profile = FindProvisioningProfile(
-          bundle.identifier, provisioning_profile_required)
-      if provisioning_profile and args.platform != 'iphonesimulator':
-        provisioning_profile.Install(embedded_provisioning_profile)
-
-        if args.entitlements_path is not None:
-          temporary_entitlements_file = \
-              tempfile.NamedTemporaryFile(suffix='.xcent')
-          codesign_extra_args.extend(
-              ['--entitlements', temporary_entitlements_file.name])
-
-          entitlements = GenerateEntitlements(
-              args.entitlements_path, provisioning_profile, bundle.identifier)
-          entitlements.WriteTo(temporary_entitlements_file.name)
-
-    CodeSignBundle(bundle.path, args.identity, codesign_extra_args)
-
-
-class CodeSignFileAction(Action):
-  """Class implementing code signature for a single file."""
-
-  name = 'code-sign-file'
-  help = 'code-sign a single file'
-
-  @staticmethod
-  def _Register(parser):
-    parser.add_argument(
-        'path', help='path to the file to codesign')
-    parser.add_argument(
-        '--identity', '-i', required=True,
-        help='identity to use to codesign')
-    parser.add_argument(
-        '--output', '-o',
-        help='if specified copy the file to that location before signing it')
-    parser.set_defaults(sign=True)
-
-  @staticmethod
-  def _Execute(args):
-    if not args.identity:
-      args.identity = '-'
-
-    install_path = args.path
-    if args.output:
-
-      if os.path.isfile(args.output):
-        os.unlink(args.output)
-      elif os.path.isdir(args.output):
-        shutil.rmtree(args.output)
-
-      if os.path.isfile(args.path):
-        shutil.copy(args.path, args.output)
-      elif os.path.isdir(args.path):
-        shutil.copytree(args.path, args.output)
-
-      install_path = args.output
-
-    CodeSignBundle(install_path, args.identity,
-      ['--deep', '--preserve-metadata=identifier,entitlements'])
-
-
-class GenerateEntitlementsAction(Action):
-  """Class implementing the generate-entitlements action."""
-
-  name = 'generate-entitlements'
-  help = 'generate entitlements file'
-
-  @staticmethod
-  def _Register(parser):
-    parser.add_argument(
-        '--entitlements', '-e', dest='entitlements_path',
-        help='path to the entitlements file to use')
-    parser.add_argument(
-        'path', help='path to the entitlements file to generate')
-    parser.add_argument(
-        '--info-plist', '-p', required=True,
-        help='path to the bundle Info.plist')
-
-  @staticmethod
-  def _Execute(args):
-    info_plist = LoadPlistFile(args.info_plist)
-    bundle_identifier = info_plist['CFBundleIdentifier']
-    provisioning_profile = FindProvisioningProfile(bundle_identifier, False)
-    entitlements = GenerateEntitlements(
-        args.entitlements_path, provisioning_profile, bundle_identifier)
-    entitlements.WriteTo(args.path)
-
-
-def Main():
-  parser = argparse.ArgumentParser('codesign iOS bundles')
-  parser.add_argument('--developer_dir', required=False,
-                      help='Path to Xcode.')
-  subparsers = parser.add_subparsers()
-
-  actions = [
-      CodeSignBundleAction,
-      CodeSignFileAction,
-      GenerateEntitlementsAction,
-  ]
-
-  for action in actions:
-    action.Register(subparsers)
-
-  args = parser.parse_args()
-  if args.developer_dir:
-    os.environ['DEVELOPER_DIR'] = args.developer_dir
-  args.func(args)
-
-
-if __name__ == '__main__':
-  sys.exit(Main())
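Taken together, a condensed sketch of what the code-sign-bundle action does for a device build; all paths and the signing identity are hypothetical, and error handling, framework installation and Info.plist merging are omitted:

  # Hypothetical, condensed version of CodeSignBundleAction._Execute.
  bundle = Bundle('out/Debug/App.app')
  shutil.copy('out/Debug/obj/App', bundle.binary_path)
  profile = FindProvisioningProfile(bundle.identifier, True)
  profile.Install(os.path.join(bundle.path, 'embedded.mobileprovision'))
  entitlements = GenerateEntitlements(
      'build/config/ios/entitlements.plist', profile, bundle.identifier)
  entitlements.WriteTo('out/Debug/App.xcent')
  CodeSignBundle(bundle.path, 'iPhone Developer: Jane Doe (ABCDE12345)',
                 ['--entitlements', 'out/Debug/App.xcent'])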
diff --git a/build/config/ios/dummy.py b/build/config/ios/dummy.py
deleted file mode 100644
index b23b7da..0000000
--- a/build/config/ios/dummy.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Empty script that does nothing and return success error code.
-
-This script is used by some gn targets that pretend creating some output
-but instead depend on another target creating the output indirectly (in
-general this output is a directory that is used as input by a bundle_data
-target).
-
-It ignores all parameters and terminate with a success error code. It
-does the same thing as the unix command "true", but gn can only invoke
-python scripts.
-"""
diff --git a/build/config/ios/entitlements.plist b/build/config/ios/entitlements.plist
deleted file mode 100644
index 429762e..0000000
--- a/build/config/ios/entitlements.plist
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>application-identifier</key>
-	<string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
-	<key>keychain-access-groups</key>
-	<array>
-		<string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
-	</array>
-</dict>
-</plist>
diff --git a/build/config/ios/find_signing_identity.py b/build/config/ios/find_signing_identity.py
deleted file mode 100644
index 7add474..0000000
--- a/build/config/ios/find_signing_identity.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import os
-import subprocess
-import sys
-import re
-
-def ListIdentities():
-  return subprocess.check_output([
-    'xcrun',
-    'security',
-    'find-identity',
-    '-v',
-    '-p',
-    'codesigning',
-  ])
-
-
-def FindValidIdentity(identity_description):
-  lines = list(map(str.strip, ListIdentities().splitlines()))
-  # Look for something like "2) XYZ "iPhone Developer: Name (ABC)""
-  exp = re.compile('[0-9]+\) ([A-F0-9]+) "([^"]*)"')
-  for line in lines:
-    res = exp.match(line)
-    if res is None:
-      continue
-    if identity_description in res.group(2):
-      yield res.group(1)
-
-
-if __name__ == '__main__':
-  parser = argparse.ArgumentParser('codesign iOS bundles')
-  parser.add_argument(
-      '--developer_dir', required=False,
-      help='Path to Xcode.')
-  parser.add_argument(
-      '--identity-description', required=True,
-      help='Text description used to select the code signing identity.')
-  args = parser.parse_args()
-  if args.developer_dir:
-    os.environ['DEVELOPER_DIR'] = args.developer_dir
-
-  for identity in FindValidIdentity(args.identity_description):
-    print identity
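For reference, a sketch of how one line of `xcrun security find-identity -v -p codesigning` output is parsed; the hash and identity name are made up:

  # Hypothetical sample line; FindValidIdentity yields the hex hash, which is
  # what codesign expects as the identity.
  line = '2) A1B2C3D4E5F6 "iPhone Developer: Jane Doe (ABCDE12345)"'
  match = re.compile(r'[0-9]+\) ([A-F0-9]+) "([^"]*)"').match(line)
  print(match.group(1))  # A1B2C3D4E5F6
  print(match.group(2))  # iPhone Developer: Jane Doe (ABCDE12345)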
diff --git a/build/config/ios/generate_umbrella_header.py b/build/config/ios/generate_umbrella_header.py
deleted file mode 100644
index 8547e18..0000000
--- a/build/config/ios/generate_umbrella_header.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Generates an umbrella header for an iOS framework."""
-
-import argparse
-import datetime
-import os
-import re
-import string
-
-
-HEADER_TEMPLATE = string.Template('''\
-// Copyright $year The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-//
-// This file is auto-generated by //build/config/ios/generate_umbrella_header.py
-
-#ifndef $header_guard
-#define $header_guard
-
-$imports
-
-#endif  // $header_guard
-''')
-
-
-def ComputeHeaderGuard(file_path):
-  """Computes the header guard for a file path.
-
-  Args:
-    file_path: The path to convert into a header guard.
-  Returns:
-    The header guard string for the file_path.
-  """
-  return re.sub(r'[.+/\\]', r'_', file_path.upper()) + '_'
-
-
-def WriteUmbrellaHeader(output_path, imported_headers):
-  """Writes the umbrella header.
-
-  Args:
-    output_path: The path to the umbrella header.
-    imported_headers: A list of headers to #import in the umbrella header.
-  """
-  year = datetime.date.today().year
-  header_guard = ComputeHeaderGuard(output_path)
-  imports = '\n'.join([
-      '#import "%s"' % os.path.basename(header)
-          for header in sorted(imported_headers)
-      ])
-  with open(output_path, 'w') as output_file:
-    output_file.write(
-        HEADER_TEMPLATE.safe_substitute({
-            'year': year,
-            'header_guard': header_guard,
-            'imports': imports,
-        }))
-
-
-def Main():
-  parser = argparse.ArgumentParser(description=__doc__)
-  parser.add_argument('--output-path', required=True, type=str,
-                      help='Path to the generated umbrella header.')
-  parser.add_argument('imported_headers', type=str, nargs='+',
-                      help='Headers to #import in the umbrella header.')
-  options = parser.parse_args()
-
-  return WriteUmbrellaHeader(options.output_path, options.imported_headers)
-
-
-if __name__ == '__main__':
-  Main()
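A small worked example of the two helpers above; the paths and header names are hypothetical:

  # The guard is the upper-cased path with '.', '+', '/' and '\' replaced by
  # '_', plus a trailing '_'.
  ComputeHeaderGuard('gen/framework/Framework.h')
  # -> 'GEN_FRAMEWORK_FRAMEWORK_H_'

  # Writes an umbrella header that #imports "Bar.h" then "Foo.h" (sorted).
  WriteUmbrellaHeader('gen/framework/Framework.h', ['Foo.h', 'Bar.h'])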
diff --git a/build/config/ios/hardlink.py b/build/config/ios/hardlink.py
deleted file mode 100644
index 91dbf62..0000000
--- a/build/config/ios/hardlink.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Recursively create hardlink to target named output."""
-
-
-import argparse
-import os
-import shutil
-
-
-def CreateHardlinkHelper(target, output):
-  """Recursively create a hardlink named output pointing to target.
-
-  Args:
-    target: path to an existing file or directory
-    output: path to the newly created hardlink
-
-  This function assumes that output does not exist but that the parent
-  directory containing output does. If those conditions do not hold, the
-  function will fail with an exception corresponding to an OS error.
-  """
-  if os.path.islink(target):
-    os.symlink(os.readlink(target), output)
-  elif not os.path.isdir(target):
-    try:
-      os.link(target, output)
-    except OSError:
-      # Fall back to copying when hard-linking fails (e.g. across filesystems).
-      shutil.copy(target, output)
-  else:
-    os.mkdir(output)
-    for name in os.listdir(target):
-      CreateHardlinkHelper(
-          os.path.join(target, name),
-          os.path.join(output, name))
-
-
-def CreateHardlink(target, output):
-  """Recursively create a hardlink named output pointing to target.
-
-  Args:
-    target: path to an existing file or directory
-    output: path to the newly created hardlink
-
-  If output already exists, it is first removed. In all cases, the
-  parent directory containing output is created.
-  """
-  if os.path.exists(output):
-    shutil.rmtree(output)
-
-  parent_dir = os.path.dirname(os.path.abspath(output))
-  if not os.path.isdir(parent_dir):
-    os.makedirs(parent_dir)
-
-  CreateHardlinkHelper(target, output)
-
-
-def Main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument('target', help='path to the file or directory to link to')
-  parser.add_argument('output', help='name of the hardlink to create')
-  args = parser.parse_args()
-
-  CreateHardlink(args.target, args.output)
-
-
-if __name__ == '__main__':
-  Main()
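A minimal usage sketch with made-up paths; this is how the ios_app_bundle template (further below) mirrors the default variant bundle into root_out_dir:

  # Hypothetical example: recreate out/Debug/App.app as hardlinks (falling
  # back to copies) of the default variant's bundle.
  CreateHardlink('out/Debug/variants/beta/App.app', 'out/Debug/App.app')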
diff --git a/build/config/ios/ios_sdk.gni b/build/config/ios/ios_sdk.gni
deleted file mode 100644
index 35de13b..0000000
--- a/build/config/ios/ios_sdk.gni
+++ /dev/null
@@ -1,166 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/ios/ios_sdk_overrides.gni")
-import("//build/toolchain/toolchain.gni")
-
-declare_args() {
-  # SDK path to use. When empty this will use the default SDK based on the
-  # value of use_ios_simulator.
-  ios_sdk_path = ""
-  ios_sdk_name = ""
-  ios_sdk_version = ""
-  ios_sdk_platform = ""
-  ios_sdk_platform_path = ""
-  xcode_version = ""
-  xcode_version_int = 0
-  xcode_build = ""
-  machine_os_build = ""
-
-  # The iOS code signing identity to use.
-  # TODO(GYP), TODO(sdfresne): Consider having a separate
-  # ios_enable_code_signing_flag=<bool> flag to make the invocation clearer.
-  ios_enable_code_signing = true
-  ios_code_signing_identity = ""
-  ios_code_signing_identity_description = "iPhone Developer"
-
-  # Prefix for the CFBundleIdentifier property of iOS bundles (corresponds to
-  # the "Organization Identifier" in Xcode). Code signing will fail if no
-  # mobile provisioning profile for the selected code signing identity
-  # supports that prefix.
-  ios_app_bundle_id_prefix = "org.chromium"
-
-  # If true, then allow using Xcode to automatically manage certificates. This
-  # requires loading a separate Xcode project and enabling automatically managed
-  # certificates. When true, all test applications will use the same bundle id
-  # to avoid running out of certificates if using a free account.
-  ios_automatically_manage_certs = true
-
-  # If non-empty, this list must contain valid cpu architectures, and the final
-  # build will be a multi-architecture build (aka fat build) supporting the
-  # main $target_cpu architecture and all of $additional_target_cpus.
-  #
-  # For example to build an application that will run on both arm64 and armv7
-  # devices, you would use the following in args.gn file when running "gn args":
-  #
-  #   target_os = "ios"
-  #   target_cpu = "arm64"
-  #   additional_target_cpus = [ "arm" ]
-  #
-  # You can also pass the value via the "--args" parameter of the "gn gen" command
-  # by using the syntax --args='additional_target_cpus=["arm"] target_cpu="arm64"'.
-  additional_target_cpus = []
-}
-
-assert(custom_toolchain == "" || additional_target_cpus == [],
-       "cannot define both custom_toolchain and additional_target_cpus")
-
-use_ios_simulator = current_cpu == "x86" || current_cpu == "x64"
-
-ios_generic_test_bundle_id_suffix = "generic-unit-test"
-
-# Initialize additional_toolchains from additional_target_cpus. Assert here
-# that the list does not contain $target_cpu nor duplicates as this would
-# cause weird errors during the build.
-additional_toolchains = []
-if (additional_target_cpus != []) {
-  foreach(_additional_target_cpu, additional_target_cpus) {
-    assert(_additional_target_cpu != target_cpu,
-           "target_cpu must not be listed in additional_target_cpus")
-
-    _toolchain = "//build/toolchain/mac:ios_clang_$_additional_target_cpu"
-    foreach(_additional_toolchain, additional_toolchains) {
-      assert(_toolchain != _additional_toolchain,
-             "additional_target_cpus must not contains duplicate values")
-    }
-
-    additional_toolchains += [ _toolchain ]
-  }
-}
-
-if (ios_sdk_path == "") {
-  # Compute default target.
-  if (use_ios_simulator) {
-    ios_sdk_name = "iphonesimulator"
-    ios_sdk_platform = "iPhoneSimulator"
-  } else {
-    ios_sdk_name = "iphoneos"
-    ios_sdk_platform = "iPhoneOS"
-  }
-
-  ios_sdk_info_args = []
-  if (!use_system_xcode) {
-    ios_sdk_info_args += [
-      "--developer_dir",
-      hermetic_xcode_path,
-    ]
-  }
-  ios_sdk_info_args += [ ios_sdk_name ]
-  script_name = "//build/config/mac/sdk_info.py"
-  _ios_sdk_result = exec_script(script_name, ios_sdk_info_args, "scope")
-  ios_sdk_path = _ios_sdk_result.sdk_path
-  ios_sdk_version = _ios_sdk_result.sdk_version
-  ios_sdk_platform_path = _ios_sdk_result.sdk_platform_path
-  ios_sdk_build = _ios_sdk_result.sdk_build
-  xcode_version = _ios_sdk_result.xcode_version
-  xcode_version_int = _ios_sdk_result.xcode_version_int
-  xcode_build = _ios_sdk_result.xcode_build
-  machine_os_build = _ios_sdk_result.machine_os_build
-  if (use_ios_simulator) {
-    # This is weird, but Xcode sets DTPlatformBuild to an empty field for
-    # simulator builds.
-    ios_platform_build = ""
-  } else {
-    ios_platform_build = ios_sdk_build
-  }
-}
-
-if (ios_enable_code_signing && !use_ios_simulator) {
-  find_signing_identity_args = [
-    "--identity-description",
-    ios_code_signing_identity_description,
-  ]
-  if (!use_system_xcode) {
-    find_signing_identity_args += [
-      "--developer_dir",
-      hermetic_xcode_path,
-    ]
-  }
-
-  # If an identity is not provided, look for one on the host
-  if (ios_code_signing_identity == "") {
-    _ios_identities = exec_script("find_signing_identity.py",
-                                  find_signing_identity_args,
-                                  "list lines")
-    if (_ios_identities == []) {
-      print("Tried to prepare a device build without specifying a code signing")
-      print("identity and could not detect one automatically either.")
-      print("TIP: Simulator builds don't require code signing...")
-      assert(false)
-    } else {
-      _ios_identities_len = 0
-      foreach(_, _ios_identities) {
-        _ios_identities_len += 1
-      }
-
-      ios_code_signing_identity = _ios_identities[0]
-      if (_ios_identities_len != 1) {
-        print("Warning: Multiple codesigning identities match " +
-              "\"$ios_code_signing_identity_description\"")
-        foreach(_ios_identity, _ios_identities) {
-          _selected = ""
-          if (ios_code_signing_identity == _ios_identity) {
-            _selected = " (selected)"
-          }
-          print("Warning: - $_ios_identity$_selected")
-        }
-        print("Warning: Please use either ios_code_signing_identity or ")
-        print("Warning: ios_code_signing_identity_description variable to ")
-        print("Warning: control which identity is selected.")
-        print()
-      }
-    }
-  }
-}
-
-assert(xcode_version_int >= 900, "Xcode 9 is required.")
diff --git a/build/config/ios/ios_sdk_overrides.gni b/build/config/ios/ios_sdk_overrides.gni
deleted file mode 100644
index 00105af..0000000
--- a/build/config/ios/ios_sdk_overrides.gni
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file contains arguments that subprojects may choose to override. It
-# asserts that those overrides are used, to prevent unused args warnings.
-
-declare_args() {
-  # Version of iOS that we're targeting.
-  ios_deployment_target = "10.0"
-}
-
-# Always assert that ios_deployment_target is used on non-iOS platforms to
-# prevent unused args warnings.
-if (!is_ios) {
-  assert(ios_deployment_target == "10.0" || true)
-}
diff --git a/build/config/ios/rules.gni b/build/config/ios/rules.gni
deleted file mode 100644
index df6033b..0000000
--- a/build/config/ios/rules.gni
+++ /dev/null
@@ -1,2021 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/ios/ios_sdk.gni")
-import("//build/config/mac/base_rules.gni")
-import("//build/config/mac/symbols.gni")
-import("//build/toolchain/toolchain.gni")
-
-# Invokes lipo on multiple arch-specific binaries to create a fat binary.
-#
-# Arguments
-#
-#   arch_binary_target
-#     name of the target generating the arch-specific binaries; they must
-#     be named $target_out_dir/$toolchain_cpu/$arch_binary_output.
-#
-#   arch_binary_output
-#     (optional, defaults to the name of $arch_binary_target) base name of
-#     the arch-specific binary generated by arch_binary_target.
-#
-#   output_name
-#     (optional, defaults to $target_name) base name of the target output,
-#     the full path will be $target_out_dir/$output_name.
-#
-#   configs
-#     (optional) a list of configurations; this is used to check whether
-#     the binary should be stripped when "enable_stripping" is true.
-#
-template("lipo_binary") {
-  assert(defined(invoker.arch_binary_target),
-         "arch_binary_target must be defined for $target_name")
-
-  _target_name = target_name
-  _output_name = target_name
-  if (defined(invoker.output_name)) {
-    _output_name = invoker.output_name
-  }
-
-  _all_target_cpu = [ current_cpu ] + additional_target_cpus
-  _all_toolchains = [ current_toolchain ] + additional_toolchains
-
-  _arch_binary_target = invoker.arch_binary_target
-  _arch_binary_output = get_label_info(_arch_binary_target, "name")
-  if (defined(invoker.arch_binary_output)) {
-    _arch_binary_output = invoker.arch_binary_output
-  }
-
-  action(_target_name) {
-    forward_variables_from(invoker,
-                           "*",
-                           [
-                             "arch_binary_output",
-                             "arch_binary_target",
-                             "configs",
-                             "output_name",
-                           ])
-
-    script = "//build/toolchain/mac/linker_driver.py"
-
-    # http://crbug.com/762840. Fix for bots running out of memory.
-    pool = "//build/toolchain:link_pool($default_toolchain)"
-
-    outputs = [
-      "$target_out_dir/$_output_name",
-    ]
-
-    deps = []
-    _index = 0
-    inputs = []
-    foreach(_cpu, _all_target_cpu) {
-      _toolchain = _all_toolchains[_index]
-      _index = _index + 1
-
-      inputs +=
-          [ get_label_info("$_arch_binary_target($_toolchain)",
-                           "target_out_dir") + "/$_cpu/$_arch_binary_output" ]
-
-      deps += [ "$_arch_binary_target($_toolchain)" ]
-    }
-
-    args = []
-    if (!use_system_xcode) {
-      args += [
-        "--developer_dir",
-        hermetic_xcode_path,
-      ]
-    }
-    args += [
-              "xcrun",
-              "lipo",
-              "-create",
-              "-output",
-              rebase_path("$target_out_dir/$_output_name", root_build_dir),
-            ] + rebase_path(inputs, root_build_dir)
-
-    if (enable_dsyms) {
-      _dsyms_output_dir = "$root_out_dir/$_output_name.dSYM"
-      outputs += [
-        "$_dsyms_output_dir/",
-        "$_dsyms_output_dir/Contents/Info.plist",
-        "$_dsyms_output_dir/Contents/Resources/DWARF/$_output_name",
-      ]
-      args += [ "-Wcrl,dsym," + rebase_path("$root_out_dir/.", root_build_dir) ]
-    }
-
-    if (enable_stripping) {
-      args += [ "-Wcrl,strip,-x,-S" ]
-      if (save_unstripped_output) {
-        outputs += [ "$root_out_dir/$_output_name.unstripped" ]
-        args += [ "-Wcrl,unstripped," +
-                  rebase_path("$root_out_dir/.", root_build_dir) ]
-      }
-    }
-  }
-}
-
-# Wrapper around create_bundle taking care of code signature settings.
-#
-# Arguments
-#
-#   product_type
-#       string, product type for the generated Xcode project.
-#
-#   bundle_gen_dir
-#       (optional) directory where the bundle is generated; must be below
-#       root_out_dir and defaults to root_out_dir if omitted.
-#
-#   bundle_deps
-#       (optional) list of additional dependencies.
-#
-#   bundle_deps_filter
-#       (optional) list of dependencies to filter (for more information
-#       see "gn help bundle_deps_filter").
-#
-#   bundle_extension
-#       string, extension of the bundle, used to generate bundle name.
-#
-#   bundle_binary_target
-#       (optional) string, label of the target generating the bundle main
-#       binary. This argument and bundle_binary_path are mutually exclusive.
-#
-#   bundle_binary_output
-#       (optional) string, base name of the binary generated by the
-#       bundle_binary_target target, defaults to the target name.
-#
-#   bundle_binary_path
-#       (optional) string, path to the bundle main binary. This target and
-#       bundle_binary_target are mutually exclusive.
-#
-#   output_name:
-#       (optional) string, name of the generated application, if omitted,
-#       defaults to the target_name.
-#
-#   extra_system_frameworks
-#       (optional) list of system frameworks to copy to the bundle.
-#
-#   enable_code_signing
-#       (optional) boolean, controls whether code signing is enabled or not,
-#       defaults to ios_enable_code_signing if not defined.
-#
-#   entitlements_path:
-#       (optional) path to the template to use to generate the application
-#       entitlements by performing variable substitutions, defaults to
-#       //build/config/ios/entitlements.plist.
-#
-#   entitlements_target:
-#       (optional) label of the target generating the application
-#       entitlements (must generate a single file as output); cannot be
-#       defined if entitlements_path is set.
-#
-#   disable_entitlements
-#       (optional, defaults to false) boolean, controls whether entitlements will
-#       be embedded in the application during signature. If false and no
-#       entitlements are provided, default empty entitlements will be used.
-#
-#   disable_embedded_mobileprovision
-#       (optional, defaults to false) boolean, controls whether a mobile
-#       provisioning profile will be embedded in the bundle. If true, the existing
-#       embedded.mobileprovision will be deleted.
-#
-#   xcode_extra_attributes
-#       (optional) scope, extra attributes for Xcode projects.
-#
-#   xcode_test_application_name:
-#       (optional) string, name of the test application for Xcode unit or ui
-#       test target.
-#
-#   primary_info_plist:
-#       (optional) path to Info.plist to merge with the $partial_info_plist
-#       generated by the compilation of the asset catalog.
-#
-#   partial_info_plist:
-#       (optional) path to the partial Info.plist generated by the asset
-#       catalog compiler; if defined $primary_info_plist must also be defined.
-#
-template("create_signed_bundle") {
-  assert(defined(invoker.product_type),
-         "product_type must be defined for $target_name")
-  assert(defined(invoker.bundle_extension),
-         "bundle_extension must be defined for $target_name")
-  assert(defined(invoker.bundle_binary_target) !=
-             defined(invoker.bundle_binary_path),
-         "Only one of bundle_binary_target or bundle_binary_path may be " +
-             "specified for $target_name")
-  assert(!defined(invoker.partial_info_plist) ||
-             defined(invoker.primary_info_plist),
-         "primary_info_plist must be defined when partial_info_plist is " +
-             "defined for $target_name")
-
-  if (defined(invoker.xcode_test_application_name)) {
-    assert(
-        invoker.product_type == "com.apple.product-type.bundle.unit-test" ||
-            invoker.product_type == "com.apple.product-type.bundle.ui-testing",
-        "xcode_test_application_name can be only defined for Xcode unit or ui test target.")
-  }
-
-  _target_name = target_name
-  _output_name = target_name
-  if (defined(invoker.output_name)) {
-    _output_name = invoker.output_name
-  }
-
-  if (defined(invoker.bundle_binary_path)) {
-    _bundle_binary_path = invoker.bundle_binary_path
-  } else {
-    _bundle_binary_target = invoker.bundle_binary_target
-    _bundle_binary_output = get_label_info(_bundle_binary_target, "name")
-    if (defined(invoker.bundle_binary_output)) {
-      _bundle_binary_output = invoker.bundle_binary_output
-    }
-    _bundle_binary_path =
-        get_label_info(_bundle_binary_target, "target_out_dir") +
-        "/$_bundle_binary_output"
-  }
-
-  _bundle_gen_dir = root_out_dir
-  if (defined(invoker.bundle_gen_dir)) {
-    _bundle_gen_dir = invoker.bundle_gen_dir
-  }
-
-  _bundle_extension = invoker.bundle_extension
-
-  _enable_embedded_mobileprovision = true
-  if (defined(invoker.disable_embedded_mobileprovision)) {
-    _enable_embedded_mobileprovision = !invoker.disable_embedded_mobileprovision
-  }
-
-  _enable_entitlements = true
-  if (defined(invoker.disable_entitlements)) {
-    _enable_entitlements = !invoker.disable_entitlements
-  }
-
-  if (_enable_entitlements) {
-    if (!defined(invoker.entitlements_target)) {
-      _entitlements_path = "//build/config/ios/entitlements.plist"
-      if (defined(invoker.entitlements_path)) {
-        _entitlements_path = invoker.entitlements_path
-      }
-    } else {
-      assert(!defined(invoker.entitlements_path),
-             "Cannot define both entitlements_path and entitlements_target " +
-                 "for $target_name")
-
-      _entitlements_target_outputs =
-          get_target_outputs(invoker.entitlements_target)
-      _entitlements_path = _entitlements_target_outputs[0]
-    }
-  }
-
-  _enable_code_signing = ios_enable_code_signing
-  if (defined(invoker.enable_code_signing)) {
-    _enable_code_signing = invoker.enable_code_signing
-  }
-
-  create_bundle(_target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "bundle_deps_filter",
-                             "data_deps",
-                             "deps",
-                             "partial_info_plist",
-                             "product_type",
-                             "public_configs",
-                             "public_deps",
-                             "testonly",
-                             "visibility",
-                             "xcode_extra_attributes",
-                             "xcode_test_application_name",
-                           ])
-
-    bundle_root_dir = "$_bundle_gen_dir/$_output_name$_bundle_extension"
-    bundle_contents_dir = bundle_root_dir
-    bundle_resources_dir = bundle_contents_dir
-    bundle_executable_dir = bundle_contents_dir
-    bundle_plugins_dir = "$bundle_contents_dir/PlugIns"
-
-    if (!defined(public_deps)) {
-      public_deps = []
-    }
-
-    if (defined(invoker.bundle_binary_target)) {
-      public_deps += [ invoker.bundle_binary_target ]
-    }
-
-    if (defined(invoker.bundle_deps)) {
-      if (!defined(deps)) {
-        deps = []
-      }
-      deps += invoker.bundle_deps
-    }
-    if (!defined(deps)) {
-      deps = []
-    }
-
-    code_signing_script = "//build/config/ios/codesign.py"
-    code_signing_sources = [ _bundle_binary_path ]
-    if (_enable_entitlements) {
-      if (defined(invoker.entitlements_target)) {
-        deps += [ invoker.entitlements_target ]
-      }
-      code_signing_sources += [ _entitlements_path ]
-    }
-    code_signing_outputs = [ "$bundle_contents_dir/$_output_name" ]
-    if (_enable_code_signing) {
-      code_signing_outputs +=
-          [ "$bundle_contents_dir/_CodeSignature/CodeResources" ]
-    }
-    if (ios_code_signing_identity != "" && !use_ios_simulator &&
-        _enable_embedded_mobileprovision) {
-      code_signing_outputs +=
-          [ "$bundle_contents_dir/embedded.mobileprovision" ]
-    }
-
-    if (defined(invoker.extra_system_frameworks)) {
-      foreach(_framework, invoker.extra_system_frameworks) {
-        code_signing_outputs += [ "$bundle_contents_dir/Frameworks/" +
-                                  get_path_info(_framework, "file") ]
-      }
-    }
-
-    code_signing_args = []
-    if (!use_system_xcode) {
-      code_signing_args += [
-        "--developer_dir",
-        hermetic_xcode_path,
-      ]
-    }
-    code_signing_args += [
-      "code-sign-bundle",
-      "-t=" + ios_sdk_name,
-      "-i=" + ios_code_signing_identity,
-      "-b=" + rebase_path(_bundle_binary_path, root_build_dir),
-    ]
-    if (_enable_entitlements) {
-      code_signing_args +=
-          [ "-e=" + rebase_path(_entitlements_path, root_build_dir) ]
-    }
-    if (!_enable_embedded_mobileprovision) {
-      code_signing_args += [ "--disable-embedded-mobileprovision" ]
-    }
-    code_signing_args += [ rebase_path(bundle_root_dir, root_build_dir) ]
-    if (!_enable_code_signing) {
-      code_signing_args += [ "--disable-code-signature" ]
-    }
-    if (defined(invoker.extra_system_frameworks)) {
-      # All frameworks in extra_system_frameworks are expected to be system
-      # frameworks with absolute system paths, so do not use rebase_path here.
-      foreach(_framework, invoker.extra_system_frameworks) {
-        code_signing_args += [ "-F=" + _framework ]
-      }
-    }
-    if (defined(invoker.partial_info_plist)) {
-      _partial_info_plists = [
-        invoker.primary_info_plist,
-        invoker.partial_info_plist,
-      ]
-
-      _plist_compiler_path = "//build/config/mac/plist_util.py"
-
-      code_signing_sources += _partial_info_plists
-      code_signing_sources += [ _plist_compiler_path ]
-      code_signing_outputs += [ "$bundle_contents_dir/Info.plist" ]
-
-      code_signing_args +=
-          [ "-P=" + rebase_path(_plist_compiler_path, root_build_dir) ]
-      foreach(_partial_info_plist, _partial_info_plists) {
-        code_signing_args +=
-            [ "-p=" + rebase_path(_partial_info_plist, root_build_dir) ]
-      }
-    }
-  }
-}
-
-# Generates Info.plist files for iOS apps and frameworks.
-#
-# Arguments
-#
-#     info_plist:
-#         (optional) string, path to the Info.plist file that will be used for
-#         the bundle.
-#
-#     info_plist_target:
-#         (optional) string, if the info_plist is generated from an action,
-#         rather than a regular source file, specify the target name in lieu
-#         of info_plist. The two arguments are mutually exclusive.
-#
-#     executable_name:
-#         string, name of the generated target used for the product
-#         and executable name as specified in the output Info.plist.
-#
-#     extra_substitutions:
-#         (optional) string array, 'key=value' pairs for extra fields which are
-#         specified in a source Info.plist template.
-template("ios_info_plist") {
-  assert(defined(invoker.info_plist) != defined(invoker.info_plist_target),
-         "Only one of info_plist or info_plist_target may be specified in " +
-             target_name)
-
-  if (defined(invoker.info_plist)) {
-    _info_plist = invoker.info_plist
-  } else {
-    _info_plist_target_output = get_target_outputs(invoker.info_plist_target)
-    _info_plist = _info_plist_target_output[0]
-  }
-
-  info_plist(target_name) {
-    format = "binary1"
-    extra_substitutions = []
-    if (defined(invoker.extra_substitutions)) {
-      extra_substitutions = invoker.extra_substitutions
-    }
-    extra_substitutions += [
-      "IOS_BUNDLE_ID_PREFIX=$ios_app_bundle_id_prefix",
-      "IOS_PLATFORM_BUILD=$ios_platform_build",
-      "IOS_PLATFORM_NAME=$ios_sdk_name",
-      "IOS_PLATFORM_VERSION=$ios_sdk_version",
-      "IOS_SDK_BUILD=$ios_sdk_build",
-      "IOS_SDK_NAME=$ios_sdk_name$ios_sdk_version",
-      "IOS_SUPPORTED_PLATFORM=$ios_sdk_platform",
-    ]
-    plist_templates = [
-      "//build/config/ios/BuildInfo.plist",
-      _info_plist,
-    ]
-    if (defined(invoker.info_plist_target)) {
-      deps = [
-        invoker.info_plist_target,
-      ]
-    }
-    forward_variables_from(invoker,
-                           [
-                             "executable_name",
-                             "output_name",
-                             "visibility",
-                             "testonly",
-                           ])
-  }
-}
-
-# Template to build an application bundle for iOS.
-#
-# This should be used instead of the "executable" built-in target type on iOS.
-# As the template forwards the generation of the application executable to
-# an "executable" target, all arguments supported by "executable" targets
-# are also supported by this template.
-#
-# Arguments
-#
-#   output_name:
-#       (optional) string, name of the generated application, if omitted,
-#       defaults to the target_name.
-#
-#   extra_substitutions:
-#       (optional) list of strings in "key=value" format; each value will
-#       be used as an additional variable substitution rule when generating
-#       the application Info.plist
-#
-#   info_plist:
-#       (optional) string, path to the Info.plist file that will be used for
-#       the bundle.
-#
-#   info_plist_target:
-#       (optional) string, if the info_plist is generated from an action,
-#       rather than a regular source file, specify the target name in lieu
-#       of info_plist. The two arguments are mutually exclusive.
-#
-#   entitlements_path:
-#       (optional) path to the template to use to generate the application
-#       entitlements by performing variable substitutions, defaults to
-#       //build/config/ios/entitlements.plist.
-#
-#   entitlements_target:
-#       (optional) label of the target generating the application
-#       entitlements (must generate a single file as output); cannot be
-#       defined if entitlements_path is set.
-#
-#   bundle_extension:
-#       (optional) bundle extension including the dot, defaults to ".app".
-#
-#   product_type
-#       (optional) string, product type for the generated Xcode project,
-#       default to "com.apple.product-type.application". Should generally
-#       not be overridden.
-#
-#   enable_code_signing
-#       (optional) boolean, controls whether code signing is enabled or not,
-#       defaults to ios_enable_code_signing if not defined.
-#
-#   variants
-#       (optional) list of scopes, each scope needs to define the attributes
-#       "name" and "bundle_deps"; if defined and non-empty, then one bundle
-#       named $target_out_dir/$variant/$output_name will be created for each
-#       variant with the same binary but the correct bundle_deps; the bundle
-#       at $target_out_dir/$output_name will be a copy of the first variant.
-#
-# For more information, see "gn help executable".
-template("ios_app_bundle") {
-  _output_name = target_name
-  _target_name = target_name
-  if (defined(invoker.output_name)) {
-    _output_name = invoker.output_name
-  }
-
-  _arch_executable_source = _target_name + "_arch_executable_sources"
-  _arch_executable_target = _target_name + "_arch_executable"
-  _lipo_executable_target = _target_name + "_executable"
-
-  if (defined(invoker.variants) && invoker.variants != []) {
-    _variants = []
-
-    foreach(_variant, invoker.variants) {
-      assert(defined(_variant.name) && _variant.name != "",
-             "name must be defined for all $target_name variants")
-
-      assert(defined(_variant.bundle_deps),
-             "bundle_deps must be defined for all $target_name variants")
-
-      _variants += [
-        {
-          name = _variant.name
-          bundle_deps = _variant.bundle_deps
-          target_name = "${_target_name}_variants_${_variant.name}"
-          bundle_gen_dir = "$root_out_dir/variants/${_variant.name}"
-        },
-      ]
-    }
-  } else {
-    # If no variants are passed to the template, use a fake variant with
-    # no name to avoid duplicating code. As no variant can have an empty
-    # name except this fake variant, it is possible to know if a variant
-    # is fake or not.
-    _variants = [
-      {
-        name = ""
-        bundle_deps = []
-        target_name = _target_name
-        bundle_gen_dir = root_out_dir
-      },
-    ]
-  }
-
-  _default_variant = _variants[0]
-
-  if (current_toolchain != default_toolchain) {
-    # Reference _variants and _default_variant for the secondary toolchain to
-    # avoid the "Assignment had no effect" error from gn.
-    assert(_variants != [])
-    assert(_default_variant.target_name != "")
-  }
-
-  source_set(_arch_executable_source) {
-    forward_variables_from(invoker,
-                           "*",
-                           [
-                             "bundle_deps",
-                             "bundle_deps_filter",
-                             "bundle_extension",
-                             "enable_code_signing",
-                             "entitlements_path",
-                             "entitlements_target",
-                             "extra_substitutions",
-                             "extra_system_frameworks",
-                             "info_plist",
-                             "info_plist_target",
-                             "output_name",
-                             "product_type",
-                             "visibility",
-                           ])
-
-    visibility = [ ":$_arch_executable_target" ]
-  }
-
-  if (current_toolchain == default_toolchain || use_ios_simulator) {
-    _generate_entitlements_target = _target_name + "_gen_entitlements"
-    _generate_entitlements_output =
-        get_label_info(":$_generate_entitlements_target($default_toolchain)",
-                       "target_out_dir") + "/$_output_name.xcent"
-  }
-
-  executable(_arch_executable_target) {
-    forward_variables_from(invoker,
-                           "*",
-                           [
-                             "bundle_deps",
-                             "bundle_deps_filter",
-                             "bundle_extension",
-                             "enable_code_signing",
-                             "entitlements_path",
-                             "entitlements_target",
-                             "extra_substitutions",
-                             "extra_system_frameworks",
-                             "info_plist",
-                             "info_plist_target",
-                             "output_name",
-                             "product_type",
-                             "sources",
-                             "visibility",
-                           ])
-
-    visibility = [ ":$_lipo_executable_target($default_toolchain)" ]
-    if (current_toolchain != default_toolchain) {
-      visibility += [ ":$_target_name" ]
-    }
-
-    if (!defined(deps)) {
-      deps = []
-    }
-    deps += [ ":$_arch_executable_source" ]
-
-    if (!defined(libs)) {
-      libs = []
-    }
-    libs += [ "UIKit.framework" ]
-
-    if (!defined(ldflags)) {
-      ldflags = []
-    }
-    ldflags += [
-      "-Xlinker",
-      "-rpath",
-      "-Xlinker",
-      "@executable_path/Frameworks",
-      "-Xlinker",
-      "-objc_abi_version",
-      "-Xlinker",
-      "2",
-    ]
-
-    if (use_ios_simulator) {
-      deps += [ ":$_generate_entitlements_target($default_toolchain)" ]
-
-      if (!defined(inputs)) {
-        inputs = []
-      }
-      inputs += [ _generate_entitlements_output ]
-
-      if (!defined(ldflags)) {
-        ldflags = []
-      }
-      ldflags += [
-        "-Xlinker",
-        "-sectcreate",
-        "-Xlinker",
-        "__TEXT",
-        "-Xlinker",
-        "__entitlements",
-        "-Xlinker",
-        rebase_path(_generate_entitlements_output, root_build_dir),
-      ]
-    }
-
-    output_name = _output_name
-    output_prefix_override = true
-    output_dir = "$target_out_dir/$current_cpu"
-  }
-
-  if (current_toolchain != default_toolchain) {
-    # For fat builds, only the default toolchain will generate an application
-    # bundle. For the other toolchains, the template is only used for building
-    # the arch-specific binary, thus the default target is just a group().
-
-    group(_target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "visibility",
-                               "testonly",
-                             ])
-      public_deps = [
-        ":$_arch_executable_target",
-      ]
-    }
-  } else {
-    lipo_binary(_lipo_executable_target) {
-      forward_variables_from(invoker,
-                             [
-                               "configs",
-                               "testonly",
-                             ])
-
-      visibility = []
-      foreach(_variant, _variants) {
-        visibility += [ ":${_variant.target_name}" ]
-      }
-
-      output_name = _output_name
-      arch_binary_target = ":$_arch_executable_target"
-      arch_binary_output = _output_name
-    }
-
-    _generate_info_plist = target_name + "_generate_info_plist"
-    ios_info_plist(_generate_info_plist) {
-      forward_variables_from(invoker,
-                             [
-                               "extra_substitutions",
-                               "info_plist",
-                               "info_plist_target",
-                             ])
-
-      executable_name = _output_name
-    }
-
-    if (current_toolchain == default_toolchain) {
-      if (!defined(invoker.entitlements_target)) {
-        _entitlements_path = "//build/config/ios/entitlements.plist"
-        if (defined(invoker.entitlements_path)) {
-          _entitlements_path = invoker.entitlements_path
-        }
-      } else {
-        assert(!defined(invoker.entitlements_path),
-               "Cannot define both entitlements_path and entitlements_target" +
-                   "for $_target_name")
-
-        _entitlements_target_outputs =
-            get_target_outputs(invoker.entitlements_target)
-        _entitlements_path = _entitlements_target_outputs[0]
-      }
-
-      action(_generate_entitlements_target) {
-        _gen_info_plist_outputs = get_target_outputs(":$_generate_info_plist")
-        _info_plist_path = _gen_info_plist_outputs[0]
-
-        script = "//build/config/ios/codesign.py"
-        deps = [
-          ":$_generate_info_plist",
-        ]
-        if (defined(invoker.entitlements_target)) {
-          deps += [ invoker.entitlements_target ]
-        }
-        sources = [
-          _entitlements_path,
-          _info_plist_path,
-        ]
-        outputs = [
-          _generate_entitlements_output,
-        ]
-
-        args = []
-        if (!use_system_xcode) {
-          args += [
-            "--developer_dir",
-            hermetic_xcode_path,
-          ]
-        }
-        args += [
-                  "generate-entitlements",
-                  "-e=" + rebase_path(_entitlements_path, root_build_dir),
-                  "-p=" + rebase_path(_info_plist_path, root_build_dir),
-                ] + rebase_path(outputs, root_build_dir)
-      }
-    }
-
-    _app_product_type = "com.apple.product-type.application"
-    _product_type = _app_product_type
-    if (defined(invoker.product_type)) {
-      _product_type = invoker.product_type
-    }
-
-    _app_bundle_extension = ".app"
-    _bundle_extension = _app_bundle_extension
-    if (defined(invoker.bundle_extension)) {
-      _bundle_extension = invoker.bundle_extension
-    }
-
-    # Only write PkgInfo for real applications, not application extensions (they
-    # have the same product type but a different extension).
-    _write_pkg_info = _product_type == _app_product_type &&
-                      _bundle_extension == _app_bundle_extension
-
-    if (_write_pkg_info) {
-      _create_pkg_info = target_name + "_pkg_info"
-      action(_create_pkg_info) {
-        forward_variables_from(invoker, [ "testonly" ])
-        script = "//build/config/mac/write_pkg_info.py"
-        sources = get_target_outputs(":$_generate_info_plist")
-        outputs = [
-          # Cannot name the output PkgInfo as the name will not be unique if
-          # multiple ios_app_bundle targets are defined in the same BUILD.gn file.
-          # The file is renamed in the bundle_data outputs to the correct name.
-          "$target_gen_dir/$target_name",
-        ]
-        args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
-               [ "--output" ] + rebase_path(outputs, root_build_dir)
-        deps = [
-          ":$_generate_info_plist",
-        ]
-      }
-
-      _bundle_data_pkg_info = target_name + "_bundle_data_pkg_info"
-      bundle_data(_bundle_data_pkg_info) {
-        forward_variables_from(invoker, [ "testonly" ])
-        sources = get_target_outputs(":$_create_pkg_info")
-        outputs = [
-          "{{bundle_resources_dir}}/PkgInfo",
-        ]
-        public_deps = [
-          ":$_create_pkg_info",
-        ]
-      }
-    }
-
-    foreach(_variant, _variants) {
-      create_signed_bundle(_variant.target_name) {
-        forward_variables_from(invoker,
-                               [
-                                 "bundle_deps",
-                                 "bundle_deps_filter",
-                                 "data_deps",
-                                 "deps",
-                                 "enable_code_signing",
-                                 "entitlements_path",
-                                 "entitlements_target",
-                                 "extra_system_frameworks",
-                                 "public_configs",
-                                 "public_deps",
-                                 "testonly",
-                                 "visibility",
-                               ])
-
-        output_name = _output_name
-        bundle_gen_dir = _variant.bundle_gen_dir
-        bundle_binary_target = ":$_lipo_executable_target"
-        bundle_binary_output = _output_name
-        bundle_extension = _bundle_extension
-        product_type = _product_type
-
-        _generate_info_plist_outputs =
-            get_target_outputs(":$_generate_info_plist")
-        primary_info_plist = _generate_info_plist_outputs[0]
-        partial_info_plist =
-            "$target_gen_dir/${_variant.target_name}_partial_info.plist"
-
-        if (!defined(deps)) {
-          deps = []
-        }
-        deps += [ ":$_generate_info_plist" ]
-
-        if (!defined(bundle_deps)) {
-          bundle_deps = []
-        }
-        if (_write_pkg_info) {
-          bundle_deps += [ ":$_bundle_data_pkg_info" ]
-        }
-        bundle_deps += _variant.bundle_deps
-
-        if (use_ios_simulator) {
-          if (!defined(data_deps)) {
-            data_deps = []
-          }
-          data_deps += [ "//testing/iossim" ]
-        }
-      }
-    }
-
-    if (_default_variant.name != "") {
-      _bundle_short_name = "$_output_name$_bundle_extension"
-      action(_target_name) {
-        forward_variables_from(invoker, [ "testonly" ])
-
-        script = "//build/config/ios/hardlink.py"
-        public_deps = []
-        foreach(_variant, _variants) {
-          public_deps += [ ":${_variant.target_name}" ]
-        }
-
-        sources = [
-          "${_default_variant.bundle_gen_dir}/$_bundle_short_name",
-        ]
-        outputs = [
-          "$root_out_dir/$_bundle_short_name",
-        ]
-
-        args = rebase_path(sources, root_out_dir) +
-               rebase_path(outputs, root_out_dir)
-      }
-    }
-  }
-}
-
-set_defaults("ios_app_bundle") {
-  configs = default_executable_configs
-}
-
-# Template to build an application extension bundle for iOS.
-#
-# This should be used instead of the "executable" built-in target type on iOS.
-# As the template forwards the generation of the application executable to
-# an "executable" target, all arguments supported by "executable" targets
-# are also supported by this template.
-#
-# Arguments
-#
-#   output_name:
-#       (optional) string, name of the generated application, if omitted,
-#       defaults to the target_name.
-#
-#   extra_substitutions:
-#       (optional) list of strings in "key=value" format; each value will
-#       be used as an additional variable substitution rule when generating
-#       the application Info.plist
-#
-#   info_plist:
-#       (optional) string, path to the Info.plist file that will be used for
-#       the bundle.
-#
-#   info_plist_target:
-#       (optional) string, if the info_plist is generated from an action,
-#       rather than a regular source file, specify the target name in lieu
-#       of info_plist. The two arguments are mutually exclusive.
-#
-# For more information, see "gn help executable".
-template("ios_appex_bundle") {
-  ios_app_bundle(target_name) {
-    forward_variables_from(invoker,
-                           "*",
-                           [
-                             "bundle_extension",
-                             "product_type",
-                           ])
-    bundle_extension = ".appex"
-    product_type = "com.apple.product-type.app-extension"
-
-    # Add linker flags required for an application extension (determined by
-    # inspecting the link command-line when using Xcode 9.0+).
-    if (!defined(ldflags)) {
-      ldflags = []
-    }
-    ldflags += [
-      "-e",
-      "_NSExtensionMain",
-      "-fapplication-extension",
-    ]
-  }
-}
-
-set_defaults("ios_appex_bundle") {
-  configs = default_executable_configs
-}
-
-# Compile a xib or storyboard file and add it to a bundle_data so that it is
-# available at runtime in the bundle.
-#
-# Arguments
-#
-#   source:
-#       string, path of the xib or storyboard to compile.
-#
-# Forwards all variables to the bundle_data target.
-template("bundle_data_ib_file") {
-  assert(defined(invoker.source), "source needs to be defined for $target_name")
-
-  _source_extension = get_path_info(invoker.source, "extension")
-  assert(_source_extension == "xib" || _source_extension == "storyboard",
-         "source must be a .xib or .storyboard for $target_name")
-
-  _target_name = target_name
-  if (_source_extension == "xib") {
-    _compile_ib_file = target_name + "_compile_xib"
-    _output_extension = "nib"
-  } else {
-    _compile_ib_file = target_name + "_compile_storyboard"
-    _output_extension = "storyboardc"
-  }
-
-  compile_ib_files(_compile_ib_file) {
-    sources = [
-      invoker.source,
-    ]
-    output_extension = _output_extension
-    visibility = [ ":$_target_name" ]
-    ibtool_flags = [
-      "--minimum-deployment-target",
-      ios_deployment_target,
-      "--auto-activate-custom-fonts",
-      "--target-device",
-      "iphone",
-      "--target-device",
-      "ipad",
-    ]
-  }
-
-  bundle_data(_target_name) {
-    forward_variables_from(invoker, "*", [ "source" ])
-
-    if (!defined(public_deps)) {
-      public_deps = []
-    }
-    public_deps += [ ":$_compile_ib_file" ]
-
-    sources = get_target_outputs(":$_compile_ib_file")
-
-    outputs = [
-      "{{bundle_resources_dir}}/{{source_file_part}}",
-    ]
-  }
-}
-
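As an illustration, the bundle_data_ib_file template being removed would
typically be instantiated as in the following sketch (target and file names
are hypothetical):

  bundle_data_ib_file("launch_screen") {
    # Compiled with ibtool and copied into the bundle's resources directory.
    source = "resources/LaunchScreen.storyboard"
  }
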
-# Compile a strings file and add it to a bundle_data so that it is available
-# at runtime in the bundle.
-#
-# Arguments
-#
-#   source:
-#       string, path of the strings file to compile.
-#
-#   output:
-#       string, path of the compiled file in the final bundle.
-#
-# Forwards all variables to the bundle_data target.
-template("bundle_data_strings") {
-  assert(defined(invoker.source), "source needs to be defined for $target_name")
-  assert(defined(invoker.output), "output needs to be defined for $target_name")
-
-  _source_extension = get_path_info(invoker.source, "extension")
-  assert(_source_extension == "strings",
-         "source must be a .strings for $target_name")
-
-  _target_name = target_name
-  _convert_target = target_name + "_compile_strings"
-
-  convert_plist(_convert_target) {
-    visibility = [ ":$_target_name" ]
-    source = invoker.source
-    output =
-        "$target_gen_dir/$_target_name/" + get_path_info(invoker.source, "file")
-    format = "binary1"
-  }
-
-  bundle_data(_target_name) {
-    forward_variables_from(invoker,
-                           "*",
-                           [
-                             "source",
-                             "output",
-                           ])
-
-    if (!defined(public_deps)) {
-      public_deps = []
-    }
-    public_deps += [ ":$_convert_target" ]
-
-    sources = get_target_outputs(":$_convert_target")
-
-    outputs = [
-      invoker.output,
-    ]
-  }
-}
-
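Likewise, a hypothetical use of the bundle_data_strings template being
removed:

  bundle_data_strings("localizable_strings") {
    # Converted to binary plist format before being copied into the bundle.
    source = "resources/Localizable.strings"
    output = "{{bundle_resources_dir}}/Localizable.strings"
  }
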
-# Template to package a shared library into an iOS framework bundle.
-#
-# By default, the bundle target this template generates does not link the
-# resulting framework into anything that depends on it. If a dependency wants
-# a link-time (as well as build-time) dependency on the framework bundle,
-# depend against "$target_name+link". If only the build-time dependency is
-# required (e.g., for copying into another bundle), then use "$target_name".
-#
-# Arguments
-#
-#     output_name:
-#         (optional) string, name of the generated framework without the
-#         .framework suffix. If omitted, defaults to target_name.
-#
-#     public_headers:
-#         (optional) list of paths to header files that need to be copied
-#         into the framework bundle Headers subdirectory. If omitted or
-#         empty then the Headers subdirectory is not created.
-#
-#     sources:
-#         (optional) list of files. Needs to be defined and non-empty if
-#         public_headers is defined and non-empty.
-#
-#     enable_code_signing:
-#         (optional) boolean, controls whether code signing is enabled or not,
-#         defaults to ios_enable_code_signing if not defined.
-#
-# This template provides two targets for the resulting framework bundle. The
-# link-time behavior varies depending on which of the two targets below is
-# added as a dependency:
-#   - $target_name only adds a build-time dependency. Targets that depend on
-#     it will not link against the framework.
-#   - $target_name+link adds a build-time and link-time dependency. Targets
-#     that depend on it will link against the framework.
-#
-# The build-time-only dependency is used when a target needs the framework
-# either only for resources, or because the target loads it at run-time via
-# dlopen() or NSBundle. The link-time dependency will cause the dependent
-# target to have the framework loaded by dyld at launch.
-#
-# Example of build-time only dependency:
-#
-#     framework_bundle("CoreTeleportation") {
-#       sources = [ ... ]
-#     }
-#
-#     bundle_data("core_teleportation_bundle_data") {
-#       deps = [ ":CoreTeleportation" ]
-#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
-#       outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
-#     }
-#
-#     app_bundle("GoatTeleporter") {
-#       sources = [ ... ]
-#       deps = [
-#         ":core_teleportation_bundle_data",
-#       ]
-#     }
-#
-# The GoatTeleporter.app will not directly link against
-# CoreTeleportation.framework, but it will be included in the bundle's
-# Frameworks directory.
-#
-# Example of link-time dependency:
-#
-#     framework_bundle("CoreTeleportation") {
-#       sources = [ ... ]
-#       ldflags = [
-#         "-install_name",
-#         "@executable_path/../Frameworks/$target_name.framework"
-#       ]
-#     }
-#
-#     bundle_data("core_teleportation_bundle_data") {
-#       deps = [ ":CoreTeleportation+link" ]
-#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
-#       outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
-#     }
-#
-#     app_bundle("GoatTeleporter") {
-#       sources = [ ... ]
-#       deps = [
-#         ":core_teleportation_bundle_data",
-#       ]
-#     }
-#
-# Note that the framework is still copied to the app's bundle, but dyld will
-# load this library when the app is launched because it uses the "+link"
-# target as a dependency. This also requires that the framework set its
-# install_name so that dyld can locate it.
-#
-# See "gn help shared_library" for more information on arguments supported
-# by shared library target.
-template("ios_framework_bundle") {
-  _target_name = target_name
-  _output_name = target_name
-  if (defined(invoker.output_name)) {
-    _output_name = invoker.output_name
-  }
-
-  _has_public_headers =
-      defined(invoker.public_headers) && invoker.public_headers != []
-
-  # Public configs are not propagated across toolchains (see crbug.com/675224)
-  # so some configs have to be defined for both the default_toolchain and all
-  # other toolchains when performing a fat build. Use "get_label_info" to
-  # construct the paths since they need to be relative to the default_toolchain.
-
-  _default_toolchain_root_out_dir =
-      get_label_info("$_target_name($default_toolchain)", "root_out_dir")
-  _default_toolchain_target_gen_dir =
-      get_label_info("$_target_name($default_toolchain)", "target_gen_dir")
-
-  if (_has_public_headers) {
-    _framework_headers_target = _target_name + "_framework_headers"
-    _framework_headers_config = _target_name + "_framework_headers_config"
-    config(_framework_headers_config) {
-      # The link settings are inherited from the framework_bundle config.
-      cflags = [
-        "-F",
-        rebase_path("$_default_toolchain_root_out_dir/.", root_build_dir),
-      ]
-    }
-
-    _headers_map_config = _target_name + "_headers_map"
-    _header_map_filename =
-        "$_default_toolchain_target_gen_dir/$_output_name.headers.hmap"
-    config(_headers_map_config) {
-      visibility = [ ":$_target_name" ]
-      include_dirs = [ _header_map_filename ]
-    }
-  }
-
-  _arch_shared_library_source = _target_name + "_arch_shared_library_sources"
-  _arch_shared_library_target = _target_name + "_arch_shared_library"
-  _lipo_shared_library_target = _target_name + "_shared_library"
-  _link_target_name = _target_name + "+link"
-
-  _framework_public_config = _target_name + "_public_config"
-  config(_framework_public_config) {
-    # TODO(sdefresne): should we have a framework_dirs similar to lib_dirs
-    # and include_dirs to avoid duplicate values on the command line?
-    visibility = [ ":$_target_name" ]
-    ldflags = [
-      "-F",
-      rebase_path("$_default_toolchain_root_out_dir/.", root_build_dir),
-    ]
-    lib_dirs = [ root_out_dir ]
-    libs = [ "$_output_name.framework" ]
-  }
-
-  source_set(_arch_shared_library_source) {
-    forward_variables_from(invoker,
-                           "*",
-                           [
-                             "bundle_deps",
-                             "bundle_deps_filter",
-                             "data_deps",
-                             "enable_code_signing",
-                             "extra_substitutions",
-                             "info_plist",
-                             "info_plist_target",
-                             "output_name",
-                             "visibility",
-                           ])
-
-    visibility = [ ":$_arch_shared_library_target" ]
-
-    if (_has_public_headers) {
-      configs += [
-        ":$_framework_headers_config",
-        ":$_headers_map_config",
-      ]
-
-      if (!defined(deps)) {
-        deps = []
-      }
-      deps += [ ":$_framework_headers_target($default_toolchain)" ]
-    }
-  }
-
-  shared_library(_arch_shared_library_target) {
-    forward_variables_from(invoker,
-                           "*",
-                           [
-                             "bundle_deps",
-                             "bundle_deps_filter",
-                             "data_deps",
-                             "enable_code_signing",
-                             "extra_substitutions",
-                             "info_plist",
-                             "info_plist_target",
-                             "output_name",
-                             "sources",
-                             "visibility",
-                           ])
-
-    visibility = [ ":$_lipo_shared_library_target($default_toolchain)" ]
-    if (current_toolchain != default_toolchain) {
-      visibility += [ ":$_target_name" ]
-    }
-
-    if (!defined(deps)) {
-      deps = []
-    }
-    deps += [ ":$_arch_shared_library_source" ]
-    if (_has_public_headers) {
-      deps += [ ":$_framework_headers_target($default_toolchain)" ]
-    }
-    if (!defined(ldflags)) {
-      ldflags = []
-    }
-    ldflags += [
-      "-Xlinker",
-      "-install_name",
-      "-Xlinker",
-      "@rpath/$_output_name.framework/$_output_name",
-      "-Xlinker",
-      "-objc_abi_version",
-      "-Xlinker",
-      "2",
-    ]
-
-    output_extension = ""
-    output_name = _output_name
-    output_prefix_override = true
-    output_dir = "$target_out_dir/$current_cpu"
-  }
-
-  if (current_toolchain != default_toolchain) {
-    # For fat builds, only the default toolchain will generate a framework
-    # bundle. For the other toolchains, the template is only used for building
-    # the arch-specific binary, thus the default target is just a group().
-
-    group(_target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "visibility",
-                               "testonly",
-                             ])
-      public_deps = [
-        ":$_arch_shared_library_target",
-      ]
-    }
-
-    group(_link_target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "public_configs",
-                               "visibility",
-                               "testonly",
-                             ])
-      public_deps = [
-        ":$_link_target_name($default_toolchain)",
-      ]
-
-      if (_has_public_headers) {
-        if (!defined(public_configs)) {
-          public_configs = []
-        }
-        public_configs += [ ":$_framework_headers_config" ]
-      }
-      if (!defined(all_dependent_configs)) {
-        all_dependent_configs = []
-      }
-      all_dependent_configs += [ ":$_framework_public_config" ]
-    }
-
-    if (defined(invoker.bundle_deps)) {
-      assert(invoker.bundle_deps != [], "mark bundle_deps as used")
-    }
-  } else {
-    if (_has_public_headers) {
-      _public_headers = invoker.public_headers
-      _framework_root = "$root_out_dir/$_output_name.framework"
-
-      _compile_headers_map_target = _target_name + "_compile_headers_map"
-      action(_compile_headers_map_target) {
-        visibility = [ ":$_framework_headers_target" ]
-        forward_variables_from(invoker,
-                               [
-                                 "deps",
-                                 "public_deps",
-                                 "testonly",
-                               ])
-        script = "//build/config/ios/write_framework_hmap.py"
-        outputs = [
-          _header_map_filename,
-        ]
-
-        # The header map generation only wants the list of headers, not all of
-        # the sources, so filter any non-header source files from "sources".
-        # It is less error prone than having the developer duplicate the list
-        # of all headers in addition to "sources".
-        set_sources_assignment_filter([
-                                        "*.c",
-                                        "*.cc",
-                                        "*.cpp",
-                                        "*.m",
-                                        "*.mm",
-                                      ])
-        sources = invoker.sources
-        set_sources_assignment_filter([])
-
-        args = [
-                 rebase_path(_header_map_filename),
-                 rebase_path(_framework_root, root_build_dir),
-               ] + rebase_path(sources, root_build_dir)
-      }
-
-      _create_module_map_target = _target_name + "_module_map"
-      action(_create_module_map_target) {
-        visibility = [ ":$_framework_headers_target" ]
-        script = "//build/config/ios/write_framework_modulemap.py"
-        outputs = [
-          "$_framework_root/Modules/module.modulemap",
-        ]
-        args = [ rebase_path("$_framework_root", root_build_dir) ]
-      }
-
-      _copy_public_headers_target = _target_name + "_copy_public_headers"
-      copy(_copy_public_headers_target) {
-        forward_variables_from(invoker,
-                               [
-                                 "testonly",
-                                 "deps",
-                               ])
-        visibility = [ ":$_framework_headers_target" ]
-        sources = _public_headers
-        outputs = [
-          "$_framework_root/Headers/{{source_file_part}}",
-        ]
-
-        # Do not use forward_variables_from for "public_deps" as
-        # we do not want to forward those dependencies.
-        if (defined(invoker.public_deps)) {
-          if (!defined(deps)) {
-            deps = []
-          }
-          deps += invoker.public_deps
-        }
-      }
-
-      group(_framework_headers_target) {
-        forward_variables_from(invoker, [ "testonly" ])
-        deps = [
-          ":$_compile_headers_map_target",
-          ":$_create_module_map_target",
-        ]
-        public_deps = [
-          ":$_copy_public_headers_target",
-        ]
-      }
-    }
-
-    lipo_binary(_lipo_shared_library_target) {
-      forward_variables_from(invoker,
-                             [
-                               "configs",
-                               "testonly",
-                             ])
-
-      visibility = [ ":$_target_name" ]
-      output_name = _output_name
-      arch_binary_target = ":$_arch_shared_library_target"
-      arch_binary_output = _output_name
-    }
-
-    _info_plist_target = _target_name + "_info_plist"
-    _info_plist_bundle = _target_name + "_info_plist_bundle"
-    ios_info_plist(_info_plist_target) {
-      visibility = [ ":$_info_plist_bundle" ]
-      executable_name = _output_name
-      forward_variables_from(invoker,
-                             [
-                               "extra_substitutions",
-                               "info_plist",
-                               "info_plist_target",
-                             ])
-    }
-
-    bundle_data(_info_plist_bundle) {
-      visibility = [ ":$_target_name" ]
-      forward_variables_from(invoker, [ "testonly" ])
-      sources = get_target_outputs(":$_info_plist_target")
-      outputs = [
-        "{{bundle_contents_dir}}/Info.plist",
-      ]
-      public_deps = [
-        ":$_info_plist_target",
-      ]
-    }
-
-    create_signed_bundle(_target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "bundle_deps",
-                               "bundle_deps_filter",
-                               "data_deps",
-                               "deps",
-                               "enable_code_signing",
-                               "public_configs",
-                               "public_deps",
-                               "testonly",
-                               "visibility",
-                             ])
-
-      product_type = "com.apple.product-type.framework"
-      bundle_extension = ".framework"
-
-      output_name = _output_name
-      bundle_binary_target = ":$_lipo_shared_library_target"
-      bundle_binary_output = _output_name
-
-      # Frameworks do not have entitlements nor mobileprovision files because
-      # they use the ones from the bundle using them (.app or .appex), as they
-      # are just dynamic libraries with shared code.
-      disable_entitlements = true
-      disable_embedded_mobileprovision = true
-
-      if (!defined(deps)) {
-        deps = []
-      }
-      deps += [ ":$_info_plist_bundle" ]
-    }
-
-    group(_link_target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "public_configs",
-                               "public_deps",
-                               "testonly",
-                               "visibility",
-                             ])
-      if (!defined(public_deps)) {
-        public_deps = []
-      }
-      public_deps += [ ":$_target_name" ]
-
-      if (_has_public_headers) {
-        if (!defined(public_configs)) {
-          public_configs = []
-        }
-        public_configs += [ ":$_framework_headers_config" ]
-      }
-      if (!defined(all_dependent_configs)) {
-        all_dependent_configs = []
-      }
-      all_dependent_configs += [ ":$_framework_public_config" ]
-    }
-
-    bundle_data(_target_name + "+bundle") {
-      forward_variables_from(invoker,
-                             [
-                               "testonly",
-                               "visibility",
-                             ])
-      public_deps = [
-        ":$_target_name",
-      ]
-      sources = [
-        "$root_out_dir/$_output_name.framework",
-      ]
-      outputs = [
-        "{{bundle_resources_dir}}/Frameworks/$_output_name.framework",
-      ]
-    }
-  }
-}
-
-set_defaults("ios_framework_bundle") {
-  configs = default_shared_library_configs
-}
-
-# Template to build a xctest bundle that contains a loadable module for iOS.
-#
-# Arguments
-#
-#   deps:
-#       list of labels to depend on; these values are used to create the
-#       loadable module.
-#
-#   product_type:
-#       string, product type for the generated Xcode project, use
-#       "com.apple.product-type.bundle.unit-test" for unit tests and
-#       "com.apple.product-type.bundle.ui-testing" for UI testing.
-#
-#   host_target:
-#       string, name of the target that depends on the generated bundle; this
-#       value is used to restrict visibility.
-#
-#   xcode_test_application_name:
-#       string, name of the test application for the Xcode unit or UI test
-#       target.
-#
-#   output_name:
-#       (optional) string, name of the generated application, if omitted,
-#       defaults to the target_name.
-#
-# This template defines two targets: one named "${target_name}", which is the
-# xctest bundle, and one named "${target_name}_bundle", which is a bundle_data
-# that wraps the xctest bundle and that only the "${host_target}" can depend
-# on.
-#
-template("ios_xctest_bundle") {
-  assert(defined(invoker.deps), "deps must be defined for $target_name")
-  assert(defined(invoker.product_type),
-         "product_type must be defined for $target_name")
-  assert(invoker.product_type == "com.apple.product-type.bundle.unit-test" ||
-             invoker.product_type == "com.apple.product-type.bundle.ui-testing",
-         "product_type defined for $target_name is invalid.")
-  assert(defined(invoker.host_target),
-         "host_target must be defined for $target_name")
-  assert(defined(invoker.xcode_test_application_name),
-         "xcode_test_application_name must be defined for $target_name")
-
-  # Silence the "assignment had no effect" error for non-default toolchains as
-  # the following variables are only used in the expansion of the template for
-  # the default toolchain.
-  assert(invoker.configs != [])
-  assert(invoker.host_target != target_name)
-  assert(invoker.xcode_test_application_name != target_name)
-
-  _target_name = target_name
-  _output_name = target_name
-
-  if (defined(invoker.output_name)) {
-    _output_name = invoker.output_name
-  }
-
-  _arch_loadable_module_source = _target_name + "_arch_loadable_module_source"
-  _arch_loadable_module_target = _target_name + "_arch_loadable_module"
-  _lipo_loadable_module_target = _target_name + "_loadable_module"
-
-  source_set(_arch_loadable_module_source) {
-    forward_variables_from(invoker, [ "deps" ])
-
-    testonly = true
-    visibility = [ ":$_arch_loadable_module_target" ]
-  }
-
-  loadable_module(_arch_loadable_module_target) {
-    testonly = true
-    visibility = [ ":$_lipo_loadable_module_target($default_toolchain)" ]
-    if (current_toolchain != default_toolchain) {
-      visibility += [ ":$_target_name" ]
-    }
-
-    deps = [
-      ":$_arch_loadable_module_source",
-    ]
-    configs += [ "//build/config/ios:xctest_config" ]
-
-    output_dir = "$target_out_dir/$current_cpu"
-    output_name = _output_name
-    output_prefix_override = true
-    output_extension = ""
-  }
-
-  if (current_toolchain != default_toolchain) {
-    # For fat builds, only the default toolchain will generate a test bundle.
-    # For the other toolchains, the template is only used for building the
-    # arch-specific binary, thus the default target is just a group().
-    group(_target_name) {
-      forward_variables_from(invoker, [ "visibility" ])
-      testonly = true
-
-      public_deps = [
-        ":$_arch_loadable_module_target",
-      ]
-    }
-  } else {
-    _info_plist_target = _target_name + "_info_plist"
-    _info_plist_bundle = _target_name + "_info_plist_bundle"
-
-    ios_info_plist(_info_plist_target) {
-      testonly = true
-      visibility = [ ":$_info_plist_bundle" ]
-
-      info_plist = "//build/config/ios/Module-Info.plist"
-      executable_name = _output_name
-
-      if (ios_automatically_manage_certs) {
-        # Use a fixed bundle identifier for EarlGrey tests when using Xcode to
-        # manage the certificates as the number of free certs is limited.
-        extra_substitutions = [
-          "MODULE_BUNDLE_ID=gtest.${ios_generic_test_bundle_id_suffix}-module",
-        ]
-      } else {
-        extra_substitutions = [ "MODULE_BUNDLE_ID=gtest.$_output_name" ]
-      }
-    }
-
-    bundle_data(_info_plist_bundle) {
-      testonly = true
-      visibility = [ ":$_target_name" ]
-
-      public_deps = [
-        ":$_info_plist_target",
-      ]
-
-      sources = get_target_outputs(":$_info_plist_target")
-      outputs = [
-        "{{bundle_contents_dir}}/Info.plist",
-      ]
-    }
-
-    lipo_binary(_lipo_loadable_module_target) {
-      forward_variables_from(invoker, [ "configs" ])
-
-      testonly = true
-      visibility = [ ":$_target_name" ]
-
-      output_name = _output_name
-      arch_binary_target = ":$_arch_loadable_module_target"
-      arch_binary_output = _output_name
-    }
-
-    _xctest_bundle = _target_name + "_bundle"
-    create_signed_bundle(_target_name) {
-      forward_variables_from(invoker,
-                             [
-                               "enable_code_signing",
-                               "product_type",
-                               "xcode_test_application_name",
-                             ])
-
-      testonly = true
-      visibility = [ ":$_xctest_bundle" ]
-
-      bundle_extension = ".xctest"
-
-      output_name = _output_name
-      bundle_binary_target = ":$_lipo_loadable_module_target"
-      bundle_binary_output = _output_name
-
-      # Test files need to be known to Xcode for proper indexing and discovery
-      # of test functions for XCTest, but the compilation is done via ninja, so
-      # this hack prevents Xcode from linking the object files itself.
-      xcode_extra_attributes = {
-        OTHER_LDFLAGS = "-help"
-        ONLY_ACTIVE_ARCH = "YES"
-        DEBUG_INFORMATION_FORMAT = "dwarf"
-
-        # For XCUITest, Xcode requires specifying the host application name via
-        # the TEST_TARGET_NAME attribute.
-        if (invoker.product_type == "com.apple.product-type.bundle.ui-testing") {
-          TEST_TARGET_NAME = invoker.xcode_test_application_name
-        }
-      }
-
-      deps = [
-        ":$_info_plist_bundle",
-      ]
-    }
-
-    bundle_data(_xctest_bundle) {
-      forward_variables_from(invoker, [ "host_target" ])
-
-      testonly = true
-      visibility = [ ":$host_target" ]
-
-      public_deps = [
-        ":$_target_name",
-      ]
-      sources = [
-        "$root_out_dir/$_output_name.xctest",
-      ]
-      outputs = [
-        "{{bundle_plugins_dir}}/$_output_name.xctest",
-      ]
-    }
-  }
-}
-
-set_defaults("ios_xctest_bundle") {
-  configs = default_shared_library_configs
-}
-
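For reference, a sketch of how the removed ios_xctest_bundle template would be
instantiated (all target names are hypothetical; in practice it is normally
invoked indirectly through ios_xctest_test or ios_xcuitest_test below):

  ios_xctest_bundle("foo_xctest_module") {
    product_type = "com.apple.product-type.bundle.unit-test"
    host_target = "foo_test_host"
    xcode_test_application_name = "foo_test_host"
    deps = [ ":foo_test_sources" ]
  }
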
-# For Chrome on iOS we want to run XCTests for all our build configurations
-# (Debug, Release, ...). In addition, symbol visibility is configured to be
-# private by default. To simplify testing with those constraints, our tests are
-# compiled in the TEST_HOST target instead of the .xctest bundle.
-template("ios_xctest_test") {
-  _target_name = target_name
-  _output_name = target_name
-  if (defined(invoker.output_name)) {
-    _output_name = invoker.output_name
-  }
-
-  _xctest_target = _target_name + "_module"
-  _xctest_output = _output_name + "_module"
-
-  _host_target = _target_name
-  _host_output = _output_name
-
-  _xctest_shell_source_target = _xctest_target + "shell_source"
-  source_set(_xctest_shell_source_target) {
-    sources = [
-      "//build/config/ios/xctest_shell.mm",
-    ]
-
-    configs += [ "//build/config/ios:xctest_config" ]
-  }
-
-  ios_xctest_bundle(_xctest_target) {
-    output_name = _xctest_output
-    product_type = "com.apple.product-type.bundle.unit-test"
-    host_target = _host_target
-    xcode_test_application_name = _host_output
-
-    deps = [
-      ":$_xctest_shell_source_target",
-    ]
-  }
-
-  ios_app_bundle(_host_target) {
-    forward_variables_from(invoker, "*", [ "testonly" ])
-
-    testonly = true
-    output_name = _host_output
-    configs += [ "//build/config/ios:xctest_config" ]
-
-    if (!defined(invoker.info_plist) && !defined(invoker.info_plist_target)) {
-      info_plist = "//build/config/ios/Host-Info.plist"
-      if (ios_automatically_manage_certs) {
-        # Use the same bundle identifier for EarlGrey tests as for unit tests
-        # when managing certificates as the number of free certs is limited.
-        if (!defined(extra_substitutions)) {
-          extra_substitutions = []
-        }
-        extra_substitutions +=
-            [ "EXECUTABLE_NAME=gtest.${ios_generic_test_bundle_id_suffix}" ]
-      }
-    }
-
-    # Xcode needs these two frameworks installed in the application (and
-    # signed) for the XCTest to run, so install them using
-    # extra_system_frameworks.
-    _ios_platform_library = "$ios_sdk_platform_path/Developer/Library"
-    extra_system_frameworks = [
-      "$_ios_platform_library/Frameworks/XCTest.framework",
-      "$_ios_platform_library/PrivateFrameworks/IDEBundleInjection.framework",
-    ]
-
-    _xctest_bundle = _xctest_target + "_bundle"
-    if (current_toolchain == default_toolchain) {
-      if (!defined(bundle_deps)) {
-        bundle_deps = []
-      }
-      bundle_deps += [ ":$_xctest_bundle" ]
-    }
-
-    if (!defined(ldflags)) {
-      ldflags = []
-    }
-    ldflags += [
-      "-Xlinker",
-      "-rpath",
-      "-Xlinker",
-      "@executable_path/Frameworks",
-      "-Xlinker",
-      "-rpath",
-      "-Xlinker",
-      "@loader_path/Frameworks",
-    ]
-  }
-}
-
-set_defaults("ios_xctest_test") {
-  configs = default_executable_configs
-}
-
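A hypothetical invocation of the removed ios_xctest_test template, which
builds both the test host application and its "_module" xctest bundle:

  ios_xctest_test("foo_unittests") {
    sources = [ "foo_unittest.mm" ]
    deps = [
      "//base",
      "//testing/gtest",
    ]
  }
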
-# Template to build an XCUITest test runner bundle.
-#
-# Xcode requires a test runner application with a copy of the XCTest dynamic
-# library bundle in it for the XCUITest to run. The test runner bundle is
-# created by copying the system bundle XCTRunner.app from the Xcode SDK,
-# tweaking its Info.plist and embedding the xctest bundle, and it needs to be
-# code signed in order to run on devices.
-#
-# Arguments
-#
-#   xctest_bundle
-#       string, name of the dependent xctest bundle target.
-#
-#   output_name
-#       (optional) string, name of the generated application, if omitted,
-#       defaults to the target_name.
-#
-template("ios_xcuitest_test_runner_bundle") {
-  assert(defined(invoker.xctest_bundle),
-         "xctest_bundle must be defined for $target_name")
-
-  _target_name = target_name
-  _output_name = target_name
-  if (defined(invoker.output_name)) {
-    _output_name = invoker.output_name
-  }
-
-  _xctrunner_path =
-      "$ios_sdk_platform_path/Developer/Library/Xcode/Agents/XCTRunner.app"
-
-  _info_plist_merge_plist = _target_name + "_info_plist_merge_plist"
-  _info_plist_target = _target_name + "_info_plist"
-  _info_plist_bundle = _target_name + "_info_plist_bundle"
-
-  action(_info_plist_merge_plist) {
-    testonly = true
-    script = "//build/config/mac/plist_util.py"
-
-    sources = [
-      "$_xctrunner_path/Info.plist",
-
-      # NOTE: The XCTRunnerAddition+Info.plist must come after the Info.plist
-      # because it overrides the values under "CFBundleIdentifier" and
-      # "CFBundleName".
-      "//ios/chrome/app/resources/XCTRunnerAddition+Info.plist",
-    ]
-
-    _output_name = "$target_gen_dir/${_target_name}_merged.plist"
-    outputs = [
-      _output_name,
-    ]
-    args = [
-             "merge",
-             "-f=xml1",
-             "-o=" + rebase_path(_output_name, root_build_dir),
-           ] + rebase_path(sources, root_build_dir)
-  }
-
-  ios_info_plist(_info_plist_target) {
-    testonly = true
-    visibility = [ ":$_info_plist_bundle" ]
-
-    executable_name = _output_name
-    info_plist_target = ":$_info_plist_merge_plist"
-
-    if (ios_automatically_manage_certs) {
-      # Use the same bundle identifier for XCUITest tests as for unit tests
-      # when managing certificates as the number of free certs is limited.
-      extra_substitutions =
-          [ "EXECUTABLE_NAME=gtest.${ios_generic_test_bundle_id_suffix}" ]
-    }
-  }
-
-  bundle_data(_info_plist_bundle) {
-    testonly = true
-    visibility = [ ":$_target_name" ]
-
-    public_deps = [
-      ":$_info_plist_target",
-    ]
-
-    sources = get_target_outputs(":$_info_plist_target")
-    outputs = [
-      "{{bundle_contents_dir}}/Info.plist",
-    ]
-  }
-
-  _pkginfo_bundle = _target_name + "_pkginfo_bundle"
-  bundle_data(_pkginfo_bundle) {
-    testonly = true
-    visibility = [ ":$_target_name" ]
-
-    sources = [
-      "$_xctrunner_path/PkgInfo",
-    ]
-
-    outputs = [
-      "{{bundle_contents_dir}}/PkgInfo",
-    ]
-  }
-
-  _xctest_bundle = invoker.xctest_bundle
-  create_signed_bundle(_target_name) {
-    testonly = true
-
-    bundle_binary_path = "$_xctrunner_path/XCTRunner"
-    bundle_extension = ".app"
-    product_type = "com.apple.product-type.application"
-
-    output_name = _output_name
-
-    # Xcode needs the following frameworks installed in the application
-    # (and signed) for the XCUITest to run, so install them using
-    # extra_system_frameworks.
-    extra_system_frameworks = [
-      "$ios_sdk_platform_path/Developer/Library/Frameworks/XCTest.framework",
-      "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework",
-    ]
-
-    bundle_deps = [
-      ":$_info_plist_bundle",
-      ":$_pkginfo_bundle",
-      ":$_xctest_bundle",
-    ]
-  }
-}
-
-# Template to build an XCUITest that consists of two parts: the test runner
-# application bundle and the xctest dynamic library.
-#
-# Arguments
-#
-#   deps:
-#       list of labels to depends on, these values are used to create the
-#       xctest dynamic library.
-#
-#   xcode_test_application_name:
-#       string, name of the test application for the ui test target.
-#
-# This template defines two targets: one named "${target_name}_module", which
-# is the xctest dynamic library, and one named "${target_name}_runner", which
-# is the test runner application bundle.
-#
-template("ios_xcuitest_test") {
-  assert(defined(invoker.deps), "deps must be defined for $target_name")
-  assert(defined(invoker.xcode_test_application_name),
-         "xcode_test_application_name must be defined for $target_name")
-
-  _xcuitest_target = target_name
-  _xcuitest_runner_target = _xcuitest_target + "_runner"
-  _xcuitest_module_target = _xcuitest_target + "_module"
-
-  group(_xcuitest_target) {
-    testonly = true
-
-    deps = [
-      ":$_xcuitest_runner_target",
-    ]
-  }
-
-  _xcuitest_module_output = _xcuitest_target
-  ios_xctest_bundle(_xcuitest_module_target) {
-    forward_variables_from(invoker, [ "xcode_test_application_name" ])
-
-    product_type = "com.apple.product-type.bundle.ui-testing"
-    host_target = _xcuitest_runner_target
-    output_name = _xcuitest_module_output
-
-    deps = invoker.deps
-  }
-
-  _xcuitest_runner_output = _xcuitest_target + "-Runner"
-  ios_xcuitest_test_runner_bundle(_xcuitest_runner_target) {
-    output_name = _xcuitest_runner_output
-    xctest_bundle = _xcuitest_module_target + "_bundle"
-  }
-}
-
-set_defaults("ios_xcuitest_test") {
-  configs = default_executable_configs
-}
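
A hypothetical invocation of the removed ios_xcuitest_test template, producing
a "_runner" application bundle and a "_module" xctest dynamic library:

  ios_xcuitest_test("foo_egtests") {
    xcode_test_application_name = "foo_app"
    deps = [ ":foo_eg_test_sources" ]
  }
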
diff --git a/build/config/ios/write_framework_hmap.py b/build/config/ios/write_framework_hmap.py
deleted file mode 100644
index 8f6b143..0000000
--- a/build/config/ios/write_framework_hmap.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import struct
-import sys
-
-def Main(args):
-  if len(args) < 4:
-    print >> sys.stderr, "Usage: %s output.hmap Foo.framework header1.h..." %\
-        (args[0])
-    return 1
-
-  (out, framework, all_headers) = args[1], args[2], args[3:]
-
-  framework_name = os.path.basename(framework).split('.')[0]
-  all_headers = map(os.path.abspath, all_headers)
-  filelist = {}
-  for header in all_headers:
-    filename = os.path.basename(header)
-    filelist[filename] = header
-    filelist[os.path.join(framework_name, filename)] = header
-  WriteHmap(out, filelist)
-  return 0
-
-
-def NextGreaterPowerOf2(x):
-  return 2**(x).bit_length()
-
-
-def WriteHmap(output_name, filelist):
-  """Generates a header map based on |filelist|.
-
-  Per Mark Mentovai:
-    A header map is structured essentially as a hash table, keyed by names used
-    in #includes, and providing pathnames to the actual files.
-
-  The implementation below and the comment above come from inspecting:
-    http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
-  while also looking at the implementation in clang in:
-    https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
-  """
-  magic = 1751998832
-  version = 1
-  _reserved = 0
-  count = len(filelist)
-  capacity = NextGreaterPowerOf2(count)
-  strings_offset = 24 + (12 * capacity)
-  max_value_length = len(max(filelist.items(), key=lambda (k,v):len(v))[1])
-
-  out = open(output_name, 'wb')
-  out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
-                        count, capacity, max_value_length))
-
-  # Create empty hashmap buckets.
-  buckets = [None] * capacity
-  for file, path in filelist.items():
-    key = 0
-    for c in file:
-      key += ord(c.lower()) * 13
-
-    # Fill next empty bucket.
-    while buckets[key & capacity - 1] is not None:
-      key = key + 1
-    buckets[key & capacity - 1] = (file, path)
-
-  next_offset = 1
-  for bucket in buckets:
-    if bucket is None:
-      out.write(struct.pack('<LLL', 0, 0, 0))
-    else:
-      (file, path) = bucket
-      key_offset = next_offset
-      prefix_offset = key_offset + len(file) + 1
-      suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
-      next_offset = suffix_offset + len(os.path.basename(path)) + 1
-      out.write(struct.pack('<LLL', key_offset, prefix_offset, suffix_offset))
-
-  # Pad byte since next offset starts at 1.
-  out.write(struct.pack('<x'))
-
-  for bucket in buckets:
-    if bucket is not None:
-      (file, path) = bucket
-      out.write(struct.pack('<%ds' % len(file), file))
-      out.write(struct.pack('<s', '\0'))
-      base = os.path.dirname(path) + os.sep
-      out.write(struct.pack('<%ds' % len(base), base))
-      out.write(struct.pack('<s', '\0'))
-      path = os.path.basename(path)
-      out.write(struct.pack('<%ds' % len(path), path))
-      out.write(struct.pack('<s', '\0'))
-
-
-if __name__ == '__main__':
-  sys.exit(Main(sys.argv))
diff --git a/build/config/ios/write_framework_modulemap.py b/build/config/ios/write_framework_modulemap.py
deleted file mode 100644
index b6da571..0000000
--- a/build/config/ios/write_framework_modulemap.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-def Main(framework):
-  # Find the name of the binary based on the part before the ".framework".
-  binary = os.path.basename(framework).split('.')[0]
-  module_path = os.path.join(framework, 'Modules')
-  if not os.path.exists(module_path):
-    os.mkdir(module_path)
-  module_template = 'framework module %s {\n' \
-                    '  umbrella header "%s.h"\n' \
-                    '\n' \
-                    '  export *\n' \
-                    '  module * { export * }\n' \
-                    '}\n' % (binary, binary)
-
-  module_file = open(os.path.join(module_path, 'module.modulemap'), 'w')
-  module_file.write(module_template)
-  module_file.close()
-
-if __name__ == '__main__':
-  Main(sys.argv[1])
diff --git a/build/config/ios/xctest_shell.mm b/build/config/ios/xctest_shell.mm
deleted file mode 100644
index dcf5bad..0000000
--- a/build/config/ios/xctest_shell.mm
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2016 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-#import <XCTest/XCTest.h>
-
-// For Chrome on iOS we want to run EarlGrey tests (that are XCTests) for all
-// our build configurations (Debug, Release, ...). In addition, symbol
-// visibility is configured to be private by default. To simplify testing with
-// those constraints, our tests are compiled in the TEST_HOST target instead
-// of the .xctest bundles, which all link against this single file (it is just
-// there to ensure that the bundle is not empty).
-
-@interface XCTestShellEmptyClass : NSObject
-@end
-
-@implementation XCTestShellEmptyClass
-@end
diff --git a/build/config/jumbo.gni b/build/config/jumbo.gni
deleted file mode 100644
index 6b146d9..0000000
--- a/build/config/jumbo.gni
+++ /dev/null
@@ -1,281 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/split_static_library.gni")  # When someone uses that target_type
-import("//build/toolchain/goma.gni")
-
-declare_args() {
-  # If true, use a jumbo build (files compiled together) to speed up
-  # compilation.
-  use_jumbo_build = false
-
-  # A list of targets to exclude from jumbo builds, for optimal round trip time
-  # when frequently changing a set of cpp files.
-  jumbo_build_excluded = []
-
-  # How many files to group at most. Smaller numbers give more
-  # parallellism, higher numbers give less total CPU usage. Higher
-  # numbers also give longer single-file recompilation times.
-  #
-  # Recommendations:
-  # Numbers higher than 100 do not reduce wall clock compile times
-  # even for 4 cores or fewer, so there is no reason to go higher than 100.
-  # Going from 50 to 100 with a 4 core CPU saves about 3% CPU time and
-  # 3% wall clock time in a tree with blink, v8 and content
-  # jumbofied. At the same time it increases the compile time for the
-  # largest jumbo chunks by 10-20% and reduces the chance of using all
-  # available CPU cores. So the default is set to 50 to balance between
-  # high-core and low-core build performance. -1 means use the default,
-  # which varies depending on whether goma is enabled.
-  jumbo_file_merge_limit = -1
-}
-
-# Normal builds benefit from lots of jumbification
-jumbo_file_merge_default = 50
-
-# Goma builds benefit from more parallelism
-jumbo_file_merge_goma = 8
-
-# Use one of the templates jumbo_source_set, jumbo_static_library,
-# jumbo_split_static_library or jumbo_component to generate a target
-# that merges sources where possible in order to compile much faster.
-#
-# Special values.
-#
-#   target_type
-#      The kind of target to build. For example the string
-#      "static_library".
-#
-#   always_build_jumbo
-#      If defined and set to true, then use jumbo compilation even when it is
-#      globally disabled. Otherwise it has no effect.
-#
-#   never_build_jumbo
-#      If defined and set to true, then do not use jumbo compilation even if it
-#      is globally enabled. Otherwise it has no effect.
-#
-#   jumbo_excluded_sources
-#      If set to a list of files, those files will not be merged with
-#      the rest. This can be necessary if merging the files causes
-#      compilation issues and fixing the issues is impractical.
-template("internal_jumbo_target") {
-  use_jumbo_build_for_target = use_jumbo_build
-  if (defined(invoker.always_build_jumbo) && invoker.always_build_jumbo) {
-    use_jumbo_build_for_target = true
-  }
-  if (defined(invoker.never_build_jumbo) && invoker.never_build_jumbo) {
-    use_jumbo_build_for_target = false
-  }
-  foreach(excluded_target, jumbo_build_excluded) {
-    if (target_name == excluded_target) {
-      use_jumbo_build_for_target = false
-    }
-  }
-
-  excluded_sources = []
-  if (defined(invoker.jumbo_excluded_sources)) {
-    excluded_sources += invoker.jumbo_excluded_sources
-  }
-
-  if (defined(invoker.sources)) {
-    invoker_sources = invoker.sources
-  } else {
-    invoker_sources = []
-  }
-
-  gen_target_dir = invoker.target_gen_dir
-
-  assert(gen_target_dir != "")  # Prevent "unused variable".
-
-  if (use_jumbo_build_for_target) {
-    jumbo_files = []
-
-    # Split the sources list into chunks that are not excessively large
-    current_file_index = 0
-    next_chunk_start = 0
-    next_chunk_number = 1
-    merge_limit = jumbo_file_merge_limit
-    if (merge_limit == -1) {
-      if (use_goma) {
-        merge_limit = jumbo_file_merge_goma
-      } else {
-        merge_limit = jumbo_file_merge_default
-      }
-    }
-    has_c_file = false
-    has_objective_c_file = false
-    has_S_file = false
-    assert(merge_limit > 0)
-    foreach(source_file, invoker_sources) {
-      source_ext = get_path_info(source_file, "extension")
-      if (source_ext == "c") {
-        has_c_file = true
-      } else if (source_ext == "mm") {
-        has_objective_c_file = true
-      } else if (source_ext == "S") {
-        has_S_file = true
-      } else if (source_ext == "cc" || source_ext == "cpp") {
-        if (current_file_index == next_chunk_start) {
-          jumbo_files += [ "$gen_target_dir/" + target_name + "_jumbo_" +
-                           next_chunk_number + ".cc" ]
-          next_chunk_number += 1
-          next_chunk_start += merge_limit
-        }
-        current_file_index += 1
-      }
-    }
-
-    if (jumbo_files == [] || current_file_index == 1) {
-      # Empty sources list or a sources list with only header files or
-      # at most one non-header file.
-      use_jumbo_build_for_target = false
-      assert(current_file_index <= 1)  # Prevent "unused variable"
-      assert(next_chunk_start >= 0)  # Prevent "unused variable"
-      assert(next_chunk_number <= 2)  # Prevent "unused variable"
-    }
-
-    if (has_c_file) {
-      jumbo_files += [ "$gen_target_dir/" + target_name + "_jumbo_c.c" ]
-    }
-    if (has_objective_c_file) {
-      jumbo_files += [ "$gen_target_dir/" + target_name + "_jumbo_mm.mm" ]
-    }
-    if (has_S_file) {
-      jumbo_files += [ "$gen_target_dir/" + target_name + "_jumbo_S.S" ]
-    }
-  }
-
-  if (use_jumbo_build_for_target) {
-    merge_action_name = target_name + "__jumbo_merge"
-
-    # Create an action that calls a script that merges all the source files.
-    action(merge_action_name) {
-      script = "//build/config/merge_for_jumbo.py"
-      response_file_contents =
-          rebase_path(invoker_sources - excluded_sources, root_build_dir)
-      outputs = jumbo_files
-      args = [ "--outputs" ] + rebase_path(outputs, root_build_dir) +
-             [ "--file-list={{response_file_name}}" ]
-    }
-  } else {
-    # If the list subtraction triggers a gn error,
-    # jumbo_excluded_sources lists a file that is not in sources.
-    sources_after_exclusion = invoker_sources - excluded_sources
-    assert(sources_after_exclusion != [] || true)  # Prevent "unused variable".
-  }
-
-  target_type = invoker.target_type
-  if (use_jumbo_build_for_target && target_type == "split_static_library") {
-    # Meaningless and also impossible if split_count > len(jumbo_files)
-    target_type = "static_library"
-
-    # Prevent "unused variable" warning.
-    assert(!defined(invoker.split_count) || invoker.split_count > 0)
-  }
-
-  # Perform the actual operation, either on the original sources or
-  # the sources post-jumbo merging.
-  target(target_type, target_name) {
-    deps = []
-    if (defined(invoker.deps)) {
-      deps += invoker.deps
-    }
-
-    # Take everything else not handled above from the invoker.
-    variables_to_not_forward = [ "deps" ]
-    if (use_jumbo_build_for_target) {
-      deps += [ ":" + merge_action_name ]
-      variables_to_not_forward += [ "sources" ]
-      assert(jumbo_files != [])
-      set_sources_assignment_filter([])  # Prefiltered.
-      sources = jumbo_files + excluded_sources
-
-      # Need to keep the headers in sources so that dependency checks
-      # work.
-      foreach(source_file, invoker_sources) {
-        if (get_path_info(source_file, "extension") == "h") {
-          sources += [ source_file ]
-        }
-      }
-
-      # Change include_dirs to make sure that the jumbo file can find its
-      # #included files.
-      variables_to_not_forward += [ "include_dirs" ]
-      include_dirs = []
-      if (defined(invoker.include_dirs)) {
-        include_dirs = invoker.include_dirs
-      }
-      include_dirs += [ root_build_dir ]
-    }
-    forward_variables_from(invoker, "*", variables_to_not_forward)
-  }
-}
-
-# See documentation above by "internal_jumbo_target".
-template("jumbo_source_set") {
-  internal_jumbo_target(target_name) {
-    target_type = "source_set"
-    forward_variables_from(invoker, "*")
-  }
-}
-
-set_defaults("jumbo_source_set") {
-  # This sets the default list of configs when the jumbo_source_set target
-  # is defined. The default_compiler_configs comes from BUILDCONFIG.gn and
-  # is the list normally applied to static libraries and source sets.
-  configs = default_compiler_configs
-}
-
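A hypothetical use of the removed jumbo templates; when use_jumbo_build is
true, the .cc files below would be merged into generated jumbo files before
compilation:

  jumbo_source_set("frobnicator") {
    sources = [
      "frobnicator.cc",
      "frobnicator.h",
      "frobnicator_unsafe_to_merge.cc",
    ]

    # Keep this file out of the merged jumbo chunks.
    jumbo_excluded_sources = [ "frobnicator_unsafe_to_merge.cc" ]
    deps = [ "//base" ]
  }
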
-# See documentation above by "internal_jumbo_target".
-template("jumbo_static_library") {
-  internal_jumbo_target(target_name) {
-    target_type = "static_library"
-    forward_variables_from(invoker, "*")
-  }
-}
-
-set_defaults("jumbo_static_library") {
-  # This sets the default list of configs when the jumbo_static_library target
-  # is defined. The default_compiler_configs comes from BUILDCONFIG.gn and
-  # is the list normally applied to static libraries and source sets.
-  configs = default_compiler_configs
-}
-
-# See documentation above by "internal_jumbo_target".
-template("jumbo_split_static_library") {
-  internal_jumbo_target(target_name) {
-    target_type = "split_static_library"
-    forward_variables_from(invoker, "*")
-  }
-}
-
-set_defaults("jumbo_split_static_library") {
-  # This sets the default list of configs when the
-  # jumbo_split_static_library target is defined. The
-  # default_compiler_configs comes from BUILDCONFIG.gn and is the list
-  # normally applied to static libraries and source sets.
-  configs = default_compiler_configs
-}
-
-# See documentation above by "internal_jumbo_target".
-template("jumbo_component") {
-  internal_jumbo_target(target_name) {
-    target_type = "component"
-    forward_variables_from(invoker, "*")
-  }
-}
-
-set_defaults("jumbo_component") {
-  # This sets the default list of configs when the jumbo_component
-  # target is defined. This code is a clone of set_defaults for the
-  # ordinary "component" template.
-  if (is_component_build) {
-    configs = default_shared_library_configs
-    if (is_android) {
-      configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
-    }
-  } else {
-    configs = default_compiler_configs
-  }
-}
diff --git a/build/config/linux/BUILD.gn b/build/config/linux/BUILD.gn
deleted file mode 100644
index e3488ab..0000000
--- a/build/config/linux/BUILD.gn
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/c++/c++.gni")
-import("//build/config/linux/pkg_config.gni")
-import("//build/config/ui.gni")
-
-group("linux") {
-  visibility = [ "//:optimize_gn_gen" ]
-}
-
-# This is included by reference in the //build/config/compiler config that
-# is applied to all targets. It is here to separate out the logic that is
-# Linux-only. This is not applied to Android, but is applied to ChromeOS.
-config("compiler") {
-}
-
-# This is included by reference in the //build/config/compiler:runtime_library
-# config that is applied to all targets. It is here to separate out the logic
-# that is Linux-only. Please see that target for advice on what should go in
-# :runtime_library vs. :compiler.
-config("runtime_library") {
-  # Set here because OS_CHROMEOS cannot be autodetected in build_config.h like
-  # OS_LINUX and the like.
-  if (is_chromeos) {
-    defines = [ "OS_CHROMEOS" ]
-  }
-
-  if ((!is_chromeos || default_toolchain != "//build/toolchain/cros:target") &&
-      (!use_custom_libcxx || current_cpu == "mipsel")) {
-    libs = [ "atomic" ]
-  }
-}
-
-config("x11") {
-  libs = [
-    "X11",
-    "X11-xcb",
-    "xcb",
-    "Xcomposite",
-    "Xcursor",
-    "Xdamage",
-    "Xext",
-    "Xfixes",
-    "Xi",
-    "Xrender",
-    "Xtst",
-  ]
-}
-
-config("xcomposite") {
-  libs = [ "Xcomposite" ]
-}
-
-config("xext") {
-  libs = [ "Xext" ]
-}
-
-config("xrandr") {
-  libs = [ "Xrandr" ]
-}
-
-config("xscrnsaver") {
-  libs = [ "Xss" ]
-}
-
-config("xfixes") {
-  libs = [ "Xfixes" ]
-}
-
-config("libcap") {
-  libs = [ "cap" ]
-}
-
-config("xi") {
-  libs = [ "Xi" ]
-}
-
-config("xtst") {
-  libs = [ "Xtst" ]
-}
-
-config("libresolv") {
-  libs = [ "resolv" ]
-}
-
-if (use_glib) {
-  pkg_config("glib") {
-    packages = [
-      "glib-2.0",
-      "gmodule-2.0",
-      "gobject-2.0",
-      "gthread-2.0",
-    ]
-    defines = [
-      "GLIB_VERSION_MAX_ALLOWED=GLIB_VERSION_2_32",
-      "GLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_26",
-    ]
-  }
-}
diff --git a/build/config/linux/OWNERS b/build/config/linux/OWNERS
deleted file mode 100644
index 280ba47..0000000
--- a/build/config/linux/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-thomasanderson@chromium.org
diff --git a/build/config/linux/atk/BUILD.gn b/build/config/linux/atk/BUILD.gn
deleted file mode 100644
index 89eedff..0000000
--- a/build/config/linux/atk/BUILD.gn
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/features.gni")
-import("//build/config/linux/gtk/gtk.gni")
-import("//build/config/linux/pkg_config.gni")
-import("//build/config/ui.gni")
-
-# CrOS doesn't install GTK or any gnome packages.
-assert(!is_chromeos)
-
-# These packages should _only_ be expected when building for a target.
-assert(current_toolchain == default_toolchain)
-
-if (use_atk) {
-  assert(use_glib, "use_atk=true requires that use_glib=true")
-}
-
-pkg_config("atk_base") {
-  packages = [ "atk" ]
-  atk_lib_dir = exec_script(pkg_config_script,
-                            pkg_config_args + [
-                                  "--libdir",
-                                  "atk",
-                                ],
-                            "string")
-  defines = [ "ATK_LIB_DIR=\"$atk_lib_dir\"" ]
-  if (use_gtk3) {
-    packages += [ "atk-bridge-2.0" ]
-    defines += [ "USE_ATK_BRIDGE" ]
-  }
-}
-
-# gn orders flags on a target before flags from configs. The default config
-# adds -Wall, and these flags have to be after -Wall -- so they need to
-# come from a config and can't be on the target directly.
-config("atk") {
-  configs = [ ":atk_base" ]
-
-  cflags = [
-    # G_DEFINE_TYPE automatically generates a *get_instance_private
-    # inline function after glib 2.37. That's unused. Suppress the
-    # compiler warning about it.
-    "-Wno-unused-function",
-  ]
-
-  if (is_clang) {
-    # glib uses the pre-c++11 typedef-as-static_assert hack.
-    cflags += [ "-Wno-unused-local-typedef" ]
-  }
-}
diff --git a/build/config/linux/dbus/BUILD.gn b/build/config/linux/dbus/BUILD.gn
deleted file mode 100644
index f11cf71..0000000
--- a/build/config/linux/dbus/BUILD.gn
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/features.gni")
-import("//build/config/linux/pkg_config.gni")
-
-assert(use_dbus)
-
-# Note: if your target also depends on //dbus, you don't need to add this
-# config (it will get added automatically if you depend on //dbus).
-pkg_config("dbus") {
-  packages = [ "dbus-1" ]
-}
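
As an illustration, a target would pick up the pkg-config flags by listing the
generated config (the target and source names below are hypothetical):

  source_set("dbus_client") {
    sources = [ "dbus_client.cc" ]
    configs += [ "//build/config/linux/dbus" ]
  }
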
diff --git a/build/config/linux/dri/BUILD.gn b/build/config/linux/dri/BUILD.gn
deleted file mode 100644
index cad883b..0000000
--- a/build/config/linux/dri/BUILD.gn
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-
-assert(is_linux, "This file should only be referenced on Linux")
-
-pkg_config("dri") {
-  packages = [ "dri" ]
-  dri_driver_dir = exec_script(pkg_config_script,
-                               pkg_config_args + [
-                                     "--dridriverdir",
-                                     "dri",
-                                   ],
-                               "string")
-  defines = [ "DRI_DRIVER_DIR=\"$dri_driver_dir\"" ]
-}
diff --git a/build/config/linux/gtk/BUILD.gn b/build/config/linux/gtk/BUILD.gn
deleted file mode 100644
index eb75461..0000000
--- a/build/config/linux/gtk/BUILD.gn
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/gtk/gtk.gni")
-import("//build/config/linux/pkg_config.gni")
-
-assert(is_linux, "This file should only be referenced on Linux")
-
-# The target in this file will automatically reference GTK2 or GTK3 depending
-# on the state of the build flag. Some builds reference both 2 and 3, and some
-# builds reference neither, so both need to be available but in different
-# directories so pkg-config is only run when necessary.
-
-# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
-# parts that explicitly need GTK are whitelisted on this target.
-group("gtk") {
-  visibility = [
-    "//chrome/test:interactive_ui_tests",
-    "//examples:peerconnection_client",
-    "//gpu/gles2_conform_support:gles2_conform_test_windowless",
-    "//remoting/host",
-    "//remoting/host/linux",
-    "//remoting/host/it2me:common",
-    "//remoting/host/it2me:remote_assistance_host",
-    "//remoting/host:remoting_me2me_host_static",
-    "//remoting/test:it2me_standalone_host_main",
-    "//webrtc/examples:peerconnection_client",
-  ]
-
-  if (use_gtk3) {
-    public_deps = [
-      "//build/config/linux/gtk3",
-    ]
-  } else {
-    public_deps = [
-      "//build/config/linux/gtk2",
-    ]
-  }
-}
diff --git a/build/config/linux/gtk/gtk.gni b/build/config/linux/gtk/gtk.gni
deleted file mode 100644
index 53e943e..0000000
--- a/build/config/linux/gtk/gtk.gni
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Include this file if you need to know at build time whether we're compiling
-# against GTK 2 or 3. But in most cases you can just depend on
-# //build/config/linux/gtk and it will switch for you.
-
-declare_args() {
-  # Whether to compile against GTKv3 instead of GTKv2.
-  use_gtk3 = true
-}
diff --git a/build/config/linux/gtk2/BUILD.gn b/build/config/linux/gtk2/BUILD.gn
deleted file mode 100644
index 010d592..0000000
--- a/build/config/linux/gtk2/BUILD.gn
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-
-assert(is_linux, "This file should only be referenced on Linux")
-
-# Depend on //build/config/linux/gtk2 to use GTKv2. Depend on
-# //build/config/linux/gtk to get GTK 2 or 3 depending on the build flags.
-#
-# GN doesn't check visibility for configs so we give this an obviously internal
-# name to discourage random targets from accidentally depending on this and
-# bypassing the GTK target's visibility.
-pkg_config("gtk2_internal_config") {
-  # Gtk requires gmodule, but it does not list it as a dependency in some
-  # misconfigured systems.
-  packages = [
-    "gmodule-2.0",
-    "gtk+-2.0",
-    "gthread-2.0",
-  ]
-}
-
-# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
-# parts that explicitly need GTK2 are whitelisted on this target.
-group("gtk2") {
-  visibility = [
-    "//build/config/linux/gtk",
-    "//chrome/browser/ui/libgtkui:*",
-  ]
-  public_configs = [ ":gtk2_internal_config" ]
-}
-
-# Depend on "gtkprint" to get this.
-pkg_config("gtkprint2_internal_config") {
-  packages = [ "gtk+-unix-print-2.0" ]
-}
-
-group("gtkprint2") {
-  visibility = [ "//chrome/browser/ui/libgtkui:libgtk2ui" ]
-  public_configs = [ ":gtkprint2_internal_config" ]
-}
diff --git a/build/config/linux/gtk3/BUILD.gn b/build/config/linux/gtk3/BUILD.gn
deleted file mode 100644
index 0cfd05f..0000000
--- a/build/config/linux/gtk3/BUILD.gn
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-
-assert(is_linux, "This file should only be referenced on Linux")
-
-# Depend on //build/config/linux/gtk3 to use GTKv3. Depend on
-# //build/config/linux/gtk to get GTK 2 or 3 depending on the build flags.
-#
-# GN doesn't check visibility for configs so we give this an obviously internal
-# name to discourage random targets from accidentally depending on this and
-# bypassing the GTK target's visibility.
-pkg_config("gtk3_internal_config") {
-  # Gtk requires gmodule, but it does not list it as a dependency in some
-  # misconfigured systems.
-  packages = [
-    "gmodule-2.0",
-    "gtk+-3.0",
-    "gthread-2.0",
-  ]
-}
-
-# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
-# parts that explicitly need GTK3 are whitelisted on this target.
-group("gtk3") {
-  visibility = [
-    "//build/config/linux/gtk",
-    "//chrome/browser/ui/libgtkui:*",
-  ]
-  public_configs = [ ":gtk3_internal_config" ]
-}
-
-# Depend on "gtkprint3" to get this.
-pkg_config("gtkprint3_internal_config") {
-  packages = [ "gtk+-unix-print-3.0" ]
-}
-
-group("gtkprint3") {
-  visibility = [ "//chrome/browser/ui/libgtkui:libgtk3ui" ]
-  public_configs = [ ":gtkprint3_internal_config" ]
-}
diff --git a/build/config/linux/libffi/BUILD.gn b/build/config/linux/libffi/BUILD.gn
deleted file mode 100644
index a404172..0000000
--- a/build/config/linux/libffi/BUILD.gn
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-
-pkg_config("libffi") {
-  packages = [ "libffi" ]
-}
diff --git a/build/config/linux/pangocairo/BUILD.gn b/build/config/linux/pangocairo/BUILD.gn
deleted file mode 100644
index ddcc754..0000000
--- a/build/config/linux/pangocairo/BUILD.gn
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pangocairo/pangocairo.gni")
-import("//build/config/linux/pkg_config.gni")
-
-if (use_pangocairo) {
-  pkg_config("pangocairo") {
-    packages = [ "pangocairo" ]
-
-    # We don't want pkg-config for pangocairo to explicitly request that
-    # FreeType be linked, because we control which FreeType we link against.
-    extra_args = [
-      "-v",
-      "freetype",
-    ]
-  }
-}
diff --git a/build/config/linux/pangocairo/pangocairo.gni b/build/config/linux/pangocairo/pangocairo.gni
deleted file mode 100644
index ca99445..0000000
--- a/build/config/linux/pangocairo/pangocairo.gni
+++ /dev/null
@@ -1,7 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/ui.gni")
-
-use_pangocairo = is_linux && !use_ozone
diff --git a/build/config/linux/pkg-config.py b/build/config/linux/pkg-config.py
deleted file mode 100755
index 5ef7322..0000000
--- a/build/config/linux/pkg-config.py
+++ /dev/null
@@ -1,232 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import os
-import subprocess
-import sys
-import re
-from optparse import OptionParser
-
-# This script runs pkg-config, optionally filtering out some results, and
-# returns the result.
-#
-# The result will be [ <includes>, <cflags>, <libs>, <lib_dirs>, <ldflags> ]
-# where each member is itself a list of strings.
-#
-# You can filter out matches using "-v <regexp>" where all results from
-# pkgconfig matching the given regular expression will be ignored. You can
-# specify more than one regular expression by specifying "-v" more than once.
-#
-# You can specify a sysroot using "-s <sysroot>" where sysroot is the absolute
-# system path to the sysroot used for compiling. This script will attempt to
-# generate correct paths for the sysroot.
-#
-# When using a sysroot, you must also specify the architecture via
-# "-a <arch>" where arch is either "x86" or "x64".
-#
-# CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
-# and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
-# depending on whether the systemroot is for a 32 or 64 bit architecture. They
-# specify the 'lib' or 'lib64' of the pkgconfig path by defining the
-# 'system_libdir' variable in the args.gn file. pkg_config.gni communicates this
-# variable to this script with the "--system_libdir <system_libdir>" flag. If no
-# flag is provided, then pkgconfig files are assumed to come from
-# <systemroot>/usr/lib/pkgconfig.
-#
-# Additionally, you can specify the option --atleast-version. This will skip
-# the normal outputting of a dictionary and instead print true or false,
-# depending on the return value of pkg-config for the given package.
-
-
-def SetConfigPath(options):
-  """Set the PKG_CONFIG_LIBDIR environment variable.
-
-  This takes into account any sysroot and architecture specification from the
-  options on the given command line.
-  """
-
-  sysroot = options.sysroot
-  assert sysroot
-
-  # Compute the library path name based on the architecture.
-  arch = options.arch
-  if sysroot and not arch:
-    print "You must specify an architecture via -a if using a sysroot."
-    sys.exit(1)
-
-  libdir = sysroot + '/usr/' + options.system_libdir + '/pkgconfig'
-  libdir += ':' + sysroot + '/usr/share/pkgconfig'
-  os.environ['PKG_CONFIG_LIBDIR'] = libdir
-  return libdir
-
-
-def GetPkgConfigPrefixToStrip(options, args):
-  """Returns the prefix from pkg-config where packages are installed.
-
-  This returned prefix is the one that should be stripped from the beginning of
-  directory names to take into account sysroots.
-  """
-  # Some sysroots, like the Chromium OS ones, may generate paths that are not
-  # relative to the sysroot. For example,
-  # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all
-  # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr)
-  # instead of relative to /path/to/chroot/build/x86-generic (i.e prefix=/usr).
-  # To support this correctly, it's necessary to extract the prefix to strip
-  # from pkg-config's |prefix| variable.
-  prefix = subprocess.check_output([options.pkg_config,
-      "--variable=prefix"] + args, env=os.environ)
-  if prefix[-4] == '/usr':
-    return prefix[4:]
-  return prefix
-
-
-def MatchesAnyRegexp(flag, list_of_regexps):
-  """Returns true if the first argument matches any regular expression in the
-  given list."""
-  for regexp in list_of_regexps:
-    if regexp.search(flag) != None:
-      return True
-  return False
-
-
-def RewritePath(path, strip_prefix, sysroot):
-  """Rewrites a path by stripping the prefix and prepending the sysroot."""
-  if os.path.isabs(path) and not path.startswith(sysroot):
-    if path.startswith(strip_prefix):
-      path = path[len(strip_prefix):]
-    path = path.lstrip('/')
-    return os.path.join(sysroot, path)
-  else:
-    return path
-
-
-def main():
-  # If this is run on non-Linux platforms, just return nothing and indicate
-  # success. This allows us to "kind of emulate" a Linux build from other
-  # platforms.
-  if "linux" not in sys.platform:
-    print "[[],[],[],[],[]]"
-    return 0
-
-  parser = OptionParser()
-  parser.add_option('-d', '--debug', action='store_true')
-  parser.add_option('-p', action='store', dest='pkg_config', type='string',
-                    default='pkg-config')
-  parser.add_option('-v', action='append', dest='strip_out', type='string')
-  parser.add_option('-s', action='store', dest='sysroot', type='string')
-  parser.add_option('-a', action='store', dest='arch', type='string')
-  parser.add_option('--system_libdir', action='store', dest='system_libdir',
-                    type='string', default='lib')
-  parser.add_option('--atleast-version', action='store',
-                    dest='atleast_version', type='string')
-  parser.add_option('--libdir', action='store_true', dest='libdir')
-  parser.add_option('--dridriverdir', action='store_true', dest='dridriverdir')
-  (options, args) = parser.parse_args()
-
-  # Make a list of regular expressions to strip out.
-  strip_out = []
-  if options.strip_out != None:
-    for regexp in options.strip_out:
-      strip_out.append(re.compile(regexp))
-
-  if options.sysroot:
-    libdir = SetConfigPath(options)
-    if options.debug:
-      sys.stderr.write('PKG_CONFIG_LIBDIR=%s\n' % libdir)
-    prefix = GetPkgConfigPrefixToStrip(options, args)
-  else:
-    prefix = ''
-
-  if options.atleast_version:
-    # When asking for the return value, just run pkg-config and print the return
-    # value, no need to do other work.
-    if not subprocess.call([options.pkg_config,
-                            "--atleast-version=" + options.atleast_version] +
-                            args):
-      print "true"
-    else:
-      print "false"
-    return 0
-
-  if options.libdir:
-    cmd = [options.pkg_config, "--variable=libdir"] + args
-    if options.debug:
-      sys.stderr.write('Running: %s\n' % cmd)
-    try:
-      libdir = subprocess.check_output(cmd)
-    except:
-      print "Error from pkg-config."
-      return 1
-    sys.stdout.write(libdir.strip())
-    return 0
-
-  if options.dridriverdir:
-    cmd = [options.pkg_config, "--variable=dridriverdir"] + args
-    if options.debug:
-      sys.stderr.write('Running: %s\n' % cmd)
-    try:
-      dridriverdir = subprocess.check_output(cmd)
-    except:
-      print "Error from pkg-config."
-      return 1
-    sys.stdout.write(dridriverdir.strip())
-    return
-
-  cmd = [options.pkg_config, "--cflags", "--libs"] + args
-  if options.debug:
-    sys.stderr.write('Running: %s\n' % ' '.join(cmd))
-
-  try:
-    flag_string = subprocess.check_output(cmd)
-  except:
-    sys.stderr.write('Could not run pkg-config.\n')
-    return 1
-
-  # For now just split on spaces to get the args out. This will break if
-  # pkgconfig returns quoted things with spaces in them, but that doesn't seem
-  # to happen in practice.
-  all_flags = flag_string.strip().split(' ')
-
-
-  sysroot = options.sysroot
-  if not sysroot:
-    sysroot = ''
-
-  includes = []
-  cflags = []
-  libs = []
-  lib_dirs = []
-  ldflags = []
-
-  for flag in all_flags[:]:
-    if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out):
-      continue;
-
-    if flag[:2] == '-l':
-      libs.append(RewritePath(flag[2:], prefix, sysroot))
-    elif flag[:2] == '-L':
-      lib_dirs.append(RewritePath(flag[2:], prefix, sysroot))
-    elif flag[:2] == '-I':
-      includes.append(RewritePath(flag[2:], prefix, sysroot))
-    elif flag[:3] == '-Wl':
-      ldflags.append(flag)
-    elif flag == '-pthread':
-      # Many libs specify "-pthread" which we don't need since we always include
-      # this anyway. Removing it here prevents a bunch of duplicate inclusions
-      # on the command line.
-      pass
-    else:
-      cflags.append(flag)
-
-  # Output a GN array, the first one is the cflags, the second are the libs. The
-  # JSON formatter prints GN compatible lists when everything is a list of
-  # strings.
-  print json.dumps([includes, cflags, libs, lib_dirs, ldflags])
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
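
For illustration only (not part of the removed tree): a minimal Python sketch of
how the pkg-config.py wrapper above was consumed. The script printed a JSON list
[includes, cflags, libs, lib_dirs, ldflags]; GN read the same output through
exec_script(..., "value"). The sketch assumes a Linux host with pkg-config and
the glib-2.0 development files installed, and invokes the script at its old path.

  import json
  import subprocess

  # Run the wrapper for a single package and unpack its five result lists.
  out = subprocess.check_output(
      ['python', 'build/config/linux/pkg-config.py', 'glib-2.0'])
  includes, cflags, libs, lib_dirs, ldflags = json.loads(out)
  print(includes)  # e.g. ['/usr/include/glib-2.0', ...]
  print(libs)      # e.g. ['glib-2.0']
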
diff --git a/build/config/linux/pkg_config.gni b/build/config/linux/pkg_config.gni
deleted file mode 100644
index edf0752..0000000
--- a/build/config/linux/pkg_config.gni
+++ /dev/null
@@ -1,125 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/sysroot.gni")
-
-# Defines a config specifying the result of running pkg-config for the given
-# packages. Put the package names you want to query in the "packages" variable
-# inside the template invocation.
-#
-# You can also add defines via the "defines" variable. This can be useful to
-# add this to the config to pass defines that the library expects to get by
-# users of its headers.
-#
-# Example:
-#   pkg_config("mything") {
-#     packages = [ "mything1", "mything2" ]
-#     defines = [ "ENABLE_AWESOME" ]
-#   }
-#
-# You can also use "extra args" to filter out results (see pkg-config.py):
-#   extra_args = [ "-v", "foo" ]
-# To ignore libs and ldflags (only cflags/defines will be set, which is useful
-# when doing manual dynamic linking), set:
-#   ignore_libs = true
-
-declare_args() {
-  # A pkg-config wrapper to call instead of trying to find and call the right
-  # pkg-config directly. Wrappers like this are common in cross-compilation
-  # environments.
-  # Leaving it blank defaults to searching PATH for 'pkg-config' and relying on
-  # the sysroot mechanism to find the right .pc files.
-  pkg_config = ""
-
-  # An optional pkg-config wrapper to use for tools built on the host.
-  host_pkg_config = ""
-
-  # CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
-  # and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
-  # depending on whether the systemroot is for a 32 or 64 bit architecture.
-  #
-  # When built under GYP, CrOS board builds specify the 'system_libdir' variable
-  # as part of the GYP_DEFINES provided by the CrOS emerge build or simple
-  # chrome build scheme. This variable permits controlling this for GN builds
-  # in similar fashion by setting the `system_libdir` variable in the build's
-  # args.gn file to 'lib' or 'lib64' as appropriate for the target architecture.
-  system_libdir = "lib"
-}
-
-pkg_config_script = "//build/config/linux/pkg-config.py"
-
-# Define the args we pass to the pkg-config script for other build files that
-# need to invoke it manually.
-pkg_config_args = []
-
-if (sysroot != "") {
-  # Pass the sysroot if we're using one (it requires the CPU arch also).
-  pkg_config_args += [
-    "-s",
-    rebase_path(sysroot),
-    "-a",
-    current_cpu,
-  ]
-}
-
-if (pkg_config != "") {
-  pkg_config_args += [
-    "-p",
-    pkg_config,
-  ]
-}
-
-# Only use the custom libdir when building with the target sysroot.
-if (target_sysroot != "" && sysroot == target_sysroot) {
-  pkg_config_args += [
-    "--system_libdir",
-    system_libdir,
-  ]
-}
-
-if (host_pkg_config != "") {
-  host_pkg_config_args = [
-    "-p",
-    host_pkg_config,
-  ]
-} else {
-  host_pkg_config_args = pkg_config_args
-}
-
-template("pkg_config") {
-  assert(defined(invoker.packages),
-         "Variable |packages| must be defined to be a list in pkg_config.")
-  config(target_name) {
-    if (host_toolchain == current_toolchain) {
-      args = host_pkg_config_args + invoker.packages
-    } else {
-      args = pkg_config_args + invoker.packages
-    }
-    if (defined(invoker.extra_args)) {
-      args += invoker.extra_args
-    }
-
-    pkgresult = exec_script(pkg_config_script, args, "value")
-    cflags = pkgresult[1]
-
-    # We want the system include paths to use -isystem instead of -I to suppress
-    # warnings in those headers.
-    foreach(include, pkgresult[0]) {
-      include_relativized = rebase_path(include, root_build_dir)
-      cflags += [ "-isystem$include_relativized" ]
-    }
-
-    if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
-      libs = pkgresult[2]
-      lib_dirs = pkgresult[3]
-      ldflags = pkgresult[4]
-    }
-
-    forward_variables_from(invoker,
-                           [
-                             "defines",
-                             "visibility",
-                           ])
-  }
-}
diff --git a/build/config/locales.gni b/build/config/locales.gni
deleted file mode 100644
index 2b608b7..0000000
--- a/build/config/locales.gni
+++ /dev/null
@@ -1,187 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Android doesn't ship all locales in order to save space (but webview does).
-# http://crbug.com/369218
-if (is_android) {
-  android_chrome_omitted_locales = [
-    "bn",
-    "et",
-    "gu",
-    "kn",
-    "ml",
-    "mr",
-    "ms",
-    "ta",
-    "te",
-  ]
-}
-
-# Chrome on iOS only ships with a subset of the locales supported by other
-# versions of Chrome, as the corresponding locales are not supported by the
-# operating system (but for simplicity, the corresponding .pak files are
-# still generated).
-if (is_ios) {
-  ios_unsupported_locales = [
-    "am",
-    "bn",
-    "et",
-    "fil",
-    "gu",
-    "kn",
-    "lv",
-    "ml",
-    "mr",
-    "sl",
-    "sw",
-    "ta",
-    "te",
-  ]
-}
-
-# Note: keep in sync with below.
-locales = [
-  "am",
-  "ar",
-  "bg",
-  "bn",
-  "ca",
-  "cs",
-  "da",
-  "de",
-  "el",
-  "en-GB",
-  "en-US",
-  "es",
-  "et",
-  "fa",
-  "fi",
-  "fil",
-  "fr",
-  "gu",
-  "he",
-  "hi",
-  "hr",
-  "hu",
-  "id",
-  "it",
-  "ja",
-  "kn",
-  "ko",
-  "lt",
-  "lv",
-  "ml",
-  "mr",
-  "ms",
-  "nb",
-  "nl",
-  "pl",
-  "pt-PT",
-  "ro",
-  "ru",
-  "sk",
-  "sl",
-  "sr",
-  "sv",
-  "sw",
-  "ta",
-  "te",
-  "th",
-  "tr",
-  "uk",
-  "vi",
-  "zh-CN",
-  "zh-TW",
-]
-
-# Chrome on iOS uses different names for "es-419" and "pt-BR" (called
-# respectively "es-MX" and "pt" on iOS).
-if (!is_ios) {
-  locales += [
-    "es-419",
-    "pt-BR",
-  ]
-} else {
-  locales += [
-    "es-MX",
-    "pt",
-  ]
-
-  ios_packed_locales = locales - ios_unsupported_locales
-}
-
-locales_with_fake_bidi = locales + [ "fake-bidi" ]
-
-# Same as the locales list but in the format Mac expects for output files:
-# it uses underscores instead of hyphens, and "en" instead of "en-US".
-locales_as_mac_outputs = [
-  "am",
-  "ar",
-  "bg",
-  "bn",
-  "ca",
-  "cs",
-  "da",
-  "de",
-  "el",
-  "en_GB",
-  "en",
-  "es",
-  "et",
-  "fa",
-  "fi",
-  "fil",
-  "fr",
-  "gu",
-  "he",
-  "hi",
-  "hr",
-  "hu",
-  "id",
-  "it",
-  "ja",
-  "kn",
-  "ko",
-  "lt",
-  "lv",
-  "ml",
-  "mr",
-  "ms",
-  "nb",
-  "nl",
-  "pl",
-  "pt_PT",
-  "ro",
-  "ru",
-  "sk",
-  "sl",
-  "sr",
-  "sv",
-  "sw",
-  "ta",
-  "te",
-  "th",
-  "tr",
-  "uk",
-  "vi",
-  "zh_CN",
-  "zh_TW",
-]
-
-# Chrome on iOS uses different names for "es-419" and "pt-BR" (called
-# respectively "es-MX" and "pt" on iOS).
-if (!is_ios) {
-  locales_as_mac_outputs += [
-    "es_419",
-    "pt_BR",
-  ]
-} else {
-  locales_as_mac_outputs += [
-    "es_MX",
-    "pt",
-  ]
-
-  ios_packed_locales_as_mac_outputs =
-      locales_as_mac_outputs - ios_unsupported_locales
-}
diff --git a/build/config/mac/BUILD.gn b/build/config/mac/BUILD.gn
deleted file mode 100644
index 60e3f05..0000000
--- a/build/config/mac/BUILD.gn
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/sysroot.gni")
-import("//build/config/mac/mac_sdk.gni")
-import("//build/config/mac/symbols.gni")
-
-# This is included by reference in the //build/config/compiler config that
-# is applied to all targets. It is here to separate out the logic.
-config("compiler") {
-  # These flags are shared between the C compiler and linker.
-  common_mac_flags = []
-
-  # CPU architecture.
-  if (current_cpu == "x64") {
-    common_mac_flags += [
-      "-arch",
-      "x86_64",
-    ]
-  } else if (current_cpu == "x86") {
-    common_mac_flags += [
-      "-arch",
-      "i386",
-    ]
-  }
-
-  # This is here so that all files get recompiled after an Xcode update.
-  # (defines are passed via the command line, and build systems rebuild things
-  # when their command line changes). Nothing should ever read this define.
-  defines = [ "CR_XCODE_VERSION=$xcode_version" ]
-
-  asmflags = common_mac_flags
-  cflags = common_mac_flags
-
-  # Without this, the constructors and destructors of a C++ object inside
-  # an Objective C struct won't be called, which is very bad.
-  cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
-
-  ldflags = common_mac_flags
-
-  # Create a new read-only segment for protected memory. The default segments
-  # (__TEXT and __DATA) are mapped read-execute and read-write by default.
-  ldflags += [ "-segprot", "PROTECTED_MEMORY", "rw", "r" ]
-
-  if (save_unstripped_output) {
-    ldflags += [ "-Wcrl,unstripped," + rebase_path(root_out_dir) ]
-  }
-}
-
-# This is included by reference in the //build/config/compiler:runtime_library
-# config that is applied to all targets. It is here to separate out the logic
-# that is Mac-only. Please see that target for advice on what should go in
-# :runtime_library vs. :compiler.
-config("runtime_library") {
-  common_flags = [
-    "-isysroot",
-    rebase_path(sysroot, root_build_dir),
-    "-mmacosx-version-min=$mac_deployment_target",
-  ]
-
-  asmflags = common_flags
-  cflags = common_flags
-  ldflags = common_flags
-
-  # Prevent Mac OS X AssertMacros.h (included by system headers) from defining
-  # macros that collide with common names, like 'check', 'require', and
-  # 'verify'.
-  # http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h
-  defines = [ "__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE=0" ]
-}
-
-# On Mac, this is used for everything except static libraries.
-config("mac_dynamic_flags") {
-  ldflags = [ "-Wl,-ObjC" ]  # Always load Objective-C categories and classes.
-
-  if (is_component_build) {
-    ldflags += [
-      # Path for loading shared libraries for unbundled binaries.
-      "-Wl,-rpath,@loader_path/.",
-
-      # Path for loading shared libraries for bundled binaries. Get back from
-      # Binary.app/Contents/MacOS.
-      "-Wl,-rpath,@loader_path/../../..",
-    ]
-  }
-}
-
-# The ldflags referenced below are handled by
-# //build/toolchain/mac/linker_driver.py.
-# Remove this config if a target wishes to change the arguments passed to the
-# strip command during linking. This config by default strips all symbols
-# from a binary, but some targets may wish to specify an exports file to
-# preserve specific symbols.
-config("strip_all") {
-  if (enable_stripping) {
-    ldflags = [ "-Wcrl,strip,-x,-S" ]
-  }
-}
diff --git a/build/config/mac/BuildInfo.plist b/build/config/mac/BuildInfo.plist
deleted file mode 100644
index d32bf2e..0000000
--- a/build/config/mac/BuildInfo.plist
+++ /dev/null
@@ -1,18 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-  <key>BuildMachineOSBuild</key>
-  <string>${BUILD_MACHINE_OS_BUILD}</string>
-  <key>DTCompiler</key>
-  <string>${GCC_VERSION}</string>
-  <key>DTSDKBuild</key>
-  <string>${MAC_SDK_BUILD}</string>
-  <key>DTSDKName</key>
-  <string>${MAC_SDK_NAME}</string>
-  <key>DTXcode</key>
-  <string>${XCODE_VERSION}</string>
-  <key>DTXcodeBuild</key>
-  <string>${XCODE_BUILD}</string>
-</dict>
-</plist>
diff --git a/build/config/mac/OWNERS b/build/config/mac/OWNERS
deleted file mode 100644
index 14747a0..0000000
--- a/build/config/mac/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-rsesek@chromium.org
-sdefresne@chromium.org
-
-# COMPONENT: Build
diff --git a/build/config/mac/base_rules.gni b/build/config/mac/base_rules.gni
deleted file mode 100644
index 6934833..0000000
--- a/build/config/mac/base_rules.gni
+++ /dev/null
@@ -1,305 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file contains rules that are shared between Mac and iOS.
-
-import("//build/toolchain/toolchain.gni")
-import("//build/config/mac/symbols.gni")
-
-if (is_mac) {
-  import("//build/config/mac/mac_sdk.gni")
-} else if (is_ios) {
-  import("//build/config/ios/ios_sdk.gni")
-}
-
-# Convert plist file to given format.
-#
-# Arguments
-#
-#   source:
-#     string, path to the plist file to convert
-#
-#   output:
-#     string, path to the converted plist, must be under $root_build_dir
-#
-#   format:
-#     string, the format to `plutil -convert` the plist to.
-template("convert_plist") {
-  assert(defined(invoker.source), "source must be defined for $target_name")
-  assert(defined(invoker.output), "output must be defined for $target_name")
-  assert(defined(invoker.format), "format must be defined for $target_name")
-
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "visibility",
-                             "testonly",
-                             "deps",
-                           ])
-
-    script = "//build/config/mac/xcrun.py"
-    sources = [
-      invoker.source,
-    ]
-    outputs = [
-      invoker.output,
-    ]
-    args = []
-    if (!use_system_xcode) {
-      args += [
-        "--developer_dir",
-        hermetic_xcode_path,
-      ]
-    }
-    args += [
-      "plutil",
-      "-convert",
-      invoker.format,
-      "-o",
-      rebase_path(invoker.output, root_build_dir),
-      rebase_path(invoker.source, root_build_dir),
-    ]
-  }
-}
-
-# Template to merge multiple plist files and perform variable substitutions.
-#
-# Arguments
-#
-#     plist_templates:
-#         string array, paths to plist files which will be used for the bundle.
-#
-#     format:
-#         string, the format to `plutil -convert` the plist to when
-#         generating the output.
-#
-#     substitutions:
-#         string array, 'key=value' pairs used to replace ${key} by value
-#         when generating the output plist file.
-#
-#     output_name:
-#         string, name of the generated plist file.
-template("compile_plist") {
-  assert(defined(invoker.plist_templates),
-         "A list of template plist files must be specified for $target_name")
-  assert(defined(invoker.format),
-         "The plist format must be specified for $target_name")
-  assert(defined(invoker.substitutions),
-         "A list of key=value pairs must be specified for $target_name")
-  assert(defined(invoker.output_name),
-         "The name of the output file must be specified for $target_name")
-
-  _output_name = invoker.output_name
-  _merged_name = get_path_info(_output_name, "dir") + "/" +
-                 get_path_info(_output_name, "name") + "_merged." +
-                 get_path_info(_output_name, "extension")
-
-  _merge_target = target_name + "_merge"
-
-  action(_merge_target) {
-    forward_variables_from(invoker,
-                           [
-                             "deps",
-                             "testonly",
-                           ])
-
-    script = "//build/config/mac/plist_util.py"
-    sources = invoker.plist_templates
-    outputs = [
-      _merged_name,
-    ]
-    args = [
-             "merge",
-             "-f=" + invoker.format,
-             "-o=" + rebase_path(_merged_name, root_build_dir),
-           ] + rebase_path(invoker.plist_templates, root_build_dir)
-  }
-
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "testonly",
-                             "visibility",
-                           ])
-    script = "//build/config/mac/plist_util.py"
-    sources = [
-      _merged_name,
-    ]
-    outputs = [
-      _output_name,
-    ]
-    args = [
-      "substitute",
-      "-f=" + invoker.format,
-      "-o=" + rebase_path(_output_name, root_build_dir),
-      "-t=" + rebase_path(_merged_name, root_build_dir),
-    ]
-    foreach(_substitution, invoker.substitutions) {
-      args += [ "-s=$_substitution" ]
-    }
-    deps = [
-      ":$_merge_target",
-    ]
-  }
-}
-
-# Template to merge multiple .entitlements files performing variable
-# substitutions.
-#
-# Arguments
-#
-#     entitlements_templates:
-#         string array, paths to entitlements files which will be used for the
-#         bundle.
-#
-#     substitutions:
-#         string array, 'key=value' pairs used to replace ${key} by value
-#         when generating the output plist file.
-#
-#     output_name:
-#         string, name of the generated entitlements file.
-template("compile_entitlements") {
-  assert(defined(invoker.entitlements_templates),
-         "A list of template plist files must be specified for $target_name")
-
-  compile_plist(target_name) {
-    forward_variables_from(invoker,
-                           "*",
-                           [
-                             "entitlements_templates",
-                             "format",
-                             "plist_templates",
-                           ])
-
-    plist_templates = invoker.entitlements_templates
-
-    # Entitlements files are always encoded in xml1.
-    format = "xml1"
-
-    # Entitlements files use unsubstituted variables, so define substitutions
-    # to leave those variables untouched.
-    if (!defined(substitutions)) {
-      substitutions = []
-    }
-
-    substitutions += [
-      "AppIdentifierPrefix=\$(AppIdentifierPrefix)",
-      "CFBundleIdentifier=\$(CFBundleIdentifier)",
-    ]
-  }
-}
-
-# The base template used to generate Info.plist files for iOS and Mac apps and
-# frameworks.
-#
-# Arguments
-#
-#     plist_templates:
-#         string array, paths to plist files which will be used for the bundle.
-#
-#     executable_name:
-#         string, name of the generated target used for the product
-#         and executable name as specified in the output Info.plist.
-#
-#     format:
-#         string, the format to `plutil -convert` the plist to when
-#         generating the output.
-#
-#     extra_substitutions:
-#         (optional) string array, 'key=value' pairs for extra fields which are
-#         specified in a source Info.plist template.
-#
-#     output_name:
-#         (optional) string, name of the generated plist file, default to
-#         "$target_gen_dir/$target_name.plist".
-template("info_plist") {
-  assert(defined(invoker.executable_name),
-         "The executable_name must be specified for $target_name")
-  executable_name = invoker.executable_name
-
-  compile_plist(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "plist_templates",
-                             "testonly",
-                             "deps",
-                             "visibility",
-                             "format",
-                           ])
-
-    if (defined(invoker.output_name)) {
-      output_name = invoker.output_name
-    } else {
-      output_name = "$target_gen_dir/$target_name.plist"
-    }
-
-    substitutions = [
-      "BUILD_MACHINE_OS_BUILD=$machine_os_build",
-      "EXECUTABLE_NAME=$executable_name",
-      "GCC_VERSION=com.apple.compilers.llvm.clang.1_0",
-      "PRODUCT_NAME=$executable_name",
-      "XCODE_BUILD=$xcode_build",
-      "XCODE_VERSION=$xcode_version",
-    ]
-    if (is_mac) {
-      substitutions += [ "MACOSX_DEPLOYMENT_TARGET=$mac_deployment_target" ]
-    } else if (is_ios) {
-      substitutions += [ "IOS_DEPLOYMENT_TARGET=$ios_deployment_target" ]
-    }
-    if (defined(invoker.extra_substitutions)) {
-      substitutions += invoker.extra_substitutions
-    }
-  }
-}
-
-# Template to compile .xib and .storyboard files.
-#
-# Arguments
-#
-#     sources:
-#         list of string, sources to compile
-#
-#     ibtool_flags:
-#         (optional) list of string, additional flags to pass to the ibtool
-template("compile_ib_files") {
-  action_foreach(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "testonly",
-                             "visibility",
-                           ])
-    assert(defined(invoker.sources),
-           "sources must be specified for $target_name")
-    assert(defined(invoker.output_extension),
-           "output_extension must be specified for $target_name")
-
-    ibtool_flags = []
-    if (defined(invoker.ibtool_flags)) {
-      ibtool_flags = invoker.ibtool_flags
-    }
-
-    _output_extension = invoker.output_extension
-
-    script = "//build/config/mac/compile_ib_files.py"
-    sources = invoker.sources
-    outputs = [
-      "$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension",
-    ]
-    args = [
-      "--input",
-      "{{source}}",
-      "--output",
-      rebase_path(
-          "$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension",
-          root_build_dir),
-    ]
-    if (!use_system_xcode) {
-      args += [
-        "--developer_dir",
-        hermetic_xcode_path,
-      ]
-    }
-    args += ibtool_flags
-  }
-}
diff --git a/build/config/mac/compile_ib_files.py b/build/config/mac/compile_ib_files.py
deleted file mode 100644
index 281e554..0000000
--- a/build/config/mac/compile_ib_files.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import argparse
-import logging
-import os
-import re
-import subprocess
-import sys
-
-
-def main():
-  parser = argparse.ArgumentParser(
-      description='A script to compile xib and storyboard.',
-      fromfile_prefix_chars='@')
-  parser.add_argument('-o', '--output', required=True,
-                      help='Path to output bundle.')
-  parser.add_argument('-i', '--input', required=True,
-                      help='Path to input xib or storyboard.')
-  parser.add_argument('--developer_dir', required=False,
-                      help='Path to Xcode.')
-  args, unknown_args = parser.parse_known_args()
-
-  if args.developer_dir:
-    os.environ['DEVELOPER_DIR'] = args.developer_dir
-
-  ibtool_args = [
-      'xcrun', 'ibtool',
-      '--errors', '--warnings', '--notices',
-      '--output-format', 'human-readable-text'
-  ]
-  ibtool_args += unknown_args
-  ibtool_args += [
-      '--compile',
-      os.path.abspath(args.output),
-      os.path.abspath(args.input)
-  ]
-
-  ibtool_section_re = re.compile(r'/\*.*\*/')
-  ibtool_re = re.compile(r'.*note:.*is clipping its content')
-  try:
-    stdout = subprocess.check_output(ibtool_args)
-  except subprocess.CalledProcessError as e:
-    print(e.output)
-    raise
-  current_section_header = None
-  for line in stdout.splitlines():
-    if ibtool_section_re.match(line):
-      current_section_header = line
-    elif not ibtool_re.match(line):
-      if current_section_header:
-        print(current_section_header)
-        current_section_header = None
-      print(line)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/config/mac/mac_sdk.gni b/build/config/mac/mac_sdk.gni
deleted file mode 100644
index 5aa4ef1..0000000
--- a/build/config/mac/mac_sdk.gni
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/chrome_build.gni")
-import("//build/config/mac/mac_sdk_overrides.gni")
-import("//build/toolchain/toolchain.gni")
-
-declare_args() {
-  # Minimum supported version of macOS. Must be of the form x.x.x for
-  # Info.plist files.
-  mac_deployment_target = "10.9.0"
-
-  # Path to a specific version of the Mac SDK, not including a slash at the end.
-  # If empty, the path to the lowest version greater than or equal to
-  # mac_sdk_min is used.
-  mac_sdk_path = ""
-
-  # The SDK name as accepted by xcodebuild.
-  mac_sdk_name = "macosx"
-}
-
-# Check that the version of the macOS SDK used is the one requested when
-# building a version of Chrome shipped to users. Disable the check when
-# building for iOS, as the macOS SDK version is not relevant for the tools
-# built for the host (they are not shipped) --- this is required because
-# Chrome on iOS is usually built with the latest version of Xcode, which may
-# not ship with the version of the macOS SDK used to build Chrome on Mac.
-# TODO(crbug.com/635745): the check for target_os should be replaced by a
-# check that current_toolchain is default_toolchain, and the file should
-# assert that current_os is "mac" once this file is no longer included by
-# iOS toolchains.
-_verify_sdk = is_chrome_branded && is_official_build && target_os != "ios"
-
-find_sdk_args = [ "--print_sdk_path" ]
-if (!use_system_xcode) {
-  find_sdk_args += [
-    "--developer_dir",
-    hermetic_xcode_path,
-  ]
-}
-if (_verify_sdk) {
-  find_sdk_args += [
-    "--verify",
-    mac_sdk_min,
-    "--sdk_path=" + mac_sdk_path,
-  ]
-} else {
-  find_sdk_args += [ mac_sdk_min ]
-}
-
-# The tool will print the SDK path on the first line, and the version on the
-# second line.
-find_sdk_lines =
-    exec_script("//build/mac/find_sdk.py", find_sdk_args, "list lines")
-mac_sdk_version = find_sdk_lines[1]
-if (mac_sdk_path == "") {
-  mac_sdk_path = find_sdk_lines[0]
-}
-
-script_name = "//build/config/mac/sdk_info.py"
-sdk_info_args = []
-if (!use_system_xcode) {
-  sdk_info_args += [
-    "--developer_dir",
-    hermetic_xcode_path,
-  ]
-}
-sdk_info_args += [ mac_sdk_name ]
-
-_mac_sdk_result = exec_script(script_name, sdk_info_args, "scope")
-xcode_version = _mac_sdk_result.xcode_version
-xcode_build = _mac_sdk_result.xcode_build
-machine_os_build = _mac_sdk_result.machine_os_build
-
-if (mac_sdk_version != mac_sdk_min &&
-    exec_script("//build/check_return_value.py",
-                [
-                  "test",
-                  xcode_version,
-                  "-ge",
-                  "0730",
-                ],
-                "value") != 1) {
-  print(
-      "********************************************************************************")
-  print(
-      " WARNING: The Mac OS X SDK is incompatible with the version of Xcode. To fix,")
-  print(
-      "          either upgrade Xcode to the latest version or install the Mac OS X")
-  print(
-      "          $mac_sdk_min SDK. For more information, see https://crbug.com/620127.")
-  print()
-  print(" Current SDK Version:   $mac_sdk_version")
-  print(" Current Xcode Version: $xcode_version ($xcode_build)")
-  print(
-      "********************************************************************************")
-  assert(false, "SDK is incompatible with Xcode")
-}
diff --git a/build/config/mac/mac_sdk_overrides.gni b/build/config/mac/mac_sdk_overrides.gni
deleted file mode 100644
index 3632678..0000000
--- a/build/config/mac/mac_sdk_overrides.gni
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file contains arguments that subprojects may choose to override. It
-# asserts that those overrides are used, to prevent unused args warnings.
-
-_sdk_min_from_env = getenv("FORCE_MAC_SDK_MIN")
-declare_args() {
-  # Minimum supported version of the Mac SDK.
-  if (_sdk_min_from_env == "") {
-    mac_sdk_min = "10.12"
-  } else {
-    mac_sdk_min = _sdk_min_from_env
-  }
-}
-
-# Always assert that mac_sdk_min is used on non-macOS platforms to prevent
-# unused args warnings.
-if (!is_mac) {
-  assert(mac_sdk_min == "10.12" || true)
-}
diff --git a/build/config/mac/package_framework.py b/build/config/mac/package_framework.py
deleted file mode 100644
index f669528..0000000
--- a/build/config/mac/package_framework.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import errno
-import os
-import shutil
-import sys
-
-def Main():
-  parser = argparse.ArgumentParser(description='Create Mac Framework symlinks')
-  parser.add_argument('--framework', action='store', type=str, required=True)
-  parser.add_argument('--version', action='store', type=str)
-  parser.add_argument('--contents', action='store', type=str, nargs='+')
-  parser.add_argument('--stamp', action='store', type=str, required=True)
-  args = parser.parse_args()
-
-  VERSIONS = 'Versions'
-  CURRENT = 'Current'
-
-  # Ensure the Foo.framework/Versions/A/ directory exists and create the
-  # Foo.framework/Versions/Current symlink to it.
-  if args.version:
-    try:
-      os.makedirs(os.path.join(args.framework, VERSIONS, args.version), 0744)
-    except OSError as e:
-      if e.errno != errno.EEXIST:
-        raise e
-    _Relink(os.path.join(args.version),
-            os.path.join(args.framework, VERSIONS, CURRENT))
-
-  # Establish the top-level symlinks in the framework bundle. The dest of
-  # the symlinks may not exist yet.
-  if args.contents:
-    for item in args.contents:
-      _Relink(os.path.join(VERSIONS, CURRENT, item),
-              os.path.join(args.framework, item))
-
-  # Write out a stamp file.
-  if args.stamp:
-    with open(args.stamp, 'w') as f:
-      f.write(str(args))
-
-  return 0
-
-
-def _Relink(dest, link):
-  """Creates a symlink to |dest| named |link|. If |link| already exists,
-  it is overwritten."""
-  try:
-    os.remove(link)
-  except OSError as e:
-    if e.errno != errno.ENOENT:
-      shutil.rmtree(link)
-  os.symlink(dest, link)
-
-
-if __name__ == '__main__':
-  sys.exit(Main())
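
For illustration only (not part of the removed tree): a standalone Python sketch
of the bundle layout package_framework.py produced. Versions/<version> is a real
directory, while Versions/Current and the top-level entries are symlinks into
it. The Foo.framework name and its contents here are hypothetical.

  import os
  import tempfile

  framework = os.path.join(tempfile.mkdtemp(), 'Foo.framework')
  os.makedirs(os.path.join(framework, 'Versions', 'A'))

  # Versions/Current -> A, then top-level symlinks into Versions/Current/.
  os.symlink('A', os.path.join(framework, 'Versions', 'Current'))
  for item in ('Foo', 'Resources'):
      os.symlink(os.path.join('Versions', 'Current', item),
                 os.path.join(framework, item))

  print(sorted(os.listdir(framework)))  # ['Foo', 'Resources', 'Versions']
  print(os.readlink(os.path.join(framework, 'Versions', 'Current')))  # A
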
diff --git a/build/config/mac/plist_util.py b/build/config/mac/plist_util.py
deleted file mode 100644
index bba0208..0000000
--- a/build/config/mac/plist_util.py
+++ /dev/null
@@ -1,226 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import plistlib
-import os
-import re
-import subprocess
-import sys
-import tempfile
-import shlex
-
-
-# Xcode substitutes variables like ${PRODUCT_NAME} or $(PRODUCT_NAME) when
-# compiling Info.plist. It also supports modifiers like :identifier
-# or :rfc1034identifier. SUBSTITUTION_REGEXP_LIST is a list of regular
-# expressions matching a variable substitution pattern with an optional
-# modifier, while INVALID_CHARACTER_REGEXP matches all characters that are
-# not valid in an "identifier" value (used when applying the modifier).
-INVALID_CHARACTER_REGEXP = re.compile(r'[_/\s]')
-SUBSTITUTION_REGEXP_LIST = (
-    re.compile(r'\$\{(?P<id>[^}]*?)(?P<modifier>:[^}]*)?\}'),
-    re.compile(r'\$\((?P<id>[^}]*?)(?P<modifier>:[^}]*)?\)'),
-)
-
-
-class SubstitutionError(Exception):
-  def __init__(self, key):
-    super(SubstitutionError, self).__init__()
-    self.key = key
-
-  def __str__(self):
-    return "SubstitutionError: {}".format(self.key)
-
-
-def InterpolateString(value, substitutions):
-  """Interpolates variable references into |value| using |substitutions|.
-
-  Inputs:
-    value: a string
-    substitutions: a mapping of variable names to values
-
-  Returns:
-    A new string with all variables references ${VARIABLES} replaced by their
-    value in |substitutions|. Raises SubstitutionError if a variable has no
-    substitution.
-  """
-  def repl(match):
-    variable = match.group('id')
-    if variable not in substitutions:
-      raise SubstitutionError(variable)
-    # Some values need to be identifiers, so the variable references may
-    # contain :modifier attributes indicating how they should be converted
-    # to identifiers ("identifier" replaces invalid characters with '_' and
-    # "rfc1034identifier" replaces them with '-' to also be URI-friendly).
-    modifier = match.group('modifier')
-    if modifier == ':identifier':
-      return INVALID_CHARACTER_REGEXP.sub('_', substitutions[variable])
-    elif modifier == ':rfc1034identifier':
-      return INVALID_CHARACTER_REGEXP.sub('-', substitutions[variable])
-    else:
-      return substitutions[variable]
-  for substitution_regexp in SUBSTITUTION_REGEXP_LIST:
-    value = substitution_regexp.sub(repl, value)
-  return value
-
-
-def Interpolate(value, substitutions):
-  """Interpolates variable references into |value| using |substitutions|.
-
-  Inputs:
-    value: a value, can be a dictionary, list, string or other
-    substitutions: a mapping of variable names to values
-
-  Returns:
-    A new value with all variables references ${VARIABLES} replaced by their
-    value in |substitutions|. Raises SubstitutionError if a variable has no
-    substitution.
-  """
-  if isinstance(value, dict):
-      return {k: Interpolate(v, substitutions) for k, v in value.iteritems()}
-  if isinstance(value, list):
-    return [Interpolate(v, substitutions) for v in value]
-  if isinstance(value, str):
-    return InterpolateString(value, substitutions)
-  return value
-
-
-def LoadPList(path):
-  """Loads Plist at |path| and returns it as a dictionary."""
-  fd, name = tempfile.mkstemp()
-  try:
-    subprocess.check_call(['plutil', '-convert', 'xml1', '-o', name, path])
-    with os.fdopen(fd, 'r') as f:
-      return plistlib.readPlist(f)
-  finally:
-    os.unlink(name)
-
-
-def SavePList(path, format, data):
-  """Saves |data| as a Plist to |path| in the specified |format|."""
-  fd, name = tempfile.mkstemp()
-  try:
-    # "plutil" does not replace the destination file but update it in place,
-    # so if more than one hardlink points to destination all of them will be
-    # modified. This is not what is expected, so delete destination file if
-    # it does exist.
-    if os.path.exists(path):
-      os.unlink(path)
-    with os.fdopen(fd, 'w') as f:
-      plistlib.writePlist(data, f)
-    subprocess.check_call(['plutil', '-convert', format, '-o', path, name])
-  finally:
-    os.unlink(name)
-
-
-def MergePList(plist1, plist2):
-  """Merges |plist1| with |plist2| recursively.
-
-  Creates a new dictionary representing a Property List (.plist) files by
-  merging the two dictionary |plist1| and |plist2| recursively (only for
-  dictionary values). List value will be concatenated.
-
-  Args:
-    plist1: a dictionary representing a Property List (.plist) file
-    plist2: a dictionary representing a Property List (.plist) file
-
-  Returns:
-    A new dictionary representing a Property List (.plist) file by merging
-    |plist1| with |plist2|. If any value is a dictionary, they are merged
-    recursively, otherwise |plist2| value is used. If values are list, they
-    are concatenated.
-  """
-  result = plist1.copy()
-  for key, value in plist2.iteritems():
-    if isinstance(value, dict):
-      old_value = result.get(key)
-      if isinstance(old_value, dict):
-        value = MergePList(old_value, value)
-    if isinstance(value, list):
-      value = plist1.get(key, []) + plist2.get(key, [])
-    result[key] = value
-  return result
-
-
-class Action(object):
-  """Class implementing one action supported by the script."""
-
-  @classmethod
-  def Register(cls, subparsers):
-    parser = subparsers.add_parser(cls.name, help=cls.help)
-    parser.set_defaults(func=cls._Execute)
-    cls._Register(parser)
-
-
-class MergeAction(Action):
-  """Class to merge multiple plist files."""
-
-  name = 'merge'
-  help = 'merge multiple plist files'
-
-  @staticmethod
-  def _Register(parser):
-    parser.add_argument(
-        '-o', '--output', required=True,
-        help='path to the output plist file')
-    parser.add_argument(
-        '-f', '--format', required=True, choices=('xml1', 'binary1', 'json'),
-        help='format of the plist file to generate')
-    parser.add_argument(
-          'path', nargs="+",
-          help='path to plist files to merge')
-
-  @staticmethod
-  def _Execute(args):
-    data = {}
-    for filename in args.path:
-      data = MergePList(data, LoadPList(filename))
-    SavePList(args.output, args.format, data)
-
-
-class SubstituteAction(Action):
-  """Class implementing the variable substitution in a plist file."""
-
-  name = 'substitute'
-  help = 'perform pattern substitution in a plist file'
-
-  @staticmethod
-  def _Register(parser):
-    parser.add_argument(
-        '-o', '--output', required=True,
-        help='path to the output plist file')
-    parser.add_argument(
-        '-t', '--template', required=True,
-        help='path to the template file')
-    parser.add_argument(
-        '-s', '--substitution', action='append', default=[],
-        help='substitution rule in the format key=value')
-    parser.add_argument(
-        '-f', '--format', required=True, choices=('xml1', 'binary1', 'json'),
-        help='format of the plist file to generate')
-
-  @staticmethod
-  def _Execute(args):
-    substitutions = {}
-    for substitution in args.substitution:
-      key, value = substitution.split('=', 1)
-      substitutions[key] = value
-    data = Interpolate(LoadPList(args.template), substitutions)
-    SavePList(args.output, args.format, data)
-
-
-def Main():
-  parser = argparse.ArgumentParser(description='manipulate plist files')
-  subparsers = parser.add_subparsers()
-
-  for action in [MergeAction, SubstituteAction]:
-    action.Register(subparsers)
-
-  args = parser.parse_args()
-  args.func(args)
-
-
-if __name__ == '__main__':
-  sys.exit(Main())
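
For illustration only (not part of the removed tree): a standalone Python sketch
of the ${VAR} / $(VAR) substitution described above, including the :identifier
and :rfc1034identifier modifiers. Error handling and the plist I/O of the
removed plist_util.py are omitted.

  import re

  INVALID = re.compile(r'[_/\s]')
  PATTERNS = (
      re.compile(r'\$\{(?P<id>[^}]*?)(?P<modifier>:[^}]*)?\}'),
      re.compile(r'\$\((?P<id>[^}]*?)(?P<modifier>:[^}]*)?\)'),
  )

  def substitute(value, substitutions):
    def repl(match):
      replacement = substitutions[match.group('id')]
      if match.group('modifier') == ':identifier':
        return INVALID.sub('_', replacement)
      if match.group('modifier') == ':rfc1034identifier':
        return INVALID.sub('-', replacement)
      return replacement
    for pattern in PATTERNS:
      value = pattern.sub(repl, value)
    return value

  # ${PRODUCT_NAME:rfc1034identifier}: spaces become '-'.
  print(substitute('${PRODUCT_NAME:rfc1034identifier}.app',
                   {'PRODUCT_NAME': 'My App'}))  # My-App.app
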
diff --git a/build/config/mac/prepare_framework_version.py b/build/config/mac/prepare_framework_version.py
deleted file mode 100644
index 5e8a53f..0000000
--- a/build/config/mac/prepare_framework_version.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import shutil
-import sys
-
-# Ensures that the current version matches the last-produced version, which is
-# stored in the version_file. If it does not, then the framework_root_dir is
-# obliterated.
-# Usage: python prepare_framework_version.py out/obj/version_file \
-#                                            out/Framework.framework \
-#                                            'A'
-
-def PrepareFrameworkVersion(version_file, framework_root_dir, version):
-  # Test what the current framework version is. Stop if it is up-to-date.
-  try:
-    with open(version_file, 'r') as f:
-      current_version = f.read()
-      if current_version == version:
-        return
-  except IOError:
-    pass
-
-  # The framework version has changed, so clobber the framework.
-  if os.path.exists(framework_root_dir):
-    shutil.rmtree(framework_root_dir)
-
-  # Write out the new framework version file, making sure its containing
-  # directory exists.
-  dirname = os.path.dirname(version_file)
-  if not os.path.isdir(dirname):
-    os.makedirs(dirname, 0700)
-
-  with open(version_file, 'w+') as f:
-    f.write(version)
-
-
-if __name__ == '__main__':
-  PrepareFrameworkVersion(sys.argv[1], sys.argv[2], sys.argv[3])
-  sys.exit(0)
diff --git a/build/config/mac/rules.gni b/build/config/mac/rules.gni
deleted file mode 100644
index 9872661..0000000
--- a/build/config/mac/rules.gni
+++ /dev/null
@@ -1,672 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/mac/base_rules.gni")
-
-# Generates Info.plist files for Mac apps and frameworks.
-#
-# Arguments
-#
-#     info_plist:
-#         (optional) string, path to the Info.plist file that will be used for
-#         the bundle.
-#
-#     info_plist_target:
-#         (optional) string, if the info_plist is generated from an action,
-#         rather than a regular source file, specify the target name in lieu
-#         of info_plist. The two arguments are mutually exclusive.
-#
-#     executable_name:
-#         string, name of the generated target used for the product
-#         and executable name as specified in the output Info.plist.
-#
-#     extra_substitutions:
-#         (optional) string array, 'key=value' pairs for extra fields which are
-#         specified in a source Info.plist template.
-template("mac_info_plist") {
-  assert(defined(invoker.info_plist) != defined(invoker.info_plist_target),
-         "Only one of info_plist or info_plist_target may be specified in " +
-             target_name)
-
-  if (defined(invoker.info_plist)) {
-    _info_plist = invoker.info_plist
-  } else {
-    _info_plist_target_output = get_target_outputs(invoker.info_plist_target)
-    _info_plist = _info_plist_target_output[0]
-  }
-
-  info_plist(target_name) {
-    format = "xml1"
-    extra_substitutions = []
-    if (defined(invoker.extra_substitutions)) {
-      extra_substitutions = invoker.extra_substitutions
-    }
-    extra_substitutions += [
-      "MAC_SDK_BUILD=$mac_sdk_version",
-      "MAC_SDK_NAME=$mac_sdk_name$mac_sdk_version",
-    ]
-    plist_templates = [
-      "//build/config/mac/BuildInfo.plist",
-      _info_plist,
-    ]
-    if (defined(invoker.info_plist_target)) {
-      deps = [
-        invoker.info_plist_target,
-      ]
-    }
-    forward_variables_from(invoker,
-                           [
-                             "testonly",
-                             "executable_name",
-                           ])
-  }
-}
-
-# Template to compile and package Mac XIB files as bundle data.
-#
-# Arguments
-#
-#     sources:
-#         list of string, sources to compile
-#
-#     output_path:
-#         (optional) string, the path to use for the outputs list in the
-#         bundle_data step. If unspecified, defaults to bundle_resources_dir.
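-#
-# Example (illustrative sketch; the target name and source path are
-# hypothetical):
-#
-#   mac_xib_bundle_data("my_app_xibs") {
-#     sources = [ "resources/MainMenu.xib" ]
-#   }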
-template("mac_xib_bundle_data") {
-  _target_name = target_name
-  _compile_target_name = _target_name + "_compile_ibtool"
-
-  compile_ib_files(_compile_target_name) {
-    forward_variables_from(invoker, [ "testonly" ])
-    visibility = [ ":$_target_name" ]
-    sources = invoker.sources
-    output_extension = "nib"
-    ibtool_flags = [
-      "--minimum-deployment-target",
-      mac_deployment_target,
-
-      # TODO(rsesek): Enable this once all the bots are on Xcode 7+.
-      # "--target-device",
-      # "mac",
-    ]
-  }
-
-  bundle_data(_target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "testonly",
-                             "visibility",
-                           ])
-
-    public_deps = [
-      ":$_compile_target_name",
-    ]
-    sources = get_target_outputs(":$_compile_target_name")
-
-    _output_path = "{{bundle_resources_dir}}"
-    if (defined(invoker.output_path)) {
-      _output_path = invoker.output_path
-    }
-
-    outputs = [
-      "$_output_path/{{source_file_part}}",
-    ]
-  }
-}
-
-# Template to package a shared library into a Mac framework bundle.
-#
-# By default, the bundle target this template generates does not link the
-# resulting framework into anything that depends on it. If a dependency wants
-# a link-time (as well as build-time) dependency on the framework bundle,
-# depend against "$target_name+link". If only the build-time dependency is
-# required (e.g., for copying into another bundle), then use "$target_name".
-#
-# Arguments
-#
-#     framework_version:
-#         string, version of the framework. Typically this is a
-#         single letter, like "A".
-#
-#     framework_contents:
-#         list of string, top-level items in the framework. This is
-#         the list of symlinks to create in the .framework directory that link
-#         into Versions/Current/.
-#
-#     info_plist:
-#         (optional) string, path to the Info.plist file that will be used for
-#         the bundle.
-#
-#     info_plist_target:
-#         (optional) string, if the info_plist is generated from an action,
-#         rather than a regular source file, specify the target name in lieu
-#         of info_plist. The two arguments are mutually exclusive.
-#
-#     output_name:
-#         (optional) string, name of the generated framework without the
-#         .framework suffix. If omitted, defaults to target_name.
-#
-#     extra_substitutions:
-#         (optional) string array, 'key=value' pairs for extra fields which are
-#         specified in a source Info.plist template.
-#
-# This template provides two targets for the resulting framework bundle. The
-# link-time behavior varies depending on which of the two targets below is
-# added as a dependency:
-#   - $target_name only adds a build-time dependency. Targets that depend on
-#     it will not link against the framework.
-#   - $target_name+link adds a build-time and link-time dependency. Targets
-#     that depend on it will link against the framework.
-#
-# The build-time-only dependency is used when a target needs the framework
-# only for resources, or because the target loads it at run-time via dlopen()
-# or NSBundle. The link-time dependency will cause the dependent target to
-# have the framework loaded by dyld at launch.
-#
-# Example of build-time only dependency:
-#
-#     mac_framework_bundle("CoreTeleportation") {
-#       sources = [ ... ]
-#     }
-#
-#     bundle_data("core_teleportation_bundle_data") {
-#       deps = [ ":CoreTeleportation" ]
-#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
-#       outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
-#     }
-#
-#     app_bundle("GoatTeleporter") {
-#       sources = [ ... ]
-#       deps = [
-#         ":core_teleportation_bundle_data",
-#       ]
-#     }
-#
-# The GoatTeleporter.app will not directly link against
-# CoreTeleportation.framework, but it will be included in the bundle's
-# Frameworks directory.
-#
-# Example of link-time dependency:
-#
-#     mac_framework_bundle("CoreTeleportation") {
-#       sources = [ ... ]
-#       ldflags = [
-#         "-install_name",
-#         "@executable_path/../Frameworks/$target_name.framework"
-#       ]
-#     }
-#
-#     bundle_data("core_teleportation_bundle_data") {
-#       deps = [ ":CoreTeleportation+link" ]
-#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
-#       outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
-#     }
-#
-#     app_bundle("GoatTeleporter") {
-#       sources = [ ... ]
-#       deps = [
-#         ":core_teleportation_bundle_data",
-#       ]
-#     }
-#
-# Note that the framework is still copied to the app's bundle, but dyld will
-# load this library when the app is launched because it uses the "+link"
-# target as a dependency. This also requires that the framework set its
-# install_name so that dyld can locate it.
-#
-# See "gn help shared_library" for more information on arguments supported
-# by shared library target.
-template("mac_framework_bundle") {
-  assert(defined(invoker.deps),
-         "Dependencies must be specified for $target_name")
-  assert(invoker.framework_version != "", "framework_version is required")
-  assert(defined(invoker.framework_contents), "framework_contents is required")
-
-  _info_plist_target = target_name + "_info_plist"
-
-  mac_info_plist(_info_plist_target) {
-    executable_name = target_name
-    if (defined(invoker.output_name)) {
-      executable_name = invoker.output_name
-    }
-    forward_variables_from(invoker,
-                           [
-                             "extra_substitutions",
-                             "info_plist",
-                             "info_plist_target",
-                             "testonly",
-                           ])
-  }
-
-  _info_plist_bundle_data = _info_plist_target + "_bundle_data"
-
-  bundle_data(_info_plist_bundle_data) {
-    forward_variables_from(invoker, [ "testonly" ])
-    sources = get_target_outputs(":$_info_plist_target")
-    outputs = [
-      "{{bundle_resources_dir}}/Info.plist",
-    ]
-    public_deps = [
-      ":$_info_plist_target",
-    ]
-  }
-
-  _target_name = target_name
-  _output_name = target_name
-  if (defined(invoker.output_name)) {
-    _output_name = invoker.output_name
-  }
-
-  # Create a file to track the build dependency on the framework_version and
-  # framework_contents variables.
-  _framework_toc = [
-                     "Version=" + invoker.framework_version,
-                     _output_name,
-                   ] + invoker.framework_contents
-  _framework_contents = [ _output_name ] + invoker.framework_contents
-  _framework_toc_file = "$target_out_dir/${target_name}.toc"
-  write_file(_framework_toc_file, _framework_toc)
-
-  # Create local variables for referencing different parts of the bundle.
-  _framework_target = _target_name
-  _framework_name = _output_name + ".framework"
-  _framework_base_dir = "$root_out_dir/$_framework_name"
-  _framework_root_dir =
-      _framework_base_dir + "/Versions/${invoker.framework_version}"
-
-  # Clean the entire framework if the framework_version changes.
-  _version_file = "$target_out_dir/${target_name}_version"
-  exec_script("//build/config/mac/prepare_framework_version.py",
-              [
-                rebase_path(_version_file),
-                rebase_path(_framework_base_dir),
-                invoker.framework_version,
-              ])
-
-  # Create the symlinks.
-  _framework_package_target = target_name + "_package"
-  action(_framework_package_target) {
-    script = "//build/config/mac/package_framework.py"
-
-    # The TOC file never needs to be read, since its contents are the values
-    # of GN variables. It is only used to trigger this rule when the values
-    # change.
-    inputs = [
-      _framework_toc_file,
-    ]
-
-    _stamp_file = "$target_out_dir/run_${_framework_package_target}.stamp"
-    outputs = [
-      _stamp_file,
-    ]
-
-    visibility = [ ":$_framework_target" ]
-
-    args = [
-             "--framework",
-             rebase_path(_framework_base_dir, root_build_dir),
-             "--stamp",
-             rebase_path(_stamp_file, root_build_dir),
-             "--version",
-             invoker.framework_version,
-             "--contents",
-           ] + _framework_contents
-
-    # It is not possible to list _framework_contents as outputs, since
-    # ninja does not properly stat symbolic links.
-    # https://github.com/ninja-build/ninja/issues/1186
-  }
-
-  _link_shared_library_target = target_name + "_shared_library"
-  _shared_library_bundle_data = target_name + "_shared_library_bundle_data"
-
-  shared_library(_link_shared_library_target) {
-    forward_variables_from(invoker,
-                           "*",
-                           [
-                             "assert_no_deps",
-                             "bundle_deps",
-                             "code_signing_enabled",
-                             "data_deps",
-                             "info_plist",
-                             "info_plist_target",
-                             "output_name",
-                             "visibility",
-                           ])
-    visibility = [ ":$_shared_library_bundle_data" ]
-    output_name = _output_name
-    output_prefix_override = true
-    output_extension = ""
-    output_dir = "$target_out_dir/$_link_shared_library_target"
-  }
-
-  bundle_data(_shared_library_bundle_data) {
-    visibility = [ ":$_framework_target" ]
-    forward_variables_from(invoker, [ "testonly" ])
-    sources = [
-      "$target_out_dir/$_link_shared_library_target/$_output_name",
-    ]
-    outputs = [
-      "{{bundle_executable_dir}}/$_output_name",
-    ]
-    public_deps = [
-      ":$_link_shared_library_target",
-    ]
-  }
-
-  _framework_public_config = _target_name + "_public_config"
-  config(_framework_public_config) {
-    # TODO(sdefresne): should we have a framework_dirs similar to lib_dirs
-    # and include_dirs to avoid duplicate values on the command-line?
-    visibility = [ ":$_framework_target" ]
-    ldflags = [
-      "-F",
-      rebase_path("$root_out_dir/.", root_build_dir),
-    ]
-    lib_dirs = [ root_out_dir ]
-    libs = [ _framework_name ]
-  }
-
-  create_bundle(_framework_target) {
-    forward_variables_from(invoker,
-                           [
-                             "data_deps",
-                             "deps",
-                             "public_deps",
-                             "testonly",
-                           ])
-
-    if (defined(invoker.visibility)) {
-      visibility = invoker.visibility
-      visibility += [ ":$_target_name+link" ]
-    }
-
-    if (!defined(deps)) {
-      deps = []
-    }
-    deps += [ ":$_info_plist_bundle_data" ]
-
-    if (defined(invoker.bundle_deps)) {
-      deps += invoker.bundle_deps
-    }
-
-    if (!defined(public_deps)) {
-      public_deps = []
-    }
-    public_deps += [
-      ":$_framework_package_target",
-      ":$_shared_library_bundle_data",
-    ]
-
-    bundle_root_dir = _framework_base_dir
-    bundle_contents_dir = _framework_root_dir
-    bundle_resources_dir = "$bundle_contents_dir/Resources"
-    bundle_executable_dir = bundle_contents_dir
-  }
-
-  group(_target_name + "+link") {
-    forward_variables_from(invoker,
-                           [
-                             "public_configs",
-                             "testonly",
-                             "visibility",
-                           ])
-    public_deps = [
-      ":$_target_name",
-    ]
-    if (!defined(public_configs)) {
-      public_configs = []
-    }
-    public_configs += [ ":$_framework_public_config" ]
-  }
-}
-
-set_defaults("mac_framework_bundle") {
-  configs = default_shared_library_configs
-}
-
-# Template to create a Mac executable application bundle.
-#
-# Arguments
-#
-#     package_type:
-#         (optional) string, the product package type to create. Options are:
-#             "app" to create a .app bundle (default)
-#             "xpc" to create an .xpc service bundle
-#
-#     info_plist:
-#         (optional) string, path to the Info.plist file that will be used for
-#         the bundle.
-#
-#     info_plist_target:
-#         (optional) string, if the info_plist is generated from an action,
-#         rather than a regular source file, specify the target name in lieu
-#         of info_plist. The two arguments are mutually exclusive.
-#
-#     output_name:
-#         (optional) string, name of the generated app without the
-#         .app suffix. If omitted, defaults to target_name.
-#
-#     extra_configs:
-#         (optional) list of label, additional configs to apply to the
-#         executable target.
-#
-#     remove_configs:
-#         (optional) list of label, default configs to remove from the target.
-#
-#     extra_substitutions:
-#         (optional) string array, 'key=value' pairs for extra fields which are
-#         specified in a source Info.plist template.
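-#
-# Example (illustrative sketch; the target name, sources, and plist path are
-# hypothetical):
-#
-#   mac_app_bundle("MyApp") {
-#     sources = [ "main.mm" ]
-#     info_plist = "//myapp/Info.plist"
-#     deps = [ "//myapp:my_app_lib" ]
-#   }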
-template("mac_app_bundle") {
-  _target_name = target_name
-  _output_name = target_name
-  if (defined(invoker.output_name)) {
-    _output_name = invoker.output_name
-  }
-
-  _package_type = "app"
-  if (defined(invoker.package_type)) {
-    _package_type = invoker.package_type
-  }
-
-  if (_package_type == "app") {
-    _output_extension = "app"
-    _product_type = "com.apple.product-type.application"
-    _write_pkg_info = true
-  } else if (_package_type == "xpc") {
-    _output_extension = "xpc"
-    _product_type = "com.apple.product-type.xpc-service"
-    _write_pkg_info = false
-  } else {
-    assert(false, "Unsupported packge_type: " + packge_type)
-  }
-
-  _executable_target = target_name + "_executable"
-  _executable_bundle_data = _executable_target + "_bundle_data"
-
-  _info_plist_target = target_name + "_info_plist"
-
-  mac_info_plist(_info_plist_target) {
-    executable_name = _output_name
-    forward_variables_from(invoker,
-                           [
-                             "extra_substitutions",
-                             "info_plist",
-                             "info_plist_target",
-                             "testonly",
-                           ])
-  }
-
-  if (_write_pkg_info) {
-    _pkg_info_target = target_name + "_pkg_info"
-
-    action(_pkg_info_target) {
-      forward_variables_from(invoker, [ "testonly" ])
-      script = "//build/config/mac/write_pkg_info.py"
-      sources = get_target_outputs(":$_info_plist_target")
-      outputs = [
-        "$target_gen_dir/$_pkg_info_target",
-      ]
-      args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
-             [ "--output" ] + rebase_path(outputs, root_build_dir)
-      deps = [
-        ":$_info_plist_target",
-      ]
-    }
-  }
-
-  executable(_executable_target) {
-    visibility = [ ":$_executable_bundle_data" ]
-    forward_variables_from(invoker,
-                           "*",
-                           [
-                             "assert_no_deps",
-                             "data_deps",
-                             "info_plist",
-                             "output_name",
-                             "visibility",
-                           ])
-    if (defined(extra_configs)) {
-      configs += extra_configs
-    }
-    if (defined(remove_configs)) {
-      configs -= remove_configs
-    }
-    output_name = _output_name
-    output_dir = "$target_out_dir/$_executable_target"
-  }
-
-  bundle_data(_executable_bundle_data) {
-    visibility = [ ":$_target_name" ]
-    forward_variables_from(invoker, [ "testonly" ])
-    sources = [
-      "$target_out_dir/$_executable_target/$_output_name",
-    ]
-    outputs = [
-      "{{bundle_executable_dir}}/$_output_name",
-    ]
-    public_deps = [
-      ":$_executable_target",
-    ]
-  }
-
-  _info_plist_bundle_data = _info_plist_target + "_bundle_data"
-
-  bundle_data(_info_plist_bundle_data) {
-    forward_variables_from(invoker, [ "testonly" ])
-    visibility = [ ":$_target_name" ]
-    sources = get_target_outputs(":$_info_plist_target")
-    outputs = [
-      "{{bundle_contents_dir}}/Info.plist",
-    ]
-    public_deps = [
-      ":$_info_plist_target",
-    ]
-  }
-
-  if (_write_pkg_info) {
-    _pkg_info_bundle_data = _pkg_info_target + "_bundle_data"
-
-    bundle_data(_pkg_info_bundle_data) {
-      forward_variables_from(invoker, [ "testonly" ])
-      visibility = [ ":$_target_name" ]
-      sources = get_target_outputs(":$_pkg_info_target")
-      outputs = [
-        "{{bundle_contents_dir}}/PkgInfo",
-      ]
-      public_deps = [
-        ":$_pkg_info_target",
-      ]
-    }
-  }
-
-  create_bundle(_target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "data_deps",
-                             "deps",
-                             "public_deps",
-                             "testonly",
-                           ])
-    if (!defined(deps)) {
-      deps = []
-    }
-    deps += [
-      ":$_executable_bundle_data",
-      ":$_info_plist_bundle_data",
-    ]
-    if (_write_pkg_info) {
-      deps += [ ":$_pkg_info_bundle_data" ]
-    }
-    product_type = _product_type
-    bundle_root_dir = "$root_out_dir/${_output_name}.${_output_extension}"
-    bundle_contents_dir = "$bundle_root_dir/Contents"
-    bundle_resources_dir = "$bundle_contents_dir/Resources"
-    bundle_executable_dir = "$bundle_contents_dir/MacOS"
-  }
-}
-
-# Template to package a loadable_module into a .plugin bundle.
-#
-# This takes no extra arguments that differ from a loadable_module.
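-#
-# Example (illustrative sketch; the target name and labels are hypothetical):
-#
-#   mac_plugin_bundle("MyPlugin") {
-#     sources = [ "plugin_main.mm" ]
-#     deps = [ "//myplugin:plugin_lib" ]
-#   }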
-template("mac_plugin_bundle") {
-  assert(defined(invoker.deps),
-         "Dependencies must be specified for $target_name")
-
-  _target_name = target_name
-  _loadable_module_target = _target_name + "_loadable_module"
-  _loadable_module_bundle_data = _loadable_module_target + "_bundle_data"
-
-  _output_name = _target_name
-  if (defined(invoker.output_name)) {
-    _output_name = invoker.output_name
-  }
-
-  loadable_module(_loadable_module_target) {
-    visibility = [ ":$_loadable_module_bundle_data" ]
-    forward_variables_from(invoker,
-                           "*",
-                           [
-                             "assert_no_deps",
-                             "data_deps",
-                             "output_name",
-                             "visibility",
-                           ])
-    output_dir = "$target_out_dir"
-    output_name = _output_name
-  }
-
-  bundle_data(_loadable_module_bundle_data) {
-    forward_variables_from(invoker, [ "testonly" ])
-    visibility = [ ":$_target_name" ]
-    sources = [
-      "$target_out_dir/${_output_name}.so",
-    ]
-    outputs = [
-      "{{bundle_executable_dir}}/$_output_name",
-    ]
-    public_deps = [
-      ":$_loadable_module_target",
-    ]
-  }
-
-  create_bundle(_target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "data_deps",
-                             "deps",
-                             "public_deps",
-                             "testonly",
-                             "visibility",
-                           ])
-    if (!defined(deps)) {
-      deps = []
-    }
-    deps += [ ":$_loadable_module_bundle_data" ]
-
-    bundle_root_dir = "$root_out_dir/$_output_name.plugin"
-    bundle_contents_dir = "$bundle_root_dir/Contents"
-    bundle_executable_dir = "$bundle_contents_dir/MacOS"
-  }
-}
diff --git a/build/config/mac/sdk_info.py b/build/config/mac/sdk_info.py
deleted file mode 100644
index 8a9edc1..0000000
--- a/build/config/mac/sdk_info.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import os
-import subprocess
-import sys
-
-# This script prints information about the build system, the operating
-# system, and the iOS or Mac SDK (depending on the platform passed in:
-# generally "iphonesimulator", "iphoneos" or "macosx").
-#
-# In the GYP build, this is done inside GYP itself based on the SDKROOT
-# variable.
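-#
-# Example invocation and (hypothetical, abridged) output; each setting is
-# printed as a key=value pair, with string values quoted:
-#
-#   $ python sdk_info.py macosx
-#   machine_os_build="17G65"
-#   sdk_build="17E189"
-#   sdk_version="10.13"
-#   xcode_build="9C40b"
-#   xcode_version="0920"
-#   xcode_version_int=920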
-
-def FormatVersion(version):
-  """Converts Xcode version to a format required for Info.plist."""
-  version = version.replace('.', '')
-  version = version + '0' * (3 - len(version))
-  return version.zfill(4)
-
-
-def FillXcodeVersion(settings):
-  """Fills the Xcode version and build number into |settings|."""
-  lines = subprocess.check_output(['xcodebuild', '-version']).splitlines()
-  settings['xcode_version'] = FormatVersion(lines[0].split()[-1])
-  settings['xcode_version_int'] = int(settings['xcode_version'], 10)
-  settings['xcode_build'] = lines[-1].split()[-1]
-
-
-def FillMachineOSBuild(settings):
-  """Fills OS build number into |settings|."""
-  settings['machine_os_build'] = subprocess.check_output(
-      ['sw_vers', '-buildVersion']).strip()
-
-
-def FillSDKPathAndVersion(settings, platform, xcode_version):
-  """Fills the SDK path and version for |platform| into |settings|."""
-  settings['sdk_path'] = subprocess.check_output([
-      'xcrun', '-sdk', platform, '--show-sdk-path']).strip()
-  settings['sdk_version'] = subprocess.check_output([
-      'xcrun', '-sdk', platform, '--show-sdk-version']).strip()
-  settings['sdk_platform_path'] = subprocess.check_output([
-      'xcrun', '-sdk', platform, '--show-sdk-platform-path']).strip()
-  # TODO: unconditionally use --show-sdk-build-version once Xcode 7.2 or
-  # higher is required to build Chrome for iOS or OS X.
-  if xcode_version >= '0720':
-    settings['sdk_build'] = subprocess.check_output([
-        'xcrun', '-sdk', platform, '--show-sdk-build-version']).strip()
-  else:
-    settings['sdk_build'] = settings['sdk_version']
-
-
-if __name__ == '__main__':
-  parser = argparse.ArgumentParser()
-  parser.add_argument("--developer_dir", required=False)
-  args, unknownargs = parser.parse_known_args()
-  if args.developer_dir:
-    os.environ['DEVELOPER_DIR'] = args.developer_dir
-
-  if len(unknownargs) != 1:
-    sys.stderr.write(
-        'usage: %s [iphoneos|iphonesimulator|macosx]\n' %
-        os.path.basename(sys.argv[0]))
-    sys.exit(1)
-
-  settings = {}
-  FillMachineOSBuild(settings)
-  FillXcodeVersion(settings)
-  FillSDKPathAndVersion(settings, unknownargs[0], settings['xcode_version'])
-
-  for key in sorted(settings):
-    value = settings[key]
-    if isinstance(value, str):
-      value = '"%s"' % value
-    print '%s=%s' % (key, value)
diff --git a/build/config/mac/symbols.gni b/build/config/mac/symbols.gni
deleted file mode 100644
index 6166b12..0000000
--- a/build/config/mac/symbols.gni
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/chrome_build.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-
-# This file declares arguments and configs that control whether dSYM debug
-# info is produced and whether build products are stripped.
-
-declare_args() {
-  # Produce dSYM files for targets that are configured to do so. dSYM
-  # generation is controlled globally as it is a linker output (produced via
-  # //build/toolchain/mac/linker_driver.py). Enabling this will result in
-  # all shared library, loadable module, and executable targets having a dSYM
-  # generated.
-  enable_dsyms = is_official_build || using_sanitizer
-
-  # Strip symbols from linked targets by default. If this is enabled, the
-  # //build/config/mac:strip_all config will be applied to all linked targets.
-  # If custom stripping parameters are required, remove that config from a
-  # linked target and apply custom -Wcrl,strip flags. See
-  # //build/toolchain/mac/linker_driver.py for more information.
-  enable_stripping = is_official_build
-}
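-
-# Example args.gn overrides for these arguments (hypothetical values):
-#
-#   enable_dsyms = true
-#   enable_stripping = true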
-
-# Save unstripped copies of targets with a ".unstripped" suffix. This is
-# useful to preserve the original output when enable_stripping=true but
-# we're not actually generating real dSYMs.
-save_unstripped_output = enable_stripping && !enable_dsyms
diff --git a/build/config/mac/write_pkg_info.py b/build/config/mac/write_pkg_info.py
deleted file mode 100644
index 3e2c3c9..0000000
--- a/build/config/mac/write_pkg_info.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import os
-import plist_util
-import sys
-
-# This script creates a PkgInfo file for an OS X .app bundle's plist.
-# Usage: python write_pkg_info.py --plist Foo.app/Contents/Info.plist \
-#           --output Foo.app/Contents/PkgInfo
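-#
-# For example, for an Info.plist whose CFBundlePackageType is APPL and which
-# has no CFBundleSignature, the resulting PkgInfo contains the eight
-# characters "APPL????".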
-
-def Main():
-  parser = argparse.ArgumentParser(
-      description='A script to write PkgInfo files for .app bundles.')
-  parser.add_argument('--plist', required=True,
-                      help='Path to the Info.plist for the .app.')
-  parser.add_argument('--output', required=True,
-                      help='Path to the desired output file.')
-  args = parser.parse_args()
-
-  # Remove the output if it exists already.
-  if os.path.exists(args.output):
-    os.unlink(args.output)
-
-  plist = plist_util.LoadPList(args.plist)
-  package_type = plist['CFBundlePackageType']
-  if package_type != 'APPL':
-    raise ValueError('Expected CFBundlePackageType to be %s, got %s' % \
-        ('APPL', package_type))
-
-  # The format of PkgInfo is eight characters, representing the bundle type
-  # and bundle signature, each four characters. If that is missing, four
-  # '?' characters are used instead.
-  signature_code = plist.get('CFBundleSignature', '????')
-  if len(signature_code) != 4:
-    raise ValueError('CFBundleSignature should be exactly four characters, ' +
-        'got %s' % signature_code)
-
-  with open(args.output, 'w') as fp:
-    fp.write('%s%s' % (package_type, signature_code))
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(Main())
diff --git a/build/config/mac/xcrun.py b/build/config/mac/xcrun.py
deleted file mode 100644
index 1f8dc20..0000000
--- a/build/config/mac/xcrun.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import os
-import subprocess
-import sys
-
-if __name__ == '__main__':
-  parser = argparse.ArgumentParser(
-      description='A script to execute a command via xcrun.')
-  parser.add_argument('--stamp', action='store', type=str,
-      help='Write a stamp file to this path on success.')
-  parser.add_argument('--developer_dir', required=False,
-                      help='Path to Xcode.')
-  args, unknown_args = parser.parse_known_args()
-
-  if args.developer_dir:
-    os.environ['DEVELOPER_DIR'] = args.developer_dir
-
-  rv = subprocess.check_call(['xcrun'] + unknown_args)
-  if rv == 0 and args.stamp:
-    if os.path.exists(args.stamp):
-      os.unlink(args.stamp)
-    open(args.stamp, 'w+').close()
-
-  sys.exit(rv)
diff --git a/build/config/merge_for_jumbo.py b/build/config/merge_for_jumbo.py
deleted file mode 100755
index 573b747..0000000
--- a/build/config/merge_for_jumbo.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This script creates a "jumbo" file which merges all incoming files
-for compiling.
-
-"""
-
-from __future__ import print_function
-
-import argparse
-import cStringIO
-import os
-
-def write_jumbo_files(inputs, outputs, written_input_set, written_output_set):
-  output_count = len(outputs)
-  input_count = len(inputs)
-
-  written_inputs = 0
-  for output_index, output_file in enumerate(outputs):
-    written_output_set.add(output_file)
-    if os.path.isfile(output_file):
-      with open(output_file, "r") as current:
-        current_jumbo_file = current.read()
-    else:
-      current_jumbo_file = None
-
-    out = cStringIO.StringIO()
-    out.write("/* This is a Jumbo file. Don't edit. */\n\n")
-    out.write("/* Generated with merge_for_jumbo.py. */\n\n")
-    input_limit = (output_index + 1) * input_count / output_count
-    while written_inputs < input_limit:
-      filename = inputs[written_inputs]
-      written_inputs += 1
-      out.write("#include \"%s\"\n" % filename)
-      written_input_set.add(filename)
-    new_jumbo_file = out.getvalue()
-    out.close()
-
-    if new_jumbo_file != current_jumbo_file:
-      with open(output_file, "w") as out:
-        out.write(new_jumbo_file)
-
-
-def main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument("--outputs", nargs="+", required=True,
-                      help='List of output files to split input into')
-  parser.add_argument("--file-list", required=True)
-  parser.add_argument("--verbose", action="store_true")
-  args = parser.parse_args()
-
-  lines = []
-  # If written with gn |write_file| each file is on its own line.
-  with open(args.file_list) as file_list_file:
-    lines = [line.strip() for line in file_list_file if line.strip()]
-  # If written with gn |response_file_contents| the files are space separated.
-  all_inputs = []
-  for line in lines:
-    all_inputs.extend(line.split())
-
-  written_output_set = set()  # Just for double checking
-  written_input_set = set()  # Just for double checking
-  for language_ext in (".cc", ".c", ".mm", ".S"):
-    if language_ext == ".cc":
-      ext_pattern = (".cc", ".cpp")
-    else:
-      ext_pattern = tuple([language_ext])
-
-    outputs = [x for x in args.outputs if x.endswith(ext_pattern)]
-    inputs = [x for x in all_inputs if x.endswith(ext_pattern)]
-
-    if not outputs:
-      assert not inputs
-      continue
-
-    write_jumbo_files(inputs, outputs, written_input_set, written_output_set)
-
-  assert set(args.outputs) == written_output_set, "Did not fill all outputs"
-  if args.verbose:
-    print("Generated %s (%d files) based on %s" % (
-      str(args.outputs), len(written_input_set), args.file_list))
-
-if __name__ == "__main__":
-  main()
diff --git a/build/config/mips.gni b/build/config/mips.gni
deleted file mode 100644
index 28194a4..0000000
--- a/build/config/mips.gni
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/v8_target_cpu.gni")
-
-# These are primarily relevant in current_cpu == "mips*" contexts, where
-# MIPS code is being compiled.  But they can also be relevant in other
-# contexts, where the code changes its behavior based on the CPU it is
-# generating code for.
-if (current_cpu == "mipsel" || v8_current_cpu == "mipsel" ||
-    current_cpu == "mips" || v8_current_cpu == "mips") {
-  declare_args() {
-    # MIPS arch variant. Possible values are:
-    #   "r1"
-    #   "r2"
-    #   "r6"
-    #   "loongson3"
-    mips_arch_variant = "r1"
-
-    # MIPS DSP ASE revision. Possible values are:
-    #   0: unavailable
-    #   1: revision 1
-    #   2: revision 2
-    mips_dsp_rev = 0
-
-    # MIPS SIMD Arch compilation flag.
-    mips_use_msa = false
-
-    # MIPS floating-point ABI. Possible values are:
-    #   "hard": sets the GCC -mhard-float option.
-    #   "soft": sets the GCC -msoft-float option.
-    mips_float_abi = "hard"
-
-    # MIPS32 floating-point register width. Possible values are:
-    #   "fp32": sets the GCC -mfp32 option.
-    #   "fp64": sets the GCC -mfp64 option.
-    #   "fpxx": sets the GCC -mfpxx option.
-    mips_fpu_mode = "fp32"
-  }
-} else if (current_cpu == "mips64el" || v8_current_cpu == "mips64el" ||
-           current_cpu == "mips64" || v8_current_cpu == "mips64") {
-  # MIPS arch variant. Possible values are:
-  #   "r2"
-  #   "r6"
-  #   "loongson3"
-  if (current_os == "android" || target_os == "android") {
-    declare_args() {
-      mips_arch_variant = "r6"
-
-      # MIPS SIMD Arch compilation flag.
-      mips_use_msa = true
-    }
-  } else {
-    declare_args() {
-      mips_arch_variant = "r2"
-
-      # MIPS SIMD Arch compilation flag.
-      mips_use_msa = false
-    }
-  }
-}
diff --git a/build/config/nacl/BUILD.gn b/build/config/nacl/BUILD.gn
deleted file mode 100644
index d7b22ec..0000000
--- a/build/config/nacl/BUILD.gn
+++ /dev/null
@@ -1,143 +0,0 @@
-# Copyright (c) 2014 The Native Client Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/nacl/config.gni")
-
-# Native Client Definitions
-config("nacl_defines") {
-  if (is_linux || is_android || is_nacl) {
-    defines = [
-      "_POSIX_C_SOURCE=199506",
-      "_XOPEN_SOURCE=600",
-      "_GNU_SOURCE=1",
-      "__STDC_LIMIT_MACROS=1",
-    ]
-  } else if (is_win) {
-    defines = [ "__STDC_LIMIT_MACROS=1" ]
-  }
-
-  if (current_cpu == "pnacl" && !is_nacl_nonsfi) {
-    # TODO: Remove the following definition once NACL_BUILD_ARCH and
-    # NACL_BUILD_SUBARCH are defined by the PNaCl toolchain.
-    defines += [ "NACL_BUILD_ARCH=pnacl" ]
-  }
-}
-
-config("nexe_defines") {
-  defines = [
-    "DYNAMIC_ANNOTATIONS_ENABLED=1",
-    "DYNAMIC_ANNOTATIONS_PREFIX=NACL_",
-  ]
-}
-
-config("nacl_warnings") {
-  if (is_win) {
-    # Some NaCl code uses forward declarations of static const variables,
-    # with initialized definitions later on.  (The alternative would be
-    # many, many more forward declarations of everything used in that
-    # const variable's initializer before the definition.)  The Windows
-    # compiler is too stupid to notice that there is an initializer later
-    # in the file, and warns about the forward declaration.
-    cflags = [ "/wd4132" ]
-  }
-}
-
-# The base target that all targets in the NaCl build should depend on.
-# This allows configs to be modified for everything in the NaCl build, even when
-# the NaCl build is composed into the Chrome build.  (GN has no functionality to
-# add flags to everything in //native_client; having a base target works around
-# that limitation.)
-source_set("nacl_base") {
-  public_configs = [
-    ":nacl_defines",
-    ":nacl_warnings",
-  ]
-  if (current_os == "nacl") {
-    public_configs += [ ":nexe_defines" ]
-  }
-}
-
-config("compiler") {
-  configs = []
-  cflags = []
-  ldflags = []
-  libs = []
-
-  if (is_clang && current_cpu != "pnacl") {
-    # -no-integrated-as is the default in nacl-clang for historical
-    # compatibility with inline assembly code and so forth.  But there
-    # are no such cases in Chromium code, and -integrated-as is nicer in
-    # general.  Moreover, the IRT must be built using LLVM's assembler
-    # on x86-64 to preserve sandbox base address hiding.  Use it
-    # everywhere for consistency (and possibly quicker builds).
-    cflags += [ "-integrated-as" ]
-  }
-  if (is_nacl_nonsfi) {
-    cflags += [ "--pnacl-allow-translate" ]
-    ldflags += [
-      "--pnacl-allow-translate",
-      "--pnacl-allow-native",
-      "-Wl,--noirt",
-      "-Wt,--noirt",
-      "-Wt,--noirtshim",
-
-      # The clang driver automatically injects -lpthread when using libc++, but
-      # the toolchain doesn't have it yet.  To get around this, use
-      # -nodefaultlibs and make each executable target depend on
-      # "//native_client/src/nonsfi/irt:nacl_sys_private".
-      "-nodefaultlibs",
-    ]
-    libs += [
-      "c++",
-      "m",
-      "c",
-      "pnaclmm",
-    ]
-    include_dirs = [ "//native_client/src/public/linux_syscalls" ]
-  }
-
-  asmflags = cflags
-}
-
-config("compiler_codegen") {
-  cflags = []
-
-  if (is_nacl_irt) {
-    cflags += [
-      # A debugger should be able to unwind IRT call frames.  This is
-      # the default behavior on x86-64 and when compiling C++ with
-      # exceptions enabled; the change is for the benefit of x86-32 C.
-      # The frame pointer is unnecessary when unwind tables are used.
-      "-fasynchronous-unwind-tables",
-      "-fomit-frame-pointer",
-    ]
-
-    if (current_cpu == "x86") {
-      # The x86-32 IRT needs to be callable with an under-aligned
-      # stack; so we disable SSE instructions, which can fault on
-      # misaligned addresses.  See
-      # https://code.google.com/p/nativeclient/issues/detail?id=3935
-      cflags += [
-        "-mstackrealign",
-        "-mno-sse",
-      ]
-    }
-  }
-
-  asmflags = cflags
-}
-
-config("irt_optimize") {
-  cflags = [
-    # Optimize for space, keep the IRT nexe small.
-    "-Os",
-
-    # These are omitted from non-IRT libraries to keep the libraries
-    # themselves small.
-    "-ffunction-sections",
-    "-fdata-sections",
-  ]
-
-  ldflags = [ "-Wl,--gc-sections" ]
-}
diff --git a/build/config/nacl/config.gni b/build/config/nacl/config.gni
deleted file mode 100644
index 77e15fc..0000000
--- a/build/config/nacl/config.gni
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-declare_args() {
-  # Native Client supports both the Newlib and Glibc C libraries; Newlib is
-  # assumed to be the default. Use this to determine whether Glibc is being
-  # used instead.
-  is_nacl_glibc = false
-}
-
-is_nacl_irt = false
-is_nacl_nonsfi = false
-
-nacl_toolchain_dir = "//native_client/toolchain/${host_os}_x86"
-
-if (is_nacl_glibc) {
-  if (current_cpu == "x86" || current_cpu == "x64") {
-    nacl_toolchain_package = "nacl_x86_glibc"
-  } else if (current_cpu == "arm") {
-    nacl_toolchain_package = "nacl_arm_glibc"
-  }
-} else {
-  nacl_toolchain_package = "pnacl_newlib"
-}
-
-if (current_cpu == "pnacl") {
-  _nacl_tuple = "pnacl"
-} else if (current_cpu == "x86" || current_cpu == "x64") {
-  _nacl_tuple = "x86_64-nacl"
-} else if (current_cpu == "arm") {
-  _nacl_tuple = "arm-nacl"
-} else if (current_cpu == "mipsel") {
-  _nacl_tuple = "mipsel-nacl"
-} else {
-  # In order to allow this file to be included unconditionally
-  # from build files that can't depend on //components/nacl/features.gni,
-  # we provide a dummy value that should be harmless if nacl isn't needed.
-  # If nacl *is* needed, this will result in a real error, indicating that
-  # people need to set the toolchain path correctly.
-  _nacl_tuple = "unknown"
-}
-
-nacl_toolchain_bindir = "${nacl_toolchain_dir}/${nacl_toolchain_package}/bin"
-nacl_toolchain_tooldir =
-    "${nacl_toolchain_dir}/${nacl_toolchain_package}/${_nacl_tuple}"
-nacl_toolprefix = "${nacl_toolchain_bindir}/${_nacl_tuple}-"
-
-nacl_irt_toolchain = "//build/toolchain/nacl:irt_" + target_cpu
-is_nacl_irt = current_toolchain == nacl_irt_toolchain
-
-# Non-SFI mode is a lightweight sandbox used by Chrome OS for running ARC
-# applications.
-nacl_nonsfi_toolchain = "//build/toolchain/nacl:newlib_pnacl_nonsfi"
-is_nacl_nonsfi = current_toolchain == nacl_nonsfi_toolchain
diff --git a/build/config/nacl/rules.gni b/build/config/nacl/rules.gni
deleted file mode 100644
index 9bb4ede..0000000
--- a/build/config/nacl/rules.gni
+++ /dev/null
@@ -1,188 +0,0 @@
-# Copyright 2015 The Native Client Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/nacl/config.gni")
-
-# Generate a nmf file
-#
-# Native Client Manifest (nmf) is a JSON file that tells the browser where to
-# download and load Native Client application files and libraries.
-#
-# Variables:
-#   executables: .nexe/.pexe/.bc executables to generate nmf for
-#   lib_prefix: path to prepend to shared libraries in the nmf
-#   nmf: the name and the path of the output file
-#   nmfflags: additional flags for the nmf generator
-#   stage_dependencies: directory for staging libraries
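-#
-# Example (illustrative sketch; names and paths are hypothetical):
-#
-#   generate_nmf("my_module_nmf") {
-#     nmf = "$root_out_dir/my_module.nmf"
-#     executables = [ "$root_out_dir/my_module.nexe" ]
-#     deps = [ ":my_module" ]
-#   }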
-template("generate_nmf") {
-  assert(defined(invoker.executables), "Must define executables")
-  assert(defined(invoker.nmf), "Must define nmf")
-
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "deps",
-                             "data_deps",
-                             "executables",
-                             "lib_prefix",
-                             "nmf",
-                             "nmfflags",
-                             "public_deps",
-                             "stage_dependencies",
-                             "testonly",
-                             "visibility",
-                           ])
-    if (!defined(nmfflags)) {
-      nmfflags = []
-    }
-
-    # TODO(phosek): Remove this conditional once
-    # https://bugs.chromium.org/p/nativeclient/issues/detail?id=4339 is
-    # resolved.
-    if (current_cpu == "pnacl") {
-      objdump = rebase_path("${nacl_toolchain_bindir}/x86_64-nacl-objdump")
-    } else {
-      objdump = rebase_path("${nacl_toolprefix}objdump")
-    }
-    if (host_os == "win") {
-      objdump += ".exe"
-    }
-
-    script = "//native_client_sdk/src/tools/create_nmf.py"
-    inputs = [
-      objdump,
-    ]
-    sources = executables
-    outputs = [
-      nmf,
-    ]
-    if (is_nacl_glibc) {
-      if (defined(stage_dependencies)) {
-        nmfflags += [ "--stage-dependencies=" +
-                      rebase_path(stage_dependencies, root_build_dir) ]
-        lib_path = stage_dependencies
-      } else {
-        lib_path = root_build_dir
-      }
-      if (defined(lib_prefix)) {
-        nmfflags += [ "--lib-prefix=" + lib_prefix ]
-        lib_path += "/${lib_prefix}"
-      }
-
-      # Starts empty so the code below can use += everywhere.
-      data = []
-
-      nmfflags +=
-          [ "--library-path=" + rebase_path(root_out_dir, root_build_dir) ]
-
-      # NOTE: There is no explicit dependency for the lib directory
-      # (lib32 and lib64 for x86/x64) created in the product directory.
-      # They are created as a side-effect of nmf creation.
-      if (current_cpu != "x86" && current_cpu != "x64") {
-        nmfflags +=
-            [ "--library-path=" +
-              rebase_path("${nacl_toolchain_tooldir}/lib", root_build_dir) ]
-        if (current_cpu == "arm") {
-          data += [ "${lib_path}/libarm/" ]
-        } else {
-          data += [ "${lib_path}/lib/" ]
-        }
-      } else {
-        # For x86-32, the lib/ directory is called lib32/ instead.
-        if (current_cpu == "x86") {
-          nmfflags +=
-              [ "--library-path=" +
-                rebase_path("${nacl_toolchain_tooldir}/lib32", root_build_dir) ]
-          data += [ "${lib_path}/lib32/" ]
-        }
-
-        # x86-32 Windows needs to build both x86-32 and x86-64 NaCl
-        # binaries into the same nmf covering both architectures.  That
-        # gets handled at a higher level (see the nacl_test_data template),
-        # so a single generate_nmf invocation gets both x86-32 and x86-64
-        # nexes listed in executables.
-        if (current_cpu == "x64" || target_os == "win") {
-          # For x86-64, the lib/ directory is called lib64/ instead
-          # when copied by create_nmf.py.
-          glibc_tc = "//build/toolchain/nacl:glibc"
-          assert(current_toolchain == "${glibc_tc}_${current_cpu}")
-          if (current_cpu == "x64") {
-            x64_out_dir = root_out_dir
-          } else {
-            x64_out_dir = get_label_info(":${target_name}(${glibc_tc}_x64)",
-                                         "root_out_dir")
-          }
-          nmfflags += [
-            "--library-path=" + rebase_path(x64_out_dir, root_build_dir),
-            "--library-path=" +
-                rebase_path("${nacl_toolchain_tooldir}/lib", root_build_dir),
-          ]
-          data += [ "${lib_path}/lib64/" ]
-        }
-      }
-    }
-    args = [
-             "--no-default-libpath",
-             "--objdump=" + rebase_path(objdump, root_build_dir),
-             "--output=" + rebase_path(nmf, root_build_dir),
-           ] + nmfflags + rebase_path(sources, root_build_dir)
-    if (is_nacl_glibc && current_cpu == "arm") {
-      deps += [ "//native_client/src/untrusted/elf_loader:elf_loader" ]
-    }
-  }
-}
-
-# Generate a nmf file for Non-SFI tests
-#
-# Non-SFI tests use a different manifest format from regular Native Client and
-# as such requires a different generator.
-#
-# Variables:
-#   executable: Non-SFI .nexe executable to generate nmf for
-#   nmf: the name and the path of the output file
-#   nmfflags: additional flags for the nmf generator
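-#
-# Example (illustrative sketch; names and paths are hypothetical):
-#
-#   generate_nonsfi_test_nmf("my_test_nmf") {
-#     nmf = "$root_out_dir/my_test.nmf"
-#     executable = "$root_out_dir/my_test.nexe"
-#     deps = [ ":my_test" ]
-#   }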
-template("generate_nonsfi_test_nmf") {
-  assert(defined(invoker.executable), "Must define executable")
-  assert(defined(invoker.nmf), "Must define nmf")
-
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "deps",
-                             "data_deps",
-                             "executable",
-                             "nmf",
-                             "testonly",
-                             "public_deps",
-                             "visibility",
-                           ])
-
-    script = "//ppapi/tests/create_nonsfi_test_nmf.py"
-    sources = [
-      executable,
-    ]
-    outputs = [
-      nmf,
-    ]
-
-    # NOTE: We use target_cpu rather than current_cpu on purpose because
-    # current_cpu is always going to be pnacl for Non-SFI, but the Non-SFI
-    # .nexe executable is always translated to run on the target machine.
-    if (target_cpu == "x86") {
-      arch = "x86-32"
-    } else if (target_cpu == "x64") {
-      arch = "x86-64"
-    } else {
-      arch = target_cpu
-    }
-    args = [
-      "--program=" + rebase_path(executable, root_build_dir),
-      "--arch=${arch}",
-      "--output=" + rebase_path(nmf, root_build_dir),
-    ]
-    if (defined(invoker.nmfflags)) {
-      args += invoker.nmfflags
-    }
-  }
-}
diff --git a/build/config/pch.gni b/build/config/pch.gni
deleted file mode 100644
index 93bd2fe..0000000
--- a/build/config/pch.gni
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/toolchain/goma.gni")
-
-declare_args() {
-  # Precompiled header file support is available by default, but it is
-  # disabled when using a distributed build system (like goma) or when
-  # doing official builds.
-  enable_precompiled_headers = !is_official_build && !use_goma
-}
diff --git a/build/config/posix/BUILD.gn b/build/config/posix/BUILD.gn
deleted file mode 100644
index 91405fd..0000000
--- a/build/config/posix/BUILD.gn
+++ /dev/null
@@ -1,135 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//buildtools/deps_revisions.gni")
-import("//build/config/c++/c++.gni")
-import("//build/config/clang/clang.gni")
-import("//build/config/compiler/compiler.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/config/sysroot.gni")
-import("//build/toolchain/toolchain.gni")
-
-# TODO(crbug.com/830987): Come up with a better name for the "POSIX or
-# Fuchsia" configuration.
-assert(is_posix || is_fuchsia)
-
-group("posix") {
-  visibility = [ "//:optimize_gn_gen" ]
-}
-
-# This is included by reference in the //build/config/compiler:runtime_library
-# config that is applied to all targets. It is here to separate out the logic
-# that is Posix-only. Please see that target for advice on what should go in
-# :runtime_library vs. :compiler.
-config("runtime_library") {
-  asmflags = []
-  cflags = []
-  cflags_c = []
-  cflags_cc = []
-  cflags_objc = []
-  cflags_objcc = []
-  defines = []
-  ldflags = []
-  lib_dirs = []
-  libs = []
-
-  if (use_custom_libcxx) {
-    if (libcpp_is_static) {
-      # Don't leak any symbols on a static build.
-      defines += [
-        "_LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS",
-        "_LIBCXXABI_DISABLE_VISIBILITY_ANNOTATIONS",
-      ]
-    }
-    if (!is_clang) {
-      # Gcc has a built-in abs() definition with default visibility.
-      # If it was not disabled, it would conflict with libc++'s abs()
-      # with hidden visibility.
-      cflags += [ "-fno-builtin-abs" ]
-    }
-    cflags_cc += [
-      "-nostdinc++",
-      "-isystem" + rebase_path("$libcxx_prefix/include", root_build_dir),
-      "-isystem" + rebase_path("$libcxxabi_prefix/include", root_build_dir),
-    ]
-    defines += [
-      "CR_LIBCXX_REVISION=$libcxx_svn_revision",
-      "CR_LIBCXXABI_REVISION=$libcxxabi_svn_revision",
-    ]
-
-    # Make sure we don't link against libc++ or libstdc++.
-    if (is_clang) {
-      # //build/config/android:runtime_library adds -nostdlib, which suppresses
-      # linking against all system libraries.  -nostdlib++ would be redundant,
-      # and would generate an unused warning in this case.
-      if (!is_android) {
-        ldflags += [ "-nostdlib++" ]
-      }
-    } else {
-      ldflags += [ "-nodefaultlibs" ]
-
-      # Unfortunately, there's no way to disable linking against just libc++
-      # (gcc doesn't have -nostdlib++:
-      # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83931); -nodefaultlibs
-      # removes all of the default libraries, so add back the ones that we need.
-      libs += [
-        "c",
-        "gcc_s",
-        "m",
-        "rt",
-      ]
-    }
-  }
-
-  if (!is_mac && !is_ios && sysroot != "") {
-    # Pass the sysroot to all C compiler variants, the assembler, and linker.
-    sysroot_flags = [ "--sysroot=" + rebase_path(sysroot, root_build_dir) ]
-    if (is_linux) {
-      # This is here so that all files get recompiled after a sysroot roll and
-      # when turning the sysroot on or off. (defines are passed via the command
-      # line, and the build system rebuilds things when their command line
-      # changes). Nothing should ever read this define.
-      sysroot_hash =
-          exec_script("//build/linux/sysroot_scripts/install-sysroot.py",
-                      [ "--print-hash=$current_cpu" ],
-                      "trim string",
-                      [ "//build/linux/sysroot_scripts/sysroots.json" ])
-      defines += [ "CR_SYSROOT_HASH=$sysroot_hash" ]
-    }
-    asmflags += sysroot_flags
-
-    link_sysroot_flags =
-        [ "--sysroot=" + rebase_path(link_sysroot, root_build_dir) ]
-    ldflags += link_sysroot_flags
-
-    # When use_custom_libcxx=true, some -isystem flags get passed to
-    # cflags_cc to set up libc++ include paths.  We want to make sure
-    # the sysroot includes take lower precedence than the libc++
-    # ones, so they must appear later in the command line.  However,
-    # the gn reference states "These variant-specific versions of
-    # cflags* will be appended on the compiler command line after
-    # 'cflags'."  Because of this, we must set the sysroot flags for
-    # all cflags variants instead of using 'cflags' directly.
-    cflags_c += sysroot_flags
-    cflags_cc += sysroot_flags
-    cflags_objc += sysroot_flags
-    cflags_objcc += sysroot_flags
-
-    # Need to get some linker flags out of the sysroot.
-    ld_paths = exec_script("sysroot_ld_path.py",
-                           [
-                             rebase_path("//build/linux/sysroot_ld_path.sh",
-                                         root_build_dir),
-                             rebase_path(link_sysroot),
-                           ],
-                           "list lines")
-    foreach(ld_path, ld_paths) {
-      ld_path = rebase_path(ld_path, root_build_dir)
-      ldflags += [
-        "-L" + ld_path,
-        "-Wl,-rpath-link=" + ld_path,
-      ]
-    }
-  }
-}
diff --git a/build/config/posix/sysroot_ld_path.py b/build/config/posix/sysroot_ld_path.py
deleted file mode 100644
index 7056207..0000000
--- a/build/config/posix/sysroot_ld_path.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file takes two arguments, the relative location of the shell script that
-# does the checking, and the name of the sysroot.
-
-# TODO(brettw) the build/linux/sysroot_ld_path.sh script should be rewritten in
-# Python in this file.
-
-import subprocess
-import sys
-
-if len(sys.argv) != 3:
-  print "Need two arguments"
-  sys.exit(1)
-
-result = subprocess.check_output([sys.argv[1], sys.argv[2]]).strip()
-result = result.replace(" ", "\n")
-if result != "":
-  print result
diff --git a/build/config/python.gni b/build/config/python.gni
deleted file mode 100644
index e24025f..0000000
--- a/build/config/python.gni
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Creates a group() that lists Python sources as |data|.
-# Having such targets serves two purposes:
-# 1) Causes files to be included in runtime_deps, so that they are uploaded to
-#    swarming when running tests remotely.
-# 2) Causes "gn analyze" to know about all Python inputs so that tests will be
-#    re-run when relevant Python files change.
-#
-# All non-trivial Python scripts should use a "pydeps" file to track their
-# sources. To create a .pydep file for a target in //example:
-#
-#   build/print_python_deps.py \
-#       --root example \
-#       --output example/$target_name.pydeps \
-#       path/to/your/script.py
-#
-# Keep the .pydep file up-to-date by adding to //PRESUBMIT.py under one of:
-#     _ANDROID_SPECIFIC_PYDEPS_FILES, _GENERIC_PYDEPS_FILES
-#
-# Variables
-#   pydeps_file: Path to .pydeps file to read sources from (optional).
-#   data: Additional files to include in data. E.g. non-.py files needed by the
-#         library, or .py files that are conditionally / lazily imported.
-#
-# Example
-#   python_library("my_library_py") {
-#      pydeps_file = "my_library.pydeps"
-#      data = [ "foo.dat" ]
-#   }
-template("python_library") {
-  group(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "data_deps",
-                             "deps",
-                             "testonly",
-                             "visibility",
-                           ])
-
-    if (defined(invoker.pydeps_file)) {
-      _py_files = read_file(invoker.pydeps_file, "list lines")
-
-      # Filter out comments.
-      set_sources_assignment_filter([ "#*" ])
-      sources = _py_files
-
-      # Even though the .pydep file is not used at runtime, it must be added
-      # so that "gn analyze" will mark the target as changed when .py files
-      # are removed but none are added or modified.
-      data = sources + [ invoker.pydeps_file ]
-    } else {
-      data = []
-    }
-    if (defined(invoker.data)) {
-      data += invoker.data
-    }
-  }
-}
diff --git a/build/config/sanitizers/BUILD.gn b/build/config/sanitizers/BUILD.gn
deleted file mode 100644
index ebad6b0..0000000
--- a/build/config/sanitizers/BUILD.gn
+++ /dev/null
@@ -1,614 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build_overrides/build.gni")
-import("//build/config/chrome_build.gni")
-import("//build/config/chromecast_build.gni")
-import("//build/config/clang/clang.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/toolchain/toolchain.gni")
-
-if (is_ios) {
-  import("//build/config/ios/ios_sdk.gni")
-}
-
-# Contains the dependencies needed for sanitizers to link into
-# executables and shared_libraries. Unconditionally depend upon
-# "//build/config:exe_and_shlib_deps" to pull in this target.
-group("deps") {
-  visibility = [ "//build/config:exe_and_shlib_deps" ]
-  if (using_sanitizer) {
-    public_configs = [
-      ":sanitizer_options_link_helper",
-
-      # Even when a target removes default_sanitizer_flags, it may be depending
-      # on a library that did not remove default_sanitizer_flags. Thus, we need
-      # to add the ldflags here as well as in default_sanitizer_flags.
-      ":default_sanitizer_ldflags",
-    ]
-    deps = [
-      ":options_sources",
-    ]
-    if (is_win) {
-      exe = ".exe"
-    } else {
-      exe = ""
-    }
-    data = [
-      "//tools/valgrind/asan/",
-      "$clang_base_path/bin/llvm-symbolizer${exe}",
-    ]
-    if (use_prebuilt_instrumented_libraries ||
-        use_locally_built_instrumented_libraries) {
-      deps += [ "//third_party/instrumented_libraries:deps" ]
-    }
-
-    # ASAN is supported on iOS but the runtime library depends on the compiler
-    # used (Chromium version of clang versus Xcode version of clang). Only copy
-    # the ASAN runtime on iOS if building with Chromium clang.
-    if (is_win || is_mac || (is_ios && !use_xcode_clang)) {
-      data_deps = [
-        ":copy_asan_runtime",
-      ]
-    }
-    if (is_mac || (is_ios && !use_xcode_clang)) {
-      public_deps = [
-        ":asan_runtime_bundle_data",
-      ]
-    }
-  }
-}
-
-if ((is_mac || is_win || (is_ios && !use_xcode_clang)) && using_sanitizer) {
-  if (is_mac) {
-    _clang_rt_dso_path = "darwin/libclang_rt.asan_osx_dynamic.dylib"
-  } else if (is_ios) {
-    _clang_rt_dso_path = "darwin/libclang_rt.asan_iossim_dynamic.dylib"
-  } else if (is_win && target_cpu == "x86") {
-    _clang_rt_dso_path = "windows/clang_rt.asan_dynamic-i386.dll"
-  } else if (is_win && target_cpu == "x64") {
-    _clang_rt_dso_path = "windows/clang_rt.asan_dynamic-x86_64.dll"
-  }
-
-  _clang_rt_dso_full_path =
-      "$clang_base_path/lib/clang/$clang_version/lib/$_clang_rt_dso_path"
-
-  if (!is_ios) {
-    copy("copy_asan_runtime") {
-      sources = [
-        _clang_rt_dso_full_path,
-      ]
-      outputs = [
-        "$root_out_dir/{{source_file_part}}",
-      ]
-    }
-  } else {
-    # On iOS, the runtime library needs to be code signed (ad hoc signature)
-    # starting with Xcode 8, so use an action instead of a copy on iOS.
-    action("copy_asan_runtime") {
-      script = "//build/config/ios/codesign.py"
-      sources = [
-        _clang_rt_dso_full_path,
-      ]
-      outputs = [
-        "$root_out_dir/" + get_path_info(sources[0], "file"),
-      ]
-      args = [
-        "code-sign-file",
-        "--identity=" + ios_code_signing_identity,
-        "--output=" + rebase_path(outputs[0], root_build_dir),
-        rebase_path(sources[0], root_build_dir),
-      ]
-    }
-  }
-
-  if (is_mac || is_ios) {
-    bundle_data("asan_runtime_bundle_data") {
-      sources = get_target_outputs(":copy_asan_runtime")
-      outputs = [
-        "{{bundle_executable_dir}}/{{source_file_part}}",
-      ]
-      public_deps = [
-        ":copy_asan_runtime",
-      ]
-    }
-  }
-}
-
-config("sanitizer_options_link_helper") {
-  if (is_mac || is_ios) {
-    ldflags = [ "-Wl,-U,_sanitizer_options_link_helper" ]
-  } else if (!is_win) {
-    ldflags = [ "-Wl,-u_sanitizer_options_link_helper" ]
-  }
-}
-
-static_library("options_sources") {
-  # This is a static_library instead of a source_set, as it shouldn't be
-  # unconditionally linked into targets.
-  visibility = [
-    ":deps",
-    "//:gn_visibility",
-  ]
-  sources = [
-    "//build/sanitizers/sanitizer_options.cc",
-  ]
-
-  # Don't compile this target with any sanitizer code. It can be called from
-  # the sanitizer runtimes, so instrumenting these functions could cause
-  # recursive calls into the runtime if there is an error.
-  configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]
-
-  if (is_asan) {
-    if (!defined(asan_suppressions_file)) {
-      asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
-    }
-    sources += [ asan_suppressions_file ]
-  }
-
-  if (is_lsan) {
-    if (!defined(lsan_suppressions_file)) {
-      lsan_suppressions_file = "//build/sanitizers/lsan_suppressions.cc"
-    }
-    sources += [ lsan_suppressions_file ]
-  }
-
-  if (is_tsan) {
-    if (!defined(tsan_suppressions_file)) {
-      tsan_suppressions_file = "//build/sanitizers/tsan_suppressions.cc"
-    }
-    sources += [ tsan_suppressions_file ]
-  }
-}
-
-# Applies linker flags necessary when either :deps or :default_sanitizer_flags
-# are used.
-config("default_sanitizer_ldflags") {
-  visibility = [
-    ":default_sanitizer_flags",
-    ":deps",
-  ]
-
-  if (is_posix || is_fuchsia) {
-    ldflags = []
-    if (is_asan) {
-      ldflags += [ "-fsanitize=address" ]
-      if (is_mac) {
-        # https://crbug.com/708707
-        ldflags += [ "-fno-sanitize-address-use-after-scope" ]
-      } else {
-        ldflags += [ "-fsanitize-address-use-after-scope" ]
-      }
-    }
-    if (is_lsan) {
-      ldflags += [ "-fsanitize=leak" ]
-    }
-    if (is_tsan) {
-      ldflags += [ "-fsanitize=thread" ]
-    }
-    if (is_msan) {
-      ldflags += [ "-fsanitize=memory" ]
-    }
-    if (is_ubsan || is_ubsan_security) {
-      ldflags += [ "-fsanitize=undefined" ]
-    }
-    if (is_ubsan_null) {
-      ldflags += [ "-fsanitize=null" ]
-    }
-    if (is_ubsan_vptr) {
-      ldflags += [ "-fsanitize=vptr" ]
-    }
-
-    if (use_sanitizer_coverage) {
-      ldflags += [ "-fsanitize-coverage=$sanitizer_coverage_flags" ]
-    }
-
-    if (is_cfi && current_toolchain == default_toolchain) {
-      ldflags += [ "-fsanitize=cfi-vcall" ]
-      if (use_cfi_cast) {
-        ldflags += [
-          "-fsanitize=cfi-derived-cast",
-          "-fsanitize=cfi-unrelated-cast",
-        ]
-      }
-      if (use_cfi_icall) {
-        ldflags += [ "-fsanitize=cfi-icall" ]
-      }
-      if (use_cfi_diag) {
-        ldflags += [ "-fno-sanitize-trap=cfi" ]
-        if (use_cfi_recover) {
-          ldflags += [ "-fsanitize-recover=cfi" ]
-        }
-      }
-    }
-  } else if (is_win && is_asan) {
-    # Windows directly calls link.exe instead of the compiler driver when
-    # linking.  Hence, pass the runtime libraries instead of -fsanitize=address.
-    # In the static-library build, libraries are different for executables
-    # and dlls, see link_executable and link_shared_library below.
-    # This here handles only the component build.
-    if (target_cpu == "x64") {
-      # Windows 64-bit.
-      if (is_component_build) {
-        libs = [
-          "clang_rt.asan_dynamic-x86_64.lib",
-          "clang_rt.asan_dynamic_runtime_thunk-x86_64.lib",
-        ]
-      }
-    } else {
-      assert(target_cpu == "x86", "WinASan unsupported architecture")
-      if (is_component_build) {
-        libs = [
-          "clang_rt.asan_dynamic-i386.lib",
-          "clang_rt.asan_dynamic_runtime_thunk-i386.lib",
-        ]
-      }
-    }
-  }
-}
-
-config("common_sanitizer_flags") {
-  cflags = []
-
-  # Sanitizers need line table info for stack traces. They don't need type info
-  # or variable info, so we can leave that out to speed up the build (unless
-  # it's explicitly asked for by setting |sanitizer_keep_symbols| to true).
-  if (using_sanitizer) {
-    assert(is_clang, "sanitizers only supported with clang")
-    assert(!is_official_build, "sanitizers not supported in official builds")
-
-    if (!sanitizer_no_symbols) {
-      if (!sanitizer_keep_symbols) {
-        cflags += [ "-gline-tables-only" ]
-      }
-
-      cflags += [
-        # Column info in debug data confuses Visual Studio's debugger, so don't
-        # use this by default.  However, clusterfuzz needs it for good
-        # attribution of reports to CLs, so turn it on there.
-        "-gcolumn-info",
-      ]
-    }
-
-    # Frame pointers are controlled in //build/config/compiler:default_stack_frames
-  }
-}
-
-config("asan_flags") {
-  cflags = []
-  if (is_asan) {
-    cflags += [ "-fsanitize=address" ]
-    if (!is_mac) {
-      cflags += [ "-fsanitize-address-use-after-scope" ]
-    } else {
-      # https://crbug.com/708707
-      cflags += [ "-fno-sanitize-address-use-after-scope" ]
-    }
-    if (!asan_globals) {
-      cflags += [
-        "-mllvm",
-        "-asan-globals=0",
-      ]
-    }
-    if (is_win) {
-      if (!defined(asan_win_blacklist_path)) {
-        asan_win_blacklist_path =
-            rebase_path("//tools/memory/asan/blacklist_win.txt", root_build_dir)
-      }
-      cflags += [ "-fsanitize-blacklist=$asan_win_blacklist_path" ]
-    } else {
-      # TODO(rnk): Remove this as discussed in http://crbug.com/427202.
-      if (!defined(asan_blacklist_path)) {
-        asan_blacklist_path =
-            rebase_path("//tools/memory/asan/blacklist.txt", root_build_dir)
-      }
-      cflags += [ "-fsanitize-blacklist=$asan_blacklist_path" ]
-    }
-  }
-}
-
-config("link_executable") {
-  if (is_asan && is_win && !is_component_build) {
-    if (target_cpu == "x64") {
-      libs = [ "clang_rt.asan-x86_64.lib" ]
-      ldflags = [
-        "-wholearchive:clang_rt.asan-x86_64.lib",
-        "-include:__asan_get_free_stack",
-      ]
-    } else {
-      assert(target_cpu == "x86", "WinASan unsupported architecture")
-      libs = [ "clang_rt.asan-i386.lib" ]
-      ldflags = [
-        "-wholearchive:clang_rt.asan-i386.lib",
-
-        # TODO(crbug.com/777087): The linker in VS 15.4 does not respect
-        # /wholearchive. This manually includes a symbol to work around that.
-        # We can remove it when the linker is fixed.
-        "-include:___asan_get_free_stack",
-      ]
-    }
-  }
-}
-
-config("link_shared_library") {
-  if (is_asan && is_win && !is_component_build) {
-    if (target_cpu == "x64") {
-      libs = [ "clang_rt.asan_dll_thunk-x86_64.lib" ]
-    } else {
-      assert(target_cpu == "x86", "WinASan unsupported architecture")
-      libs = [ "clang_rt.asan_dll_thunk-i386.lib" ]
-    }
-  }
-}
-
-config("cfi_flags") {
-  cflags = []
-  if (is_cfi && current_toolchain == default_toolchain) {
-    if (!defined(cfi_blacklist_path)) {
-      cfi_blacklist_path =
-          rebase_path("//tools/cfi/blacklist.txt", root_build_dir)
-    }
-    cflags += [
-      "-fsanitize=cfi-vcall",
-      "-fsanitize-blacklist=$cfi_blacklist_path",
-    ]
-
-    if (use_cfi_cast) {
-      cflags += [
-        "-fsanitize=cfi-derived-cast",
-        "-fsanitize=cfi-unrelated-cast",
-      ]
-    }
-
-    if (use_cfi_icall) {
-      cflags += [ "-fsanitize=cfi-icall" ]
-    }
-
-    if (use_cfi_diag) {
-      cflags += [ "-fno-sanitize-trap=cfi" ]
-      if (is_win) {
-        cflags += [
-          "/Oy-",
-          "/Ob0",
-        ]
-      } else {
-        cflags += [
-          "-fno-inline-functions",
-          "-fno-inline",
-          "-fno-omit-frame-pointer",
-          "-O1",
-        ]
-      }
-      if (use_cfi_recover) {
-        cflags += [ "-fsanitize-recover=cfi" ]
-      }
-    }
-  }
-}
-
-# crbug.com/785442: Fix cfi-icall failures for code that casts pointer argument
-# types in function pointer type signatures.
-config("cfi_icall_generalize_pointers") {
-  if (is_clang && is_cfi && use_cfi_icall) {
-    cflags = [ "-fsanitize-cfi-icall-generalize-pointers" ]
-  }
-}
-
-config("coverage_flags") {
-  cflags = []
-  if (use_sanitizer_coverage) {
-    cflags += [
-      "-fsanitize-coverage=$sanitizer_coverage_flags",
-      "-mllvm",
-      "-sanitizer-coverage-prune-blocks=1",
-    ]
-    if (current_cpu == "arm") {
-      # http://crbug.com/517105
-      cflags += [
-        "-mllvm",
-        "-sanitizer-coverage-block-threshold=0",
-      ]
-    }
-    defines = [ "SANITIZER_COVERAGE" ]
-  }
-}
-
-config("lsan_flags") {
-  if (is_lsan) {
-    cflags = [ "-fsanitize=leak" ]
-  }
-}
-
-config("msan_flags") {
-  if (is_msan) {
-    assert(is_linux, "msan only supported on linux x86_64")
-    if (!defined(msan_blacklist_path)) {
-      msan_blacklist_path =
-          rebase_path("//tools/msan/blacklist.txt", root_build_dir)
-    }
-    cflags = [
-      "-fsanitize=memory",
-      "-fsanitize-memory-track-origins=$msan_track_origins",
-      "-fsanitize-blacklist=$msan_blacklist_path",
-    ]
-  }
-}
-
-config("tsan_flags") {
-  if (is_tsan) {
-    assert(is_linux, "tsan only supported on linux x86_64")
-    if (!defined(tsan_blacklist_path)) {
-      tsan_blacklist_path =
-          rebase_path("//tools/memory/tsan_v2/ignores.txt", root_build_dir)
-    }
-    cflags = [
-      "-fsanitize=thread",
-      "-fsanitize-blacklist=$tsan_blacklist_path",
-    ]
-  }
-}
-
-config("ubsan_flags") {
-  cflags = []
-  if (is_ubsan) {
-    if (!defined(ubsan_blacklist_path)) {
-      ubsan_blacklist_path =
-          rebase_path("//tools/ubsan/blacklist.txt", root_build_dir)
-    }
-    cflags += [
-      # Yasm dies with an "Illegal instruction" error when bounds checking is
-      # enabled. See http://crbug.com/489901
-      # "-fsanitize=bounds",
-      "-fsanitize=float-divide-by-zero",
-      "-fsanitize=integer-divide-by-zero",
-      "-fsanitize=null",
-      "-fsanitize=object-size",
-      "-fsanitize=pointer-overflow",
-      "-fsanitize=return",
-      "-fsanitize=returns-nonnull-attribute",
-      "-fsanitize=shift-exponent",
-      "-fsanitize=signed-integer-overflow",
-      "-fsanitize=unreachable",
-      "-fsanitize=vla-bound",
-      "-fsanitize-blacklist=$ubsan_blacklist_path",
-    ]
-
-    # Chromecast ubsan builds fail to compile with these
-    # experimental flags, so only add them to non-chromecast ubsan builds.
-    if (!is_chromecast) {
-      cflags += [
-        # Employ the experimental PBQP register allocator to avoid slow
-        # compilation on files with too many basic blocks.
-        # See http://crbug.com/426271.
-        "-mllvm",
-        "-regalloc=pbqp",
-
-        # Speculatively use coalescing to slightly improve the code generated
-        # by PBQP regallocator. May increase compile time.
-        "-mllvm",
-        "-pbqp-coalescing",
-      ]
-    }
-  }
-}
-
-config("ubsan_no_recover") {
-  if (is_ubsan_no_recover) {
-    cflags = [ "-fno-sanitize-recover=undefined" ]
-  }
-}
-
-config("ubsan_security_flags") {
-  if (is_ubsan_security) {
-    if (!defined(ubsan_security_blacklist_path)) {
-      ubsan_security_blacklist_path =
-          rebase_path("//tools/ubsan/security_blacklist.txt", root_build_dir)
-    }
-    cflags = [
-      "-fsanitize=function",
-      "-fsanitize=pointer-overflow",
-      "-fsanitize=shift",
-      "-fsanitize=signed-integer-overflow",
-      "-fsanitize=vla-bound",
-      "-fsanitize=vptr",
-      "-fsanitize-blacklist=$ubsan_security_blacklist_path",
-    ]
-  }
-}
-
-config("ubsan_null_flags") {
-  if (is_ubsan_null) {
-    cflags = [ "-fsanitize=null" ]
-  }
-}
-
-config("ubsan_vptr_flags") {
-  if (is_ubsan_vptr) {
-    if (!defined(ubsan_vptr_blacklist_path)) {
-      ubsan_vptr_blacklist_path =
-          rebase_path("//tools/ubsan/vptr_blacklist.txt", root_build_dir)
-    }
-    cflags = [
-      "-fsanitize=vptr",
-      "-fsanitize-blacklist=$ubsan_vptr_blacklist_path",
-    ]
-  }
-}
-
-config("fuzzing_build_mode") {
-  if (use_fuzzing_engine) {
-    defines = [ "FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION" ]
-  }
-}
-
-all_sanitizer_configs = [
-  ":common_sanitizer_flags",
-  ":coverage_flags",
-  ":default_sanitizer_ldflags",
-  ":asan_flags",
-  ":cfi_flags",
-  ":lsan_flags",
-  ":msan_flags",
-  ":tsan_flags",
-  ":ubsan_flags",
-  ":ubsan_no_recover",
-  ":ubsan_null_flags",
-  ":ubsan_security_flags",
-  ":ubsan_vptr_flags",
-  ":fuzzing_build_mode",
-]
-
-# This config is applied by default to all targets. It sets the compiler flags
-# for sanitizer usage, or, if no sanitizer is set, does nothing.
-#
-# This needs to be in a separate config so that targets can opt out of
-# sanitizers (by removing the config) if they desire. Even if a target
-# removes this config, executables & shared libraries should still depend on
-# :deps if any of their dependencies have not opted out of sanitizers.
-# Keep this list in sync with default_sanitizer_flags_but_ubsan_vptr.
-config("default_sanitizer_flags") {
-  configs = all_sanitizer_configs
-}
-
-# This config is equivalent to default_sanitizer_flags, but excludes ubsan_vptr.
-# This allows selectively disabling ubsan_vptr when needed, in particular
-# when some third_party code must be compiled without rtti, which
-# ubsan_vptr requires.
-config("default_sanitizer_flags_but_ubsan_vptr") {
-  configs = all_sanitizer_configs - [ ":ubsan_vptr_flags" ]
-}
-
-config("default_sanitizer_flags_but_coverage") {
-  configs = all_sanitizer_configs - [ ":coverage_flags" ]
-}
-
-# This config is used by parts of code that aren't targeted in fuzzers and
-# therefore don't need coverage instrumentation and possibly won't need
-# sanitizer instrumentation either. The config also tells the compiler to
-# perform additional optimizations on the configured code and ensures that
-# linking it to the rest of the binary which is instrumented with sanitizers
-# works. The config only does anything if the build is a fuzzing build.
-config("not_fuzzed") {
-  if (use_fuzzing_engine) {
-    # Since we aren't instrumenting with coverage, code size is less of a
-    # concern, so use a more aggressive optimization level than
-    # optimize_for_fuzzing (-O1). When given multiple optimization flags, clang
-    # obeys the last one, so as long as this flag comes after -O1, it should work.
-    # Since this config will always be depended on after
-    # "//build/config/compiler:default_optimization" (which adds -O1 when
-    # optimize_for_fuzzing is true), -O2 should always be the second flag. Even
-    # though this sounds fragile, it isn't a big deal if it breaks, since proto
-    # fuzzers will still work, they will just be slightly slower.
-    cflags = [ "-O2" ]
-
-    # We need to include this config when we remove default_sanitizer_flags or
-    # else there will be linking errors. We would remove default_sanitizer_flags
-    # here as well, but gn doesn't permit this.
-    if (!is_msan) {
-      # We don't actually remove sanitization when MSan is being used so there
-      # is no need to add default_sanitizer_ldflags in that case
-      configs = [ ":default_sanitizer_ldflags" ]
-    }
-  }
-}
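
As the comments above describe, individual targets could opt out of sanitizer instrumentation by removing the default config. A minimal sketch (target and file names hypothetical), assuming default_sanitizer_flags is in the target's default configs as in the Chromium build:

  source_set("uninstrumented_helper") {
    sources = [ "helper.cc" ]

    # Drop compiler-side sanitizer instrumentation for this target only.
    # Executables and shared libraries that link it still pick up the
    # runtime via "//build/config:exe_and_shlib_deps" -> ":deps".
    configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]
  }
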
diff --git a/build/config/sanitizers/OWNERS b/build/config/sanitizers/OWNERS
deleted file mode 100644
index 7ab46b1..0000000
--- a/build/config/sanitizers/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-mmoroz@chromium.org
-ochang@chromium.org
diff --git a/build/config/sanitizers/sanitizers.gni b/build/config/sanitizers/sanitizers.gni
deleted file mode 100644
index 049a1f1..0000000
--- a/build/config/sanitizers/sanitizers.gni
+++ /dev/null
@@ -1,258 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/chrome_build.gni")
-import("//build/config/chromecast_build.gni")
-import("//build/toolchain/toolchain.gni")
-
-declare_args() {
-  # Compile for Address Sanitizer to find memory bugs.
-  is_asan = false
-
-  # Compile for Leak Sanitizer to find leaks.
-  is_lsan = false
-
-  # Compile for Memory Sanitizer to find uninitialized reads.
-  is_msan = false
-
-  # Compile for Thread Sanitizer to find threading bugs.
-  is_tsan = false
-
-  # Compile for Undefined Behaviour Sanitizer to find various types of
-  # undefined behaviour (excludes vptr checks).
-  is_ubsan = false
-
-  # Halt the program if a problem is detected.
-  is_ubsan_no_recover = false
-
-  # Compile for Undefined Behaviour Sanitizer's null pointer checks.
-  is_ubsan_null = false
-
-  # Compile for Undefined Behaviour Sanitizer's vptr checks.
-  is_ubsan_vptr = false
-
-  # Track where uninitialized memory originates from. From fastest to slowest:
-  # 0 - no tracking, 1 - track only the initial allocation site, 2 - track the
-  # chain of stores leading from allocation site to use site.
-  msan_track_origins = 2
-
-  # Use dynamic libraries instrumented by one of the sanitizers instead of the
-  # standard system libraries. Set this flag to download prebuilt binaries from
-  # GCS.
-  use_prebuilt_instrumented_libraries = false
-
-  # Use dynamic libraries instrumented by one of the sanitizers instead of the
-  # standard system libraries. Set this flag to build the libraries from source.
-  use_locally_built_instrumented_libraries = false
-
-  # Compile with Control Flow Integrity to protect virtual calls and casts.
-  # See http://clang.llvm.org/docs/ControlFlowIntegrity.html
-  #
-  # TODO(pcc): Remove this flag if/when CFI is enabled in all official builds.
-  is_cfi = target_os == "linux" && !is_chromeos && target_cpu == "x64" &&
-           is_official_build
-
-  # Enable checks for bad casts: derived cast and unrelated cast.
-  # TODO(krasin): remove this, when we're ready to add these checks by default.
-  # https://crbug.com/626794
-  use_cfi_cast = false
-
-  # Enable checks for indirect function calls via a function pointer.
-  # TODO(pcc): remove this when we're ready to add these checks by default.
-  # https://crbug.com/701919
-  use_cfi_icall = target_os == "linux" && !is_chromeos && target_cpu == "x64" &&
-                  is_official_build
-
-  # Print detailed diagnostics when Control Flow Integrity detects a violation.
-  use_cfi_diag = false
-
-  # Let Control Flow Integrity continue execution instead of crashing when
-  # printing diagnostics (use_cfi_diag = true).
-  use_cfi_recover = false
-
-  # Compile for fuzzing with LLVM LibFuzzer.
-  # See http://www.chromium.org/developers/testing/libfuzzer
-  use_libfuzzer = false
-
-  # Compile for fuzzing with AFL.
-  use_afl = false
-
-  # Enables core ubsan security features. Will later be removed once it matches
-  # is_ubsan.
-  is_ubsan_security = false
-
-  # Compile for fuzzing with Dr. Fuzz
-  # See http://www.chromium.org/developers/testing/dr-fuzz
-  use_drfuzz = false
-
-  # Helper variable for testing builds with disabled libfuzzer.
-  # Not for client use.
-  disable_libfuzzer = false
-
-  # Value for -fsanitize-coverage flag. Setting this causes
-  # use_sanitizer_coverage to be enabled.
-  # Default value when unset and use_fuzzing_engine=true:
-  #     trace-pc-guard
-  # Default value when unset and use_sanitizer_coverage=true:
-  #     trace-pc-guard,indirect-calls
-  sanitizer_coverage_flags = ""
-
-  # Keep symbol level when building with sanitizers. When sanitizers are
-  # enabled, the default is to compile with the minimum debug info level
-  # necessary, overriding any other symbol level arguments that may be set.
-  # Setting this to true prevents this.
-  sanitizer_keep_symbols = false
-
-  # Builds fuzzer/sanitizers without symbols.  Use with symbol_level=0.
-  # Useful for reducing binary size when building with use_clang_coverage=true.
-  sanitizer_no_symbols = false
-}
-
-# Disable sanitizers for non-default toolchains.
-if (current_toolchain != default_toolchain) {
-  is_asan = false
-  is_cfi = false
-  is_lsan = false
-  is_msan = false
-  is_tsan = false
-  is_ubsan = false
-  is_ubsan_null = false
-  is_ubsan_no_recover = false
-  is_ubsan_security = false
-  is_ubsan_vptr = false
-  msan_track_origins = 0
-  sanitizer_coverage_flags = ""
-  use_afl = false
-  use_cfi_diag = false
-  use_cfi_recover = false
-  use_drfuzz = false
-  use_libfuzzer = false
-  use_prebuilt_instrumented_libraries = false
-  use_locally_built_instrumented_libraries = false
-  use_sanitizer_coverage = false
-}
-
-# Whether we are doing a fuzzer build. Normally this should be checked instead
-# of checking "use_libfuzzer || use_afl" because often developers forget to
-# check for "use_afl".
-use_fuzzing_engine = use_libfuzzer || use_afl
-
-# Args that are in turn dependent on other args must be in a separate
-# declare_args block. User overrides are only applied at the end of a
-# declare_args block.
-declare_args() {
-  use_sanitizer_coverage = use_fuzzing_engine || sanitizer_coverage_flags != ""
-
-  # Detect overflow/underflow for global objects.
-  #
-  # Mac: http://crbug.com/352073
-  asan_globals = !is_mac
-}
-
-if (use_fuzzing_engine && sanitizer_coverage_flags == "") {
-  sanitizer_coverage_flags = "trace-pc-guard"
-} else if (use_sanitizer_coverage && sanitizer_coverage_flags == "") {
-  sanitizer_coverage_flags = "trace-pc-guard,indirect-calls"
-}
-
-# Whether we are linking against a sanitizer runtime library. Among other
-# things, this changes the default symbol level and other settings in order to
-# prepare to create stack traces "live" using the sanitizer runtime.
-using_sanitizer =
-    is_asan || is_lsan || is_tsan || is_msan || is_ubsan || is_ubsan_null ||
-    is_ubsan_vptr || is_ubsan_security || use_sanitizer_coverage || use_cfi_diag
-
-assert(!using_sanitizer || is_clang,
-       "Sanitizers (is_*san) require setting is_clang = true in 'gn args'")
-
-assert(!is_cfi || is_clang,
-       "is_cfi requires setting is_clang = true in 'gn args'")
-
-prebuilt_instrumented_libraries_available =
-    is_msan && (msan_track_origins == 0 || msan_track_origins == 2)
-
-if (use_libfuzzer && is_linux) {
-  if (is_asan) {
-    # We do leak checking with libFuzzer on Linux. Set is_lsan for code that
-    # relies on LEAK_SANITIZER define to avoid false positives.
-    is_lsan = true
-  }
-  if (is_msan) {
-    use_prebuilt_instrumented_libraries = true
-  }
-}
-
-# MSan only links Chrome properly in release builds (brettw -- 9/1/2015). The
-# same is possibly true for the other non-ASan sanitizers. But regardless of
-# whether it links, one would normally never run a sanitizer in debug mode.
-# Running in debug mode probably indicates you forgot to set the "is_debug =
-# false" flag in the build args. ASan seems to run fine in debug mode.
-#
-# If you find a use-case where you want to compile a sanitizer in debug mode
-# and have verified it works, ask brettw and we can consider removing it from
-# this condition. We may also be able to find another way to enable your case
-# without having people accidentally get broken builds by compiling an
-# unsupported or unadvisable configuration.
-#
-# For one-off testing, just comment this assertion out.
-assert(!is_debug || !(is_msan || is_ubsan || is_ubsan_null || is_ubsan_vptr),
-       "Sanitizers should generally be used in release (set is_debug=false).")
-
-assert(!is_msan || (is_linux && current_cpu == "x64"),
-       "MSan currently only works on 64-bit Linux and ChromeOS builds.")
-
-assert(!is_lsan || is_asan, "is_lsan = true requires is_asan = true also.")
-
-# ASAN build on Windows is not working in debug mode. Intercepting memory
-# allocation functions is hard on Windows and not yet implemented in LLVM.
-assert(!is_win || !is_debug || !is_asan,
-       "ASan on Windows doesn't work in debug (set is_debug=false).")
-
-# Make sure that if we recover on detection (i.e. not crash), diagnostics are
-# printed.
-assert(!use_cfi_recover || use_cfi_diag,
-       "Only use CFI recovery together with diagnostics.")
-
-# TODO(crbug.com/753445): the use_sanitizer_coverage arg is currently
-# not supported by the Chromium mac_clang_x64 toolchain on iOS distribution.
-# The coverage works with iOS toolchain but it is broken when the mac
-# toolchain is used as a secondary one on iOS distribution. E.g., it should be
-# possible to build the "net" target for iOS with the sanitizer coverage
-# enabled.
-assert(
-    !(use_sanitizer_coverage && is_mac && target_os == "ios"),
-    "crbug.com/753445: use_sanitizer_coverage=true is not supported by the " +
-        "Chromium mac_clang_x64 toolchain on iOS distribution. Please set " +
-        "the argument value to false.")
-
-# Use these lists of configs to disable instrumenting code that is part of a
-# fuzzer, but which isn't being targeted (such as libprotobuf-mutator, *.pb.cc
-# and libprotobuf when they are built as part of a proto fuzzer). Adding or
-# removing these lists does not have any effect if use_libfuzzer or use_afl are
-# not passed as arguments to gn.
-not_fuzzed_remove_configs = []
-not_fuzzed_remove_nonasan_configs = []
-
-if (use_fuzzing_engine) {
-  # Removing coverage should always just work.
-  not_fuzzed_remove_configs += [ "//build/config/coverage:default_coverage" ]
-  not_fuzzed_remove_nonasan_configs +=
-      [ "//build/config/coverage:default_coverage" ]
-
-  if (!is_msan) {
-    # Allow sanitizer instrumentation to be removed if we are not using MSan
-    # since binaries cannot be partially instrumented with MSan.
-    not_fuzzed_remove_configs +=
-        [ "//build/config/sanitizers:default_sanitizer_flags" ]
-
-    # Certain parts of binaries must be instrumented with ASan if the rest of
-    # the binary is. For these, only remove non-ASan sanitizer instrumentation.
-    if (!is_asan) {
-      not_fuzzed_remove_nonasan_configs +=
-          [ "//build/config/sanitizers:default_sanitizer_flags" ]
-
-      assert(not_fuzzed_remove_nonasan_configs == not_fuzzed_remove_configs)
-    }
-  }
-}
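
For reference, a typical args.gn that satisfies the assertions in the deleted file when enabling ASan plus leak checking (a sketch; exact args depend on the rest of the build):

  # args.gn
  is_debug = false   # release-style build, as the comments above recommend
  is_clang = true    # asserted above: sanitizers require clang
  is_asan = true
  is_lsan = true     # asserted above: is_lsan requires is_asan
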
diff --git a/build/config/sysroot.gni b/build/config/sysroot.gni
deleted file mode 100644
index d5daf2d..0000000
--- a/build/config/sysroot.gni
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This header file defines the "sysroot" variable which is the absolute path
-# of the sysroot. If no sysroot applies, the variable will be an empty string.
-
-import("//build/config/chrome_build.gni")
-
-declare_args() {
-  # The absolute path of the sysroot that is applied when compiling using
-  # the target toolchain.
-  target_sysroot = ""
-
-  # The absolute path to directory containing linux sysroot images
-  target_sysroot_dir = "//build/linux"
-
-  use_sysroot = current_cpu == "x86" || current_cpu == "x64" ||
-                current_cpu == "arm" || current_cpu == "arm64" ||
-                current_cpu == "mipsel" || current_cpu == "mips64el"
-}
-
-if (current_os == target_os && current_cpu == target_cpu &&
-    target_sysroot != "") {
-  sysroot = target_sysroot
-} else if (is_android) {
-  import("//build/config/android/config.gni")
-
-  # Android uses unified headers, and thus a single compile time sysroot
-  sysroot = "$android_ndk_root/sysroot"
-} else if (is_linux && use_sysroot) {
-  # By default build against a sysroot image downloaded from Cloud Storage
-  # during gclient runhooks.
-  if (current_cpu == "x64") {
-    sysroot = "$target_sysroot_dir/debian_sid_amd64-sysroot"
-  } else if (current_cpu == "x86") {
-    sysroot = "$target_sysroot_dir/debian_sid_i386-sysroot"
-  } else if (current_cpu == "mipsel") {
-    sysroot = "$target_sysroot_dir/debian_sid_mips-sysroot"
-  } else if (current_cpu == "mips64el") {
-    sysroot = "$target_sysroot_dir/debian_sid_mips64el-sysroot"
-  } else if (current_cpu == "arm") {
-    sysroot = "$target_sysroot_dir/debian_sid_arm-sysroot"
-  } else if (current_cpu == "arm64") {
-    sysroot = "$target_sysroot_dir/debian_sid_arm64-sysroot"
-  } else {
-    assert(false, "No linux sysroot for cpu: $target_cpu")
-  }
-
-  if (sysroot != "") {
-    _script_arch = current_cpu
-    if (_script_arch == "x86") {
-      _script_arch = "i386"
-    } else if (_script_arch == "x64") {
-      _script_arch = "amd64"
-    }
-    assert(
-        exec_script("//build/dir_exists.py",
-                    [ rebase_path(sysroot) ],
-                    "string") == "True",
-        "Missing sysroot ($sysroot). To fix, run: build/linux/sysroot_scripts/install-sysroot.py --arch=$_script_arch")
-  }
-} else if (is_mac) {
-  import("//build/config/mac/mac_sdk.gni")
-  sysroot = mac_sdk_path
-} else if (is_ios) {
-  import("//build/config/ios/ios_sdk.gni")
-  sysroot = ios_sdk_path
-} else if (is_fuchsia) {
-  import("//build/config/fuchsia/config.gni")
-  if (current_cpu == "arm64") {
-    sysroot = fuchsia_sdk + "/sysroot/aarch64-fuchsia"
-  } else if (current_cpu == "x64") {
-    sysroot = fuchsia_sdk + "/sysroot/x86_64-fuchsia"
-  } else {
-    sysroot = ""
-  }
-} else {
-  sysroot = ""
-}
-
-if (is_android) {
-  # Android uses unified headers in NDK r16 and later, meaning that the
-  # compile time sysroot and link time sysroot are different
-  link_sysroot = sysroot
-  if (current_cpu == "arm") {
-    link_sysroot = "$android_ndk_root/$arm_android_sysroot_subdir"
-  } else if (current_cpu == "arm64") {
-    link_sysroot = "$android_ndk_root/$arm64_android_sysroot_subdir"
-  } else if (current_cpu == "x86") {
-    link_sysroot = "$android_ndk_root/$x86_android_sysroot_subdir"
-  } else if (current_cpu == "x64") {
-    link_sysroot = "$android_ndk_root/$x86_64_android_sysroot_subdir"
-  } else if (current_cpu == "mipsel") {
-    link_sysroot = "$android_ndk_root/$mips_android_sysroot_subdir"
-  } else if (current_cpu == "mips64el") {
-    link_sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
-  } else {
-    assert(false, "No android link sysroot for cpu: $target_cpu")
-  }
-} else {
-  link_sysroot = sysroot
-}
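
A hypothetical cross-compile args.gn that exercises the target_sysroot override path above (the sysroot path is a placeholder):

  # args.gn
  target_os = "linux"
  target_cpu = "arm64"
  target_sysroot = "/opt/sysroots/arm64-linux"  # placeholder path
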
diff --git a/build/config/ui.gni b/build/config/ui.gni
deleted file mode 100644
index 547b42f..0000000
--- a/build/config/ui.gni
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# =============================================
-#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
-# =============================================
-#
-# These flags are effectively global. Your feature flag should go near the
-# code it controls. Most of these items are here now because they control
-# legacy global #defines passed to the compiler (now replaced with generated
-# buildflag headers -- see //build/buildflag_header.gni).
-#
-# These flags are ui-related so should eventually be moved to various places
-# in //ui/*.
-#
-# There is more advice on where to put build flags in the "Build flag" section
-# of //build/config/BUILDCONFIG.gn.
-
-import("//build/config/chromecast_build.gni")
-
-declare_args() {
-  # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux
-  # that does not require X11. Enabling this feature disables use of glib, x11,
-  # Pango, and Cairo.
-  use_ozone = is_chromeos || (is_chromecast && !is_android) || is_fuchsia
-
-  # Indicates if Aura is enabled. Aura is a low-level windowing library, sort
-  # of a replacement for GDI or GTK.
-  use_aura = is_win || is_linux || is_fuchsia
-
-  # Whether we should use glib, a low level C utility library.
-  use_glib = is_linux
-}
-
-declare_args() {
-  # True means the UI is built using the "views" framework.
-  toolkit_views = (is_mac || is_win || is_chromeos || use_aura) &&
-                  !is_chromecast && !is_fuchsia
-}
-
-# Additional dependent variables -----------------------------------------------
-#
-# These variables depend on other variables and can't be set externally.
-
-# Indicates if the UI toolkit depends on X11.
-use_x11 = is_linux && !use_ozone
-
-# Turn off glib if Ozone is enabled.
-if (use_ozone) {
-  use_glib = false
-}
-
-# Whether to use atk, the Accessibility ToolKit library
-use_atk = is_desktop_linux && use_x11
-# =============================================
-#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
-# =============================================
-#
-# See comment at the top.
diff --git a/build/config/v8_target_cpu.gni b/build/config/v8_target_cpu.gni
deleted file mode 100644
index 305981f..0000000
--- a/build/config/v8_target_cpu.gni
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/sanitizers/sanitizers.gni")
-
-declare_args() {
-  # This arg is used when we want to tell the JIT-generating v8 code
-  # that we want to have it generate for an architecture that is different
-  # than the architecture that v8 will actually run on; we then run the
-  # code under an emulator. For example, we might run v8 on x86, but
-  # generate arm code and run that under emulation.
-  #
-  # This arg is defined here rather than in the v8 project because we want
-  # some of the common architecture-specific args (like arm_float_abi or
-  # mips_arch_variant) to be set to their defaults either if the current_cpu
-  # applies *or* if the v8_current_cpu applies.
-  #
-  # As described below, you can also specify the v8_target_cpu to use
-  # indirectly by specifying a `custom_toolchain` that contains v8_$cpu in the
-  # name after the normal toolchain.
-  #
-  # For example, `gn gen --args="custom_toolchain=...:clang_x64_v8_arm64"`
-  # is equivalent to setting --args=`v8_target_cpu="arm64"`. Setting
-  # `custom_toolchain` is more verbose but makes the toolchain that is
-  # (effectively) being used explicit.
-  #
-  # v8_target_cpu can only be used to target one architecture in a build,
-  # so if you wish to build multiple copies of v8 that are targeting
-  # different architectures, you will need to do something more
-  # complicated involving multiple toolchains along the lines of
-  # custom_toolchain, above.
-  v8_target_cpu = ""
-}
-
-if (v8_target_cpu == "") {
-  if (current_toolchain == "//build/toolchain/linux:clang_x64_v8_arm64") {
-    v8_target_cpu = "arm64"
-  } else if (current_toolchain == "//build/toolchain/linux:clang_x86_v8_arm") {
-    v8_target_cpu = "arm"
-  } else if (current_toolchain ==
-             "//build/toolchain/linux:clang_x86_v8_mips64el") {
-    v8_target_cpu = "mips64el"
-  } else if (current_toolchain ==
-             "//build/toolchain/linux:clang_x86_v8_mipsel") {
-    v8_target_cpu = "mipsel"
-  } else if (is_msan) {
-    # If we're running under a sanitizer, if we configure v8 to generate
-    # code that will be run under a simulator, then the generated code
-    # also gets the benefits of the sanitizer.
-    v8_target_cpu = "arm64"
-  } else {
-    v8_target_cpu = target_cpu
-  }
-}
-
-declare_args() {
-  # This argument is declared here so that it can be overridden in toolchains.
-  # It should never be explicitly set by the user.
-  v8_current_cpu = v8_target_cpu
-}
diff --git a/build/config/win/BUILD.gn b/build/config/win/BUILD.gn
deleted file mode 100644
index ce8128b..0000000
--- a/build/config/win/BUILD.gn
+++ /dev/null
@@ -1,538 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/chrome_build.gni")
-import("//build/config/c++/c++.gni")
-import("//build/config/clang/clang.gni")
-import("//build/config/compiler/compiler.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/config/win/visual_studio_version.gni")
-import("//build/toolchain/goma.gni")
-import("//build/toolchain/toolchain.gni")
-
-assert(is_win)
-
-declare_args() {
-  # Set this to true to enable static analysis through Visual Studio's
-  # /analyze. This dramatically slows compiles and reports thousands of
-  # warnings, so normally this is done on a build machine and only the new
-  # warnings are examined.
-  use_vs_code_analysis = false
-
-  # Turn this on to have the linker output extra timing information.
-  win_linker_timing = false
-
-  # possible values for target_winuwp_version:
-  #   "10" - Windows UWP 10
-  #   "8.1" - Windows RT 8.1
-  #   "8.0" - Windows RT 8.0
-  target_winuwp_version = "10"
-
-  # possible values:
-  #   "app" - Windows Store Applications
-  #   "phone" - Windows Phone Applications
-  #   "system" - Windows Drivers and Tools
-  #   "server" - Windows Server Applications
-  #   "desktop" - Windows Desktop Applications
-  target_winuwp_family = "app"
-}
-
-# This is included by reference in the //build/config/compiler config that
-# is applied to all targets. It is here to separate out the logic that is
-# Windows-only.
-config("compiler") {
-  if (current_cpu == "x86") {
-    asmflags = [
-      # When /safeseh is specified, the linker will only produce an image if it
-      # can also produce a table of the image's safe exception handlers. This
-      # table specifies for the operating system which exception handlers are
-      # valid for the image. Note that /SAFESEH isn't accepted on the command
-      # line, only /safeseh. This is only accepted by ml.exe, not ml64.exe.
-      "/safeseh",
-    ]
-  }
-
-  cflags = [
-    "/Gy",  # Enable function-level linking.
-    "/FS",  # Preserve previous PDB behavior.
-    "/bigobj",  # Some of our files are bigger than the regular limits.
-  ]
-
-  # Force C/C++ mode for the given GN detected file type. This is necessary
-  # for precompiled headers where the same source file is compiled in both
-  # modes.
-  cflags_c = [ "/TC" ]
-  cflags_cc = [ "/TP" ]
-
-  cflags += [
-    # Tell the compiler to crash on failures. This is undocumented
-    # and unsupported but very handy.
-    "/d2FastFail",
-
-    # Work around crbug.com/526851, bug in VS 2015 RTM compiler.
-    "/Zc:sizedDealloc-",
-  ]
-
-  if (is_clang) {
-    cflags += [ "-fmsc-version=1911" ]
-
-    if (current_cpu == "x86") {
-      cflags += [ "-m32" ]
-    } else {
-      cflags += [ "-m64" ]
-    }
-
-    if (exec_script("//build/win/use_ansi_codes.py", [], "trim string") ==
-        "True") {
-      cflags += [
-        # cmd.exe doesn't understand ANSI escape codes by default,
-        # so only enable them if something emulating them is around.
-        "-fansi-escape-codes",
-      ]
-    }
-
-    # Clang runtime libraries, such as the sanitizer runtimes, live here.
-    lib_dirs = [ "$clang_base_path/lib/clang/$clang_version/lib/windows" ]
-  }
-
-  if (use_lld && !use_thin_lto && (is_clang || !use_goma)) {
-    # /Brepro lets the compiler not write the mtime field in the .obj output.
-    # link.exe /incremental relies on this field to work correctly, but lld
-    # never looks at this timestamp, so it's safe to pass this flag with
-    # lld and get more deterministic compiler output in return.
-    # In LTO builds, the compiler doesn't write .obj files containing mtimes,
-    # so /Brepro is ignored there.
-    cflags += [ "/Brepro" ]
-  }
-
-  if (!is_debug && !is_component_build) {
-    # Enable standard linker optimizations like GC (/OPT:REF) and ICF in static
-    # release builds. These are implied by /PROFILE below, but /PROFILE is
-    # incompatible with /debug:fastlink and LLD ignores it as of this writing.
-    # Release builds always want these optimizations, so enable them explicitly.
-    ldflags = [
-      "/OPT:REF",
-      "/OPT:ICF",
-      "/INCREMENTAL:NO",
-      "/FIXED:NO",
-    ]
-
-    if (use_lld) {
-      # String tail merging leads to smaller binaries, but they don't compress
-      # as well, leading to increased mini_installer size (crbug.com/838449).
-      ldflags += [ "/OPT:NOLLDTAILMERGE" ]
-    }
-
-    # TODO(siggi): Is this of any use anymore?
-    # /PROFILE ensures that the PDB file contains FIXUP information (growing the
-    # PDB file by about 5%) but does not otherwise alter the output binary. It
-    # is enabled opportunistically for builds where it is not prohibited (not
-    # supported when incrementally linking, or using /debug:fastlink).
-    if (!is_win_fastlink) {
-      ldflags += [ "/PROFILE" ]
-    }
-  }
-
-  # arflags apply only to static_libraries. The normal linker configs are only
-  # set for executable and shared library targets so arflags must be set
-  # elsewhere. Since this is relatively contained, we just apply them in this
-  # more general config and they will only have an effect on static libraries.
-  arflags = [
-    # "No public symbols found; archive member will be inaccessible." This
-    # means that one or more object files in the library can never be
-    # pulled in to targets that link to this library. It's just a warning that
-    # the source file is a no-op.
-    "/ignore:4221",
-  ]
-}
-
-config("vs_code_analysis") {
-  if (use_vs_code_analysis && !is_clang) {
-    # When use_vs_code_analysis is specified add the /analyze switch to enable
-    # static analysis. Specifying /analyze:WX- says that /analyze warnings
-    # should not be treated as errors.
-    cflags = [ "/analyze:WX-" ]
-
-    # Also, disable various noisy warnings that have low value.
-    cflags += [
-      "/wd6011",  # Dereferencing NULL pointer
-
-      # C6285 is ~16% of raw warnings and has low value
-      "/wd6285",  # non-zero constant || non-zero constant
-      "/wd6308",  # realloc might return null pointer
-
-      # Possible infinite loop: use of the constant
-      # EXCEPTION_CONTINUE_EXECUTION in the exception-filter
-      "/wd6312",
-
-      "/wd6322",  # Empty _except block
-      "/wd6330",  # 'char' used instead of 'unsigned char' for istype() call
-
-      # C6334 is ~80% of raw warnings and has low value
-      "/wd6334",  # sizeof applied to an expression with an operator
-      "/wd6326",  # Potential comparison of constant with constant
-      "/wd6340",  # Sign mismatch in function parameter
-      "/wd28159",  # Consider using 'GetTickCount64'
-      "/wd28196",  # The precondition is not satisfied
-      "/wd28204",  # Inconsistent SAL annotations
-      "/wd28251",  # Inconsistent SAL annotations
-      "/wd28252",  # Inconsistent SAL annotations
-      "/wd28253",  # Inconsistent SAL annotations
-      "/wd28278",  # Function appears with no prototype in scope
-      "/wd28285",  # syntax error in SAL annotation (in algorithm)
-      "/wd28301",  # Inconsistent SAL annotations
-      "/wd28182",  # Dereferencing NULL pointer
-    ]
-  }
-}
-
-# This is included by reference in the //build/config/compiler:runtime_library
-# config that is applied to all targets. It is here to separate out the logic
-# that is Windows-only. Please see that target for advice on what should go in
-# :runtime_library vs. :compiler.
-config("runtime_library") {
-  cflags = []
-  cflags_cc = []
-
-  # Defines that set up the CRT.
-  defines = [
-    "__STD_C",
-    "_CRT_RAND_S",
-    "_CRT_SECURE_NO_DEPRECATE",
-    "_SCL_SECURE_NO_DEPRECATE",
-  ]
-
-  if (is_clang) {
-    # Work around Fall Creators Update SDK bug - crbug.com/773476 has details.
-    # https://developercommunity.visualstudio.com/content/problem/131391/154-fails-to-define-deprecatedenumerator-2.html
-    defines += [ "DEPRECATEDENUMERATOR(x)=[[deprecated(x)]]" ]
-  }
-
-  # Defines that set up the Windows SDK.
-  defines += [
-    "_ATL_NO_OPENGL",
-    "_WINDOWS",
-    "CERT_CHAIN_PARA_HAS_EXTRA_FIELDS",
-    "PSAPI_VERSION=1",
-    "WIN32",
-    "_SECURE_ATL",
-  ]
-
-  if (!use_vs_code_analysis) {
-    # This is required for ATL to use XP-safe versions of its functions.
-    # However it is prohibited when using /analyze
-    defines += [ "_USING_V110_SDK71_" ]
-  }
-
-  if (use_custom_libcxx) {
-    cflags_cc +=
-        [ "-I" + rebase_path("$libcxx_prefix/include", root_build_dir) ]
-    if (libcpp_is_static) {
-      defines += [ "_LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS" ]
-    }
-
-    # Prevent libc++ from embedding linker flags to try to automatically link
-    # against its runtime library. This is unnecessary with our build system,
-    # and can also result in build failures if libc++'s name for a library does
-    # not match ours.
-    defines += [ "_LIBCPP_NO_AUTO_LINK" ]
-  }
-
-  if (current_os == "winuwp") {
-    # When targeting Windows Runtime, certain compiler/linker flags are
-    # necessary.
-    defines += [
-      "WINUWP",
-      "__WRL_NO_DEFAULT_LIB__",
-    ]
-    if (target_winuwp_family == "app") {
-      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_PC_APP" ]
-    } else if (target_winuwp_family == "phone") {
-      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP" ]
-    } else if (target_winuwp_family == "system") {
-      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_SYSTEM" ]
-    } else if (target_winuwp_family == "server") {
-      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_SERVER" ]
-    } else {
-      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP" ]
-    }
-    cflags_cc += [
-      "/ZW",
-      "/EHsc",
-    ]
-
-    # This warning is given because the linker cannot tell the difference
-    # between consuming WinRT APIs and authoring WinRT within static
-    # libraries; as such, it is always emitted. Since consuming WinRT APIs
-    # within a library is legitimate but authoring WinRT APIs is not
-    # allowed, this warning is disabled to ignore the legitimate
-    # consumption of WinRT APIs within static library builds.
-    arflags = [ "/IGNORE:4264" ]
-
-    if (target_winuwp_version == "10") {
-      defines += [ "WIN10=_WIN32_WINNT_WIN10" ]
-    } else if (target_winuwp_version == "8.1") {
-      defines += [ "WIN8_1=_WIN32_WINNT_WINBLUE" ]
-    } else if (target_winuwp_version == "8.0") {
-      defines += [ "WIN8=_WIN32_WINNT_WIN8" ]
-    }
-  } else {
-    # When not targeting Windows Runtime, make sure the WINAPI family is set
-    # to desktop.
-    defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP" ]
-  }
-}
-
-# Sets the default Windows build version. This is separated because some
-# targets need to manually override it for their compiles.
-config("winver") {
-  defines = [
-    "NTDDI_VERSION=0x0A000002",
-    "_WIN32_WINNT=0x0A00",
-    "WINVER=0x0A00",
-  ]
-}
-
-# Linker flags for Windows SDK setup, this is applied only to EXEs and DLLs.
-config("sdk_link") {
-  assert(current_cpu == "x64" || current_cpu == "x86" || current_cpu == "arm" ||
-             current_cpu == "arm64",
-         "Only supports x64, x86, arm and arm64 CPUs")
-  if (current_cpu == "x64") {
-    ldflags = [ "/MACHINE:X64" ]
-  } else if (current_cpu == "x86") {
-    ldflags = [
-      "/MACHINE:X86",
-      "/SAFESEH",  # Not compatible with x64 so use only for x86.
-      "/largeaddressaware",
-    ]
-  } else if (current_cpu == "arm") {
-    ldflags = [ "/MACHINE:ARM" ]
-  } else if (current_cpu == "arm64") {
-    ldflags = [ "/MACHINE:ARM64" ]
-  }
-
-  vcvars_toolchain_data = exec_script("../../toolchain/win/setup_toolchain.py",
-                                      [
-                                        visual_studio_path,
-                                        windows_sdk_path,
-                                        visual_studio_runtime_dirs,
-                                        current_os,
-                                        current_cpu,
-                                        "none",
-                                      ],
-                                      "scope")
-
-  vc_lib_path = vcvars_toolchain_data.vc_lib_path
-  if (defined(vcvars_toolchain_data.vc_lib_atlmfc_path)) {
-    vc_lib_atlmfc_path = vcvars_toolchain_data.vc_lib_atlmfc_path
-  }
-  vc_lib_um_path = vcvars_toolchain_data.vc_lib_um_path
-
-  lib_dirs = [
-    "$vc_lib_um_path",
-    "$vc_lib_path",
-  ]
-  if (defined(vc_lib_atlmfc_path)) {
-    lib_dirs += [ "$vc_lib_atlmfc_path" ]
-  }
-}
-
-# This default linker setup is provided separately from the SDK setup so
-# targets who want different library configurations can remove this and specify
-# their own.
-config("common_linker_setup") {
-  ldflags = [
-    "/fastfail",
-    "/FIXED:NO",
-    "/ignore:4199",
-    "/ignore:4221",
-    "/NXCOMPAT",
-    "/DYNAMICBASE",
-  ]
-
-  if (win_linker_timing) {
-    ldflags += [
-      "/time",
-      "/verbose:incr",
-    ]
-  }
-}
-
-config("cfi_linker") {
-  # Control Flow Guard (CFG)
-  # https://msdn.microsoft.com/en-us/library/windows/desktop/mt637065.aspx
-  # /DYNAMICBASE (ASLR) is turned off in debug builds, therefore CFG can’t be
-  # turned on either.
-  # CFG seems to lead to random corruption with incremental linking so turn off
-  # CFG in component builds. https://crbug.com/812421
-  if (!is_debug && !is_component_build) {
-    # Turn on CFG in msvc linker, regardless of compiler used. Turn off CFG for
-    # longjmp (new in VS 2017) because it relies on compiler support which we do
-    # not have enabled.
-    ldflags = [ "/guard:cf,nolongjmp" ]
-  }
-}
-
-# CRT --------------------------------------------------------------------------
-
-# Configures how the runtime library (CRT) is going to be used.
-# See https://msdn.microsoft.com/en-us/library/2kzt1wy3.aspx for a reference of
-# what each value does.
-config("default_crt") {
-  if (is_component_build) {
-    # Component mode: dynamic CRT. Since the library is shared, it requires
-    # exceptions or will give errors about things not matching, so keep
-    # exceptions on.
-    configs = [ ":dynamic_crt" ]
-  } else {
-    if (current_os == "winuwp") {
-      # https://blogs.msdn.microsoft.com/vcblog/2014/06/10/the-great-c-runtime-crt-refactoring/
-      # contains a detailed explanation of what is happening with the Windows
-      # CRT in Visual Studio releases related to Windows store applications.
-      configs = [ ":dynamic_crt" ]
-    } else {
-      # Desktop Windows: static CRT.
-      configs = [ ":static_crt" ]
-    }
-  }
-}
-
-# Use this to force the release CRT when building perf-critical build tools
-# that need to be fully optimized even in debug builds, for those times when the
-# debug CRT is part of the bottleneck. This also avoids *implicitly* defining
-# _DEBUG.
-config("release_crt") {
-  if (is_component_build) {
-    cflags = [ "/MD" ]
-  } else {
-    cflags = [ "/MT" ]
-  }
-}
-
-config("dynamic_crt") {
-  if (is_debug) {
-    # This pulls in the DLL debug CRT and defines _DEBUG
-    cflags = [ "/MDd" ]
-  } else {
-    cflags = [ "/MD" ]
-  }
-}
-
-config("static_crt") {
-  if (is_debug) {
-    # This pulls in the static debug CRT and defines _DEBUG
-    cflags = [ "/MTd" ]
-  } else {
-    cflags = [ "/MT" ]
-  }
-}
-
-# Subsystem --------------------------------------------------------------------
-
-# This is appended to the subsystem to specify a minimum version.
-if (current_cpu == "x64") {
-  # The number after the comma is the minimum required OS version.
-  # 5.02 = Windows Server 2003.
-  subsystem_version_suffix = ",5.02"
-} else {
-  # 5.01 = Windows XP.
-  subsystem_version_suffix = ",5.01"
-}
-
-config("console") {
-  ldflags = [ "/SUBSYSTEM:CONSOLE$subsystem_version_suffix" ]
-}
-config("windowed") {
-  ldflags = [ "/SUBSYSTEM:WINDOWS$subsystem_version_suffix" ]
-}
-
-# Incremental linking ----------------------------------------------------------
-
-incremental_linking_on_switch = [ "/INCREMENTAL" ]
-incremental_linking_off_switch = [ "/INCREMENTAL:NO" ]
-if (use_lld) {
-  incremental_linking_on_switch += [ "/OPT:NOREF" ]
-}
-
-# Enable incremental linking for debug builds and all component builds - any
-# builds where performance is not job one.
-if (is_debug || is_component_build) {
-  default_incremental_linking_switch = incremental_linking_on_switch
-} else {
-  default_incremental_linking_switch = incremental_linking_off_switch
-}
-
-# Applies incremental linking or not depending on the current configuration.
-config("default_incremental_linking") {
-  ldflags = default_incremental_linking_switch
-}
-
-# Explicitly turn incremental linking on or off.
-config("incremental_linking") {
-  ldflags = incremental_linking_on_switch
-}
-config("no_incremental_linking") {
-  ldflags = incremental_linking_off_switch
-}
-
-# Some large modules can't handle incremental linking in some situations. This
-# config should be applied to large modules to turn off incremental linking
-# when it won't work.
-config("default_large_module_incremental_linking") {
-  if (symbol_level == 0 || (current_cpu == "x86" && is_component_build)) {
-    # In these configurations, ilk file sizes stay low enough that we can
-    # link incrementally.
-    ldflags = default_incremental_linking_switch
-  } else {
-    ldflags = incremental_linking_off_switch
-  }
-}
-
-# Character set ----------------------------------------------------------------
-
-# Not including this config means "ansi" (8-bit system codepage).
-config("unicode") {
-  defines = [
-    "_UNICODE",
-    "UNICODE",
-  ]
-}
-
-# Lean and mean ----------------------------------------------------------------
-
-# Some third party code might not compile with WIN32_LEAN_AND_MEAN so we have
-# to have a separate config for it. Remove this config from your target to
-# get the "bloaty and accommodating" version of windows.h.
-config("lean_and_mean") {
-  defines = [ "WIN32_LEAN_AND_MEAN" ]
-}
-
-# Nominmax --------------------------------------------------------------------
-
-# Some third party code defines NOMINMAX before including windows.h, which
-# then causes warnings when it's been previously defined on the command line.
-# For such targets, this config can be removed.
-
-config("nominmax") {
-  defines = [ "NOMINMAX" ]
-}
-
-# Generating order files -------------------------------------------------------
-
-config("default_cygprofile_instrumentation") {
-  if (generate_order_files) {
-    assert(is_clang, "cygprofile instrumentation only works with clang")
-    assert(is_official_build, "order files should be made w/ official builds")
-    assert(!is_chrome_branded, "order files could leak internal symbol names")
-    configs = [ ":cygprofile_instrumentation" ]
-  }
-}
-
-config("cygprofile_instrumentation") {
-  cflags = [
-    "-Xclang",
-    "-finstrument-functions-after-inlining",
-  ]
-}
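For readers skimming the deleted Windows configs, a minimal usage sketch follows. It is hypothetical: the target name and source file are invented, and the labels assume the configs above lived in //build/config/win/BUILD.gn.

```
# Hypothetical target opting into some of the additive Windows configs above.
executable("my_tool") {
  sources = [ "my_tool.cc" ]
  configs += [
    "//build/config/win:unicode",
    "//build/config/win:lean_and_mean",
    "//build/config/win:nominmax",
  ]
}
```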
diff --git a/build/config/win/console_app.gni b/build/config/win/console_app.gni
deleted file mode 100644
index cac2ef5..0000000
--- a/build/config/win/console_app.gni
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/sanitizers/sanitizers.gni")
-
-declare_args() {
-  # If true, builds as a console app (rather than a windowed app), which allows
-  # logging to be printed to the user. This will cause a terminal window to pop
-  # up when the executable is not run from the command line, so should only be
-  # used for development. Only has an effect on Windows builds.
-  win_console_app = false
-}
-
-if (is_win && is_asan) {
-  # AddressSanitizer build should be a console app since it writes to stderr.
-  win_console_app = true
-}
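A sketch of how win_console_app was typically consumed, assuming the subsystem configs deleted above; the target and file names are invented.

```
# Hypothetical executable choosing its linker subsystem from win_console_app.
import("//build/config/win/console_app.gni")

executable("my_app") {
  sources = [ "main.cc" ]
  if (win_console_app) {
    configs += [ "//build/config/win:console" ]
  } else {
    configs += [ "//build/config/win:windowed" ]
  }
}
```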
diff --git a/build/config/win/manifest.gni b/build/config/win/manifest.gni
deleted file mode 100644
index b18a4a1..0000000
--- a/build/config/win/manifest.gni
+++ /dev/null
@@ -1,112 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# HOW MANIFESTS WORK IN THE GN BUILD
-#
-# Use the windows_manifest template to declare a manifest generation step.
-# This will combine all listed .manifest files. To link this manifest, just
-# depend on the manifest target from your executable or shared library.
-#
-# This will define an empty placeholder target on non-Windows platforms so
-# the manifest declarations and dependencies do not need to be inside of OS
-# conditionals.
-#
-# A binary can depend on only one manifest target, but the manifest target
-# can depend on many individual .manifest files which will be merged. As a
-# result, only executables and shared libraries should depend on manifest
-# targets. If you want to add a manifest to a component, put the dependency
-# behind a "if (is_component_build)" conditional.
-#
-# Generally you will just want the defaults for the Chrome build. In this case
-# the binary should just depend on one of the targets in //build/win/. There
-# are also individual manifest files in that directory you can reference via
-# the *_manifest variables defined below to pick and choose only some defaults.
-# You might combine these with a custom manifest file to get specific behavior.
-
-# Reference this manifest as a source from windows_manifest targets to get
-# the default Chrome OS compatibility list.
-default_compatibility_manifest = "//build/win/compatibility.manifest"
-
-# Reference this manifest as a source from windows_manifest targets to get
-# the default Chrome common controls compatibility.
-common_controls_manifest = "//build/win/common_controls.manifest"
-
-# Reference this manifest to request that Windows not perform any elevation
-# when running your program. Otherwise, it might do some autodetection and
-# request elevated privileges from the user. This is normally what you want.
-as_invoker_manifest = "//build/win/as_invoker.manifest"
-
-# An alternative to as_invoker_manifest when you want the application to always
-# elevate.
-require_administrator_manifest = "//build/win/require_administrator.manifest"
-
-# Construct a target to combine the given manifest files into a .rc file.
-#
-# Variables for the windows_manifest template:
-#
-#   sources: (required)
-#     List of source .manifest files to add.
-#
-#   deps: (optional)
-#   visibility: (optional)
-#     Normal meaning.
-#
-# Example:
-#
-#   windows_manifest("doom_melon_manifest") {
-#     sources = [
-#       "doom_melon.manifest",   # Custom values in here.
-#       default_compatibility_manifest,  # Want the normal OS compat list.
-#     ]
-#   }
-#
-#   executable("doom_melon") {
-#     deps = [ ":doom_melon_manifest" ]
-#     ...
-#   }
-
-if (is_win) {
-  template("windows_manifest") {
-    config_name = "${target_name}__config"
-    source_set_name = target_name
-
-    config(config_name) {
-      visibility = [ ":$source_set_name" ]
-      assert(defined(invoker.sources),
-             "\"sources\" must be defined for a windows_manifest target")
-      manifests = []
-      foreach(i, rebase_path(invoker.sources, root_build_dir)) {
-        manifests += [ "/manifestinput:" + i ]
-      }
-      ldflags = [
-                  "/manifest:embed",
-
-                  # We handle UAC by adding explicit .manifest files instead.
-                  "/manifestuac:no",
-                ] + manifests
-    }
-
-    # This source set only exists to add a dep on the invoker's deps and to
-    # add a public_config that sets ldflags on dependents.
-    source_set(source_set_name) {
-      forward_variables_from(invoker, [ "visibility" ])
-      public_configs = [ ":$config_name" ]
-
-      # Apply any dependencies from the invoker to this target, since those
-      # dependencies may have created the input manifest files.
-      forward_variables_from(invoker, [ "deps" ])
-    }
-  }
-} else {
-  # Make a no-op group on non-Windows platforms so windows_manifest
-  # instantiations don't need to be inside windows blocks.
-  template("windows_manifest") {
-    group(target_name) {
-      # Prevent unused variable warnings on non-Windows platforms.
-      assert(invoker.sources != "")
-      assert(!defined(invoker.deps) || invoker.deps != "")
-      assert(!defined(invoker.visibility) || invoker.visibility != "")
-    }
-  }
-}
diff --git a/build/config/win/visual_studio_version.gni b/build/config/win/visual_studio_version.gni
deleted file mode 100644
index 982fbe8..0000000
--- a/build/config/win/visual_studio_version.gni
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-declare_args() {
-  # Path to Visual Studio. If empty, the default is used which is to use the
-  # automatic toolchain in depot_tools. If set, you must also set the
-  # visual_studio_version and wdk_path.
-  visual_studio_path = ""
-
-  # Version of Visual Studio pointed to by the visual_studio_path.
-  # Currently always "2015".
-  visual_studio_version = ""
-
-  # Directory of the Windows driver kit. If visual_studio_path is empty, this
-  # will be auto-filled.
-  wdk_path = ""
-
-  # Full path to the Windows SDK, not including a backslash at the end.
-  # This value is the default location, override if you have a different
-  # installation location.
-  windows_sdk_path = "C:\Program Files (x86)\Windows Kits\10"
-}
-
-if (visual_studio_path == "") {
-  toolchain_data =
-      exec_script("../../vs_toolchain.py", [ "get_toolchain_dir" ], "scope")
-  visual_studio_path = toolchain_data.vs_path
-  windows_sdk_path = toolchain_data.sdk_path
-  visual_studio_version = toolchain_data.vs_version
-  wdk_path = toolchain_data.wdk_dir
-  visual_studio_runtime_dirs = toolchain_data.runtime_dirs
-} else {
-  assert(visual_studio_version != "",
-         "You must set the visual_studio_version if you set the path")
-  assert(wdk_path != "",
-         "You must set the wdk_path if you set the visual studio path")
-  visual_studio_runtime_dirs = []
-}
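As a sketch, overriding these arguments in args.gn might look like the following; the install paths are illustrative, not verified locations.

```
# Illustrative args.gn overrides for a local Visual Studio install.
visual_studio_path = "C:\Program Files (x86)\Microsoft Visual Studio 14.0"
visual_studio_version = "2015"
wdk_path = "C:\Program Files (x86)\Windows Kits\10"
```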
diff --git a/build/config/zip.gni b/build/config/zip.gni
deleted file mode 100644
index 8265e1d..0000000
--- a/build/config/zip.gni
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Creates a zip archive of the inputs.
-#
-# inputs (required)
-#     List of input files relative to the current directory.
-#
-# output (required)
-#     File name to write.
-#
-# base_dir (optional)
-#     If provided, the archive paths will be relative to this directory.
-#
-# deps, public_deps, data_deps, testonly, visibility (optional)
-#     Normal meaning.
-template("zip") {
-  action(target_name) {
-    script = "//build/android/gn/zip.py"
-    depfile = "$target_gen_dir/$target_name.d"
-    inputs = invoker.inputs
-    outputs = [
-      invoker.output,
-    ]
-
-    assert(defined(invoker.inputs))
-    rebase_inputs = rebase_path(invoker.inputs, root_build_dir)
-
-    assert(defined(invoker.output))
-    rebase_output = rebase_path(invoker.output, root_build_dir)
-
-    args = [
-      "--depfile",
-      rebase_path(depfile, root_build_dir),
-      "--inputs=$rebase_inputs",
-      "--output=$rebase_output",
-    ]
-    if (defined(invoker.base_dir)) {
-      args += [
-        "--base-dir",
-        rebase_path(invoker.base_dir, root_build_dir),
-      ]
-    }
-
-    forward_variables_from(invoker,
-                           [
-                             "testonly",
-                             "deps",
-                             "public_deps",
-                             "data_deps",
-                             "visibility",
-                           ])
-  }
-}
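A minimal, hypothetical invocation of the zip() template above; the target name and input files are invented.

```
# Hypothetical use of the zip() template.
import("//build/config/zip.gni")

zip("test_data_zip") {
  inputs = [
    "data/a.txt",
    "data/b.txt",
  ]
  output = "$target_gen_dir/test_data.zip"
  base_dir = "data"
}
```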
diff --git a/build/copy_test_data_ios.py b/build/copy_test_data_ios.py
deleted file mode 100755
index 6f0302f..0000000
--- a/build/copy_test_data_ios.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Copies test data files or directories into a given output directory."""
-
-import optparse
-import os
-import shutil
-import sys
-
-class WrongNumberOfArgumentsException(Exception):
-  pass
-
-def EscapePath(path):
-  """Returns a path with spaces escaped."""
-  return path.replace(" ", "\\ ")
-
-def ListFilesForPath(path):
-  """Returns a list of all the files under a given path."""
-  output = []
-  # Ignore revision control metadata directories.
-  if (os.path.basename(path).startswith('.git') or
-      os.path.basename(path).startswith('.svn')):
-    return output
-
-  # Files get returned without modification.
-  if not os.path.isdir(path):
-    output.append(path)
-    return output
-
-  # Directories get recursively expanded.
-  contents = os.listdir(path)
-  for item in contents:
-    full_path = os.path.join(path, item)
-    output.extend(ListFilesForPath(full_path))
-  return output
-
-def CalcInputs(inputs):
-  """Computes the full list of input files for a set of command-line arguments.
-  """
-  # |inputs| is a list of paths, which may be directories.
-  output = []
-  for input in inputs:
-    output.extend(ListFilesForPath(input))
-  return output
-
-def CopyFiles(relative_filenames, output_basedir):
-  """Copies files to the given output directory."""
-  for file in relative_filenames:
-    relative_dirname = os.path.dirname(file)
-    output_dir = os.path.join(output_basedir, relative_dirname)
-    output_filename = os.path.join(output_basedir, file)
-
-    # In cases where a directory has turned into a file or vice versa, delete it
-    # before copying it below.
-    if os.path.exists(output_dir) and not os.path.isdir(output_dir):
-      os.remove(output_dir)
-    if os.path.exists(output_filename) and os.path.isdir(output_filename):
-      shutil.rmtree(output_filename)
-
-    if not os.path.exists(output_dir):
-      os.makedirs(output_dir)
-    shutil.copy(file, output_filename)
-
-def DoMain(argv):
-  parser = optparse.OptionParser()
-  usage = 'Usage: %prog -o <output_dir> [--inputs] [--outputs] <input_files>'
-  parser.set_usage(usage)
-  parser.add_option('-o', dest='output_dir')
-  parser.add_option('--inputs', action='store_true', dest='list_inputs')
-  parser.add_option('--outputs', action='store_true', dest='list_outputs')
-  options, arglist = parser.parse_args(argv)
-
-  if len(arglist) == 0:
-    raise WrongNumberOfArgumentsException('<input_files> required.')
-
-  files_to_copy = CalcInputs(arglist)
-  escaped_files = [EscapePath(x) for x in CalcInputs(arglist)]
-  if options.list_inputs:
-    return '\n'.join(escaped_files)
-
-  if not options.output_dir:
-    raise WrongNumberOfArgumentsException('-o required.')
-
-  if options.list_outputs:
-    outputs = [os.path.join(options.output_dir, x) for x in escaped_files]
-    return '\n'.join(outputs)
-
-  CopyFiles(files_to_copy, options.output_dir)
-  return
-
-def main(argv):
-  try:
-    result = DoMain(argv[1:])
-  except WrongNumberOfArgumentsException, e:
-    print >>sys.stderr, e
-    return 1
-  if result:
-    print result
-  return 0
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv))
diff --git a/build/cp.py b/build/cp.py
deleted file mode 100755
index 0f32536..0000000
--- a/build/cp.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Copy a file.
-
-This module works much like the cp posix command - it takes 2 arguments:
-(src, dst) and copies the file with path |src| to |dst|.
-"""
-
-import os
-import shutil
-import sys
-
-
-def Main(src, dst):
-  # Use copy instead of copyfile to ensure the executable bit is copied.
-  return shutil.copy(src, os.path.normpath(dst))
-
-
-if __name__ == '__main__':
-  sys.exit(Main(sys.argv[1], sys.argv[2]))
diff --git a/build/detect_host_arch.py b/build/detect_host_arch.py
deleted file mode 100755
index 0e491bc..0000000
--- a/build/detect_host_arch.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Outputs host CPU architecture in format recognized by gyp."""
-
-import platform
-import re
-import sys
-
-
-def HostArch():
-  """Returns the host architecture with a predictable string."""
-  host_arch = platform.machine()
-
-  # Convert machine type to format recognized by gyp.
-  if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
-    host_arch = 'ia32'
-  elif host_arch in ['x86_64', 'amd64']:
-    host_arch = 'x64'
-  elif host_arch.startswith('arm'):
-    host_arch = 'arm'
-  elif host_arch.startswith('aarch64'):
-    host_arch = 'arm64'
-  elif host_arch.startswith('mips'):
-    host_arch = 'mips'
-  elif host_arch.startswith('ppc'):
-    host_arch = 'ppc'
-  elif host_arch.startswith('s390'):
-    host_arch = 's390'
-
-
-  # platform.machine is based on the running kernel. It's possible to use a
-  # 64-bit kernel with a 32-bit userland, e.g. to give the linker slightly
-  # more memory.
-  # Distinguish between different userland bitness by querying
-  # the python binary.
-  if host_arch == 'x64' and platform.architecture()[0] == '32bit':
-    host_arch = 'ia32'
-  if host_arch == 'arm64' and platform.architecture()[0] == '32bit':
-    host_arch = 'arm'
-
-  return host_arch
-
-def DoMain(_):
-  """Hook to be called from gyp without starting a separate python
-  interpreter."""
-  return HostArch()
-
-if __name__ == '__main__':
-  print DoMain([])
diff --git a/build/dir_exists.py b/build/dir_exists.py
deleted file mode 100755
index 70d367e..0000000
--- a/build/dir_exists.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Writes True if the argument is a directory."""
-
-import os.path
-import sys
-
-def main():
-  sys.stdout.write(_is_dir(sys.argv[1]))
-  return 0
-
-def _is_dir(dir_name):
-  return str(os.path.isdir(dir_name))
-
-def DoMain(args):
-  """Hook to be called from gyp without starting a separate python
-  interpreter."""
-  return _is_dir(args[0])
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/docs/mac_hermetic_toolchain.md b/build/docs/mac_hermetic_toolchain.md
deleted file mode 100644
index b1ee13e..0000000
--- a/build/docs/mac_hermetic_toolchain.md
+++ /dev/null
@@ -1,44 +0,0 @@
-# Mac and iOS hermetic toolchain instructions
-
-The following is a short explanation of why we use the hermetic toolchain
-and instructions on how to roll a new toolchain.
-
-## How to roll a new hermetic toolchain.
-
-1. Download a new version of Xcode, and confirm either mac or ios builds
-   properly with this new version.
-
-2. Run the following command:
-
-   ```
-   src/build/package_mac_toolchain.py /path/to/Xcode.app/ [ios|mac]
-   ```
-
-   The script will create a subset of the toolchain necessary for a build, and
-   upload it to be used by hermetic builds.
-
-   If for some reason this toolchain version has already been uploaded, the
-   script will ask if we should create a sub revision.  This can be necessary when
-   the package script has been updated to compress additional files.
-
-3. Create a CL with updated [MAC|IOS]_TOOLCHAIN_VERSION and _SUB_REVISION in
-   src/build/mac_toolchain.py with the version created by the previous command.
-
-4. Run the CL through the trybots to confirm the roll works.
-
-## Why we use a hermetic toolchain.
-
-Building Chrome Mac currently requires many binaries that come bundled with
-Xcode, as well as the macOS and iPhoneOS SDKs [also bundled with Xcode].  Note that
-Chrome ships its own version of clang [compiler], but is dependent on Xcode
-for these other binaries.
-
-Chrome should be built against the latest SDK available, but historically,
-updating the SDK has been nontrivial.  Additionally, bot system
-installs can range from Xcode 5 on some bots to the latest and
-greatest.  Using a hermetic toolchain has two main benefits:
-
-1. Build Chrome with a well-defined toolchain [rather than whatever happens to
-be installed on the machine].
-
-2. Easily roll/update the toolchain.
diff --git a/build/dotfile_settings.gni b/build/dotfile_settings.gni
deleted file mode 100644
index 8382c75..0000000
--- a/build/dotfile_settings.gni
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file contains variables that can be imported into a repo's dotfile (.gn)
-# to make it easier to roll new versions of //build in.
-
-build_dotfile_settings = {
-  exec_script_whitelist = [
-    "//build/config/android/config.gni",
-    "//build/config/android/internal_rules.gni",
-    "//build/config/android/rules.gni",
-    "//build/config/compiler/BUILD.gn",
-    "//build/config/gcc/gcc_version.gni",
-    "//build/config/host_byteorder.gni",
-    "//build/config/ios/ios_sdk.gni",
-    "//build/config/linux/BUILD.gn",
-    "//build/config/linux/pkg_config.gni",
-    "//build/config/linux/atk/BUILD.gn",
-    "//build/config/linux/dri/BUILD.gn",
-    "//build/config/mac/mac_sdk.gni",
-    "//build/config/mac/rules.gni",
-    "//build/config/posix/BUILD.gn",
-    "//build/config/sysroot.gni",
-    "//build/config/win/BUILD.gn",
-    "//build/config/win/visual_studio_version.gni",
-    "//build/toolchain/BUILD.gn",
-    "//build/toolchain/concurrent_links.gni",
-    "//build/toolchain/mac/BUILD.gn",
-    "//build/toolchain/nacl/BUILD.gn",
-    "//build/toolchain/toolchain.gni",
-    "//build/toolchain/win/BUILD.gn",
-    "//build/util/branding.gni",
-    "//build/util/version.gni",
-  ]
-}
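A sketch of the intended consumption from a repository's .gn dotfile, assuming the standard BUILDCONFIG.gn location:

```
# Hypothetical .gn dotfile pulling in the exec_script whitelist.
import("//build/dotfile_settings.gni")

buildconfig = "//build/config/BUILDCONFIG.gn"

exec_script_whitelist = build_dotfile_settings.exec_script_whitelist
```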
diff --git a/build/download_nacl_toolchains.py b/build/download_nacl_toolchains.py
deleted file mode 100755
index ea9e0cd..0000000
--- a/build/download_nacl_toolchains.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Shim to run nacl toolchain download script only if there is a nacl dir."""
-
-import os
-import shutil
-import sys
-
-
-def Main(args):
-  # Exit early if disable_nacl=1.
-  if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''):
-    return 0
-  if 'OS=android' in os.environ.get('GYP_DEFINES', ''):
-    return 0
-  script_dir = os.path.dirname(os.path.abspath(__file__))
-  src_dir = os.path.dirname(script_dir)
-  nacl_dir = os.path.join(src_dir, 'native_client')
-  nacl_build_dir = os.path.join(nacl_dir, 'build')
-  package_version_dir = os.path.join(nacl_build_dir, 'package_version')
-  package_version = os.path.join(package_version_dir, 'package_version.py')
-  if not os.path.exists(package_version):
-    print "Can't find '%s'" % package_version
-    print 'Presumably you are intentionally building without NativeClient.'
-    print 'Skipping NativeClient toolchain download.'
-    sys.exit(0)
-  sys.path.insert(0, package_version_dir)
-  import package_version
-
-  # BUG:
-  # We remove this --optional-pnacl argument, and instead replace it with
-  # --no-pnacl for most cases.  However, if the bot name is an sdk
-  # bot then we will go ahead and download it.  This prevents increasing the
-  # gclient sync time for developers, or standard Chrome bots.
-  if '--optional-pnacl' in args:
-    args.remove('--optional-pnacl')
-    use_pnacl = False
-    buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '')
-    if 'pnacl' in buildbot_name and 'sdk' in buildbot_name:
-      use_pnacl = True
-    if use_pnacl:
-      print '\n*** DOWNLOADING PNACL TOOLCHAIN ***\n'
-    else:
-      args = ['--exclude', 'pnacl_newlib'] + args
-
-  # Only download the ARM gcc toolchain if we are building for ARM
-  # TODO(olonho): we need to invent a more reliable way to get build
-  # configuration info, to know if we're building for ARM.
-  if 'target_arch=arm' not in os.environ.get('GYP_DEFINES', ''):
-      args = ['--exclude', 'nacl_arm_newlib'] + args
-
-  return package_version.main(args)
-
-
-if __name__ == '__main__':
-  sys.exit(Main(sys.argv[1:]))
diff --git a/build/download_translation_unit_tool.py b/build/download_translation_unit_tool.py
deleted file mode 100755
index b60d33a..0000000
--- a/build/download_translation_unit_tool.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Script to download Clang translation_unit tool from google storage."""
-
-import find_depot_tools
-import json
-import os
-import shutil
-import subprocess
-import sys
-import tarfile
-
-SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
-
-
-DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
-GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
-
-LLVM_BUILD_PATH = os.path.join(CHROME_SRC, 'third_party', 'llvm-build',
-                               'Release+Asserts')
-CLANG_UPDATE_PY = os.path.join(CHROME_SRC, 'tools', 'clang', 'scripts',
-                               'update.py')
-
-CLANG_BUCKET = 'gs://chromium-browser-clang'
-
-
-def main():
-  clang_revision = subprocess.check_output([sys.executable, CLANG_UPDATE_PY,
-                                            '--print-revision']).rstrip()
-  targz_name = 'translation_unit-%s.tgz' % clang_revision
-
-  if sys.platform == 'win32' or sys.platform == 'cygwin':
-    cds_full_url = CLANG_BUCKET + '/Win/' + targz_name
-  elif sys.platform == 'darwin':
-    cds_full_url = CLANG_BUCKET + '/Mac/' + targz_name
-  else:
-    assert sys.platform.startswith('linux')
-    cds_full_url = CLANG_BUCKET + '/Linux_x64/' + targz_name
-
-  os.chdir(LLVM_BUILD_PATH)
-
-  subprocess.check_call([sys.executable, GSUTIL_PATH,
-                         'cp', cds_full_url, targz_name])
-  tarfile.open(name=targz_name, mode='r:gz').extractall(path=LLVM_BUILD_PATH)
-
-  os.remove(targz_name)
-  return 0
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/env_dump.py b/build/env_dump.py
deleted file mode 100755
index 3f82173..0000000
--- a/build/env_dump.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This script can either source a file and dump the environment changes done by
-# it, or simply dump the current environment as JSON into a file.
-
-import json
-import optparse
-import os
-import pipes
-import subprocess
-import sys
-
-
-def main():
-  parser = optparse.OptionParser()
-  parser.add_option('-f', '--output-json',
-                    help='File to dump the environment as JSON into.')
-  parser.add_option(
-      '-d', '--dump-mode', action='store_true',
-      help='Dump the environment to sys.stdout and exit immediately.')
-
-  parser.disable_interspersed_args()
-  options, args = parser.parse_args()
-  if options.dump_mode:
-    if args or options.output_json:
-      parser.error('Cannot specify args or --output-json with --dump-mode.')
-    json.dump(dict(os.environ), sys.stdout)
-  else:
-    if not options.output_json:
-      parser.error('Requires --output-json option.')
-
-    envsetup_cmd = ' '.join(map(pipes.quote, args))
-    full_cmd = [
-        'bash', '-c',
-        '. %s > /dev/null; %s -d' % (envsetup_cmd, os.path.abspath(__file__))
-    ]
-    try:
-      output = subprocess.check_output(full_cmd)
-    except Exception as e:
-      sys.exit('Error running %s and dumping environment.' % envsetup_cmd)
-
-    env_diff = {}
-    new_env = json.loads(output)
-    for k, val in new_env.items():
-      if k == '_' or (k in os.environ and os.environ[k] == val):
-        continue
-      env_diff[k] = val
-    with open(options.output_json, 'w') as f:
-      json.dump(env_diff, f)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/experimental/install-build-deps.py b/build/experimental/install-build-deps.py
deleted file mode 100755
index a286a4e..0000000
--- a/build/experimental/install-build-deps.py
+++ /dev/null
@@ -1,432 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import operator
-import os
-import platform
-import re
-import subprocess
-import sys
-
-
-SUPPORTED_UBUNTU_VERSIONS = (
-  {'number': '14.04', 'codename': 'trusty'},
-  {'number': '14.10', 'codename': 'utopic'},
-  {'number': '15.04', 'codename': 'vivid'},
-  {'number': '15.10', 'codename': 'wily'},
-)
-
-
-# Packages needed for chromeos only.
-_packages_chromeos_dev = (
-  'libbluetooth-dev',
-  'libxkbcommon-dev',
-  'realpath',
-)
-
-
-# Packages needed for development.
-_packages_dev = (
-  'bison',
-  'cdbs',
-  'curl',
-  'devscripts',
-  'dpkg-dev',
-  'elfutils',
-  'fakeroot',
-  'flex',
-  'fonts-ipafont',
-  'fonts-thai-tlwg',
-  'g++',
-  'git-core',
-  'git-svn',
-  'gperf',
-  'language-pack-da',
-  'language-pack-fr',
-  'language-pack-he',
-  'language-pack-zh-hant',
-  'libapache2-mod-php5',
-  'libasound2-dev',
-  'libav-tools',
-  'libbrlapi-dev',
-  'libbz2-dev',
-  'libcairo2-dev',
-  'libcap-dev',
-  'libcups2-dev',
-  'libcurl4-gnutls-dev',
-  'libdrm-dev',
-  'libelf-dev',
-  'libgconf2-dev',
-  'libglib2.0-dev',
-  'libglu1-mesa-dev',
-  'libgnome-keyring-dev',
-  'libgtk2.0-dev',
-  'libkrb5-dev',
-  'libnspr4-dev',
-  'libnss3-dev',
-  'libpam0g-dev',
-  'libpci-dev',
-  'libpulse-dev',
-  'libsctp-dev',
-  'libspeechd-dev',
-  'libsqlite3-dev',
-  'libssl-dev',
-  'libudev-dev',
-  'libwww-perl',
-  'libxslt1-dev',
-  'libxss-dev',
-  'libxt-dev',
-  'libxtst-dev',
-  'openbox',
-  'patch',
-  'perl',
-  'php5-cgi',
-  'pkg-config',
-  'python',
-  'python-cherrypy3',
-  'python-crypto',
-  'python-dev',
-  'python-numpy',
-  'python-opencv',
-  'python-openssl',
-  'python-psutil',
-  'python-yaml',
-  'rpm',
-  'ruby',
-  'subversion',
-  'ttf-dejavu-core',
-  'ttf-indic-fonts',
-  'wdiff',
-  'zip',
-)
-
-
-# Run-time libraries required by chromeos only.
-_packages_chromeos_lib = (
-  'libbz2-1.0',
-  'libpulse0',
-)
-
-
-# Full list of required run-time libraries.
-_packages_lib = (
-  'libasound2',
-  'libatk1.0-0',
-  'libc6',
-  'libcairo2',
-  'libcap2',
-  'libcups2',
-  'libexpat1',
-  'libfontconfig1',
-  'libfreetype6',
-  'libglib2.0-0',
-  'libgnome-keyring0',
-  'libgtk2.0-0',
-  'libpam0g',
-  'libpango1.0-0',
-  'libpci3',
-  'libpcre3',
-  'libpixman-1-0',
-  'libpng12-0',
-  'libspeechd2',
-  'libsqlite3-0',
-  'libstdc++6',
-  'libx11-6',
-  'libx11-xcb1',
-  'libxau6',
-  'libxcb1',
-  'libxcomposite1',
-  'libxcursor1',
-  'libxdamage1',
-  'libxdmcp6',
-  'libxext6',
-  'libxfixes3',
-  'libxi6',
-  'libxinerama1',
-  'libxrandr2',
-  'libxrender1',
-  'libxtst6',
-  'zlib1g',
-)
-
-
-# Debugging symbols for all of the run-time libraries.
-_packages_dbg = (
-  'libatk1.0-dbg',
-  'libc6-dbg',
-  'libcairo2-dbg',
-  'libfontconfig1-dbg',
-  'libglib2.0-0-dbg',
-  'libgtk2.0-0-dbg',
-  'libpango1.0-0-dbg',
-  'libpcre3-dbg',
-  'libpixman-1-0-dbg',
-  'libsqlite3-0-dbg',
-  'libx11-6-dbg',
-  'libx11-xcb1-dbg',
-  'libxau6-dbg',
-  'libxcb1-dbg',
-  'libxcomposite1-dbg',
-  'libxcursor1-dbg',
-  'libxdamage1-dbg',
-  'libxdmcp6-dbg',
-  'libxext6-dbg',
-  'libxfixes3-dbg',
-  'libxi6-dbg',
-  'libxinerama1-dbg',
-  'libxrandr2-dbg',
-  'libxrender1-dbg',
-  'libxtst6-dbg',
-  'zlib1g-dbg',
-)
-
-
-# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf.
-_packages_lib32 = (
-  'linux-libc-dev:i386',
-)
-
-
-# arm cross toolchain packages needed to build chrome on armhf.
-_packages_arm = (
-  'g++-arm-linux-gnueabihf',
-  'libc6-dev-armhf-cross',
-  'linux-libc-dev-armhf-cross',
-)
-
-
-# Packages to build NaCl, its toolchains, and its ports.
-_packages_naclports = (
-  'ant',
-  'autoconf',
-  'bison',
-  'cmake',
-  'gawk',
-  'intltool',
-  'xsltproc',
-  'xutils-dev',
-)
-_packages_nacl = (
-  'g++-mingw-w64-i686',
-  'lib32ncurses5-dev',
-  'lib32z1-dev',
-  'libasound2:i386',
-  'libcap2:i386',
-  'libelf-dev:i386',
-  'libfontconfig1:i386',
-  'libgconf-2-4:i386',
-  'libglib2.0-0:i386',
-  'libgpm2:i386',
-  'libgtk2.0-0:i386',
-  'libncurses5:i386',
-  'libnss3:i386',
-  'libpango1.0-0:i386',
-  'libssl1.0.0:i386',
-  'libtinfo-dev',
-  'libtinfo-dev:i386',
-  'libtool',
-  'libxcomposite1:i386',
-  'libxcursor1:i386',
-  'libxdamage1:i386',
-  'libxi6:i386',
-  'libxrandr2:i386',
-  'libxss1:i386',
-  'libxtst6:i386',
-  'texinfo',
-  'xvfb',
-)
-
-
-def is_userland_64_bit():
-  return platform.architecture()[0] == '64bit'
-
-
-def package_exists(pkg):
-  return pkg in subprocess.check_output(['apt-cache', 'pkgnames']).splitlines()
-
-
-def lsb_release_short_codename():
-  return subprocess.check_output(
-      ['lsb_release', '--codename', '--short']).strip()
-
-
-def write_error(message):
-  sys.stderr.write('ERROR: %s\n' % message)
-  sys.stderr.flush()
-
-
-def nonfatal_get_output(*popenargs, **kwargs):
-  process = subprocess.Popen(
-      stdout=subprocess.PIPE, stderr=subprocess.PIPE, *popenargs, **kwargs)
-  stdout, stderr = process.communicate()
-  retcode = process.poll()
-  return retcode, stdout, stderr
-
-
-def compute_dynamic_package_lists():
-  global _packages_arm
-  global _packages_dbg
-  global _packages_dev
-  global _packages_lib
-  global _packages_lib32
-  global _packages_nacl
-
-  if is_userland_64_bit():
-    # 64-bit systems need a minimum set of 32-bit compat packages
-    # for the pre-built NaCl binaries.
-    _packages_dev += (
-      'lib32gcc1',
-      'lib32stdc++6',
-      'libc6-i386',
-    )
-
-    # When cross building for arm/Android on 64-bit systems the host binaries
-    # that are part of v8 need to be compiled with -m32 which means
-    # that basic multilib support is needed.
-    # gcc-multilib conflicts with the arm cross compiler (at least in trusty)
-    # but g++-X.Y-multilib gives us the 32-bit support that we need. Find out
-    # the appropriate value of X and Y by seeing what version the current
-    # distribution's g++-multilib package depends on.
-    output = subprocess.check_output(['apt-cache', 'depends', 'g++-multilib'])
-    multilib_package = re.search(r'g\+\+-[0-9.]+-multilib', output).group()
-    _packages_lib32 += (multilib_package,)
-
-  lsb_codename = lsb_release_short_codename()
-
-  # Find the proper version of libstdc++6-4.x-dbg.
-  if lsb_codename == 'trusty':
-    _packages_dbg += ('libstdc++6-4.8-dbg',)
-  else:
-    _packages_dbg += ('libstdc++6-4.9-dbg',)
-
-  # Workaround for a dependency issue on Ubuntu/Trusty: http://crbug.com/435056
-  if lsb_codename == 'trusty':
-    _packages_arm += (
-      'g++-4.8-multilib-arm-linux-gnueabihf',
-      'gcc-4.8-multilib-arm-linux-gnueabihf',
-    )
-
-  # Find the proper version of libgbm-dev. We can't just install libgbm-dev as
-  # it depends on mesa, and only one version of mesa can exist on the system.
-  # Hence we must match the same version or this entire script will fail.
-  mesa_variant = ''
-  for variant in ('-lts-trusty', '-lts-utopic'):
-    rc, stdout, stderr = nonfatal_get_output(
-        ['dpkg-query', '-Wf\'${Status}\'', 'libgl1-mesa-glx' + variant])
-    if 'ok installed' in stdout:
-      mesa_variant = variant
-  _packages_dev += (
-    'libgbm-dev' + mesa_variant,
-    'libgl1-mesa-dev' + mesa_variant,
-    'libgles2-mesa-dev' + mesa_variant,
-    'mesa-common-dev' + mesa_variant,
-  )
-
-  if package_exists('ttf-mscorefonts-installer'):
-    _packages_dev += ('ttf-mscorefonts-installer',)
-  else:
-    _packages_dev += ('msttcorefonts',)
-
-  if package_exists('libnspr4-dbg'):
-    _packages_dbg += ('libnspr4-dbg', 'libnss3-dbg')
-    _packages_lib += ('libnspr4', 'libnss3')
-  else:
-    _packages_dbg += ('libnspr4-0d-dbg', 'libnss3-1d-dbg')
-    _packages_lib += ('libnspr4-0d', 'libnss3-1d')
-
-  if package_exists('libjpeg-dev'):
-    _packages_dev += ('libjpeg-dev',)
-  else:
-    _packages_dev += ('libjpeg62-dev',)
-
-  if package_exists('libudev1'):
-    _packages_dev += ('libudev1',)
-    _packages_nacl += ('libudev1:i386',)
-  else:
-    _packages_dev += ('libudev0',)
-    _packages_nacl += ('libudev0:i386',)
-
-  if package_exists('libbrlapi0.6'):
-    _packages_dev += ('libbrlapi0.6',)
-  else:
-    _packages_dev += ('libbrlapi0.5',)
-
-  if package_exists('apache2-bin'):
-    _packages_dev += ('apache2-bin',)
-  else:
-    _packages_dev += ('apache2.2-bin',)
-
-  if package_exists('xfonts-mathml'):
-    _packages_dev += ('xfonts-mathml',)
-
-  # Some packages are only needed if the distribution actually supports
-  # installing them.
-  if package_exists('appmenu-gtk'):
-    _packages_lib += ('appmenu-gtk',)
-
-  _packages_dev += _packages_chromeos_dev
-  _packages_lib += _packages_chromeos_lib
-  _packages_nacl += _packages_naclports
-
-
-def quick_check(packages):
-  rc, stdout, stderr = nonfatal_get_output([
-      'dpkg-query', '-W', '-f', '${PackageSpec}:${Status}\n'] + list(packages))
-  if rc == 0 and not stderr:
-    return 0
-  print stderr
-  return 1
-
-
-def main(argv):
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--quick-check', action='store_true',
-                      help='quickly try to determine if dependencies are '
-                           'installed (this avoids interactive prompts and '
-                           'sudo commands so might not be 100% accurate)')
-  parser.add_argument('--unsupported', action='store_true',
-                      help='attempt installation even on unsupported systems')
-  args = parser.parse_args(argv)
-
-  lsb_codename = lsb_release_short_codename()
-  if not args.unsupported and not args.quick_check:
-    if lsb_codename not in map(
-        operator.itemgetter('codename'), SUPPORTED_UBUNTU_VERSIONS):
-      supported_ubuntus = ['%(number)s (%(codename)s)' % v
-                           for v in SUPPORTED_UBUNTU_VERSIONS]
-      write_error('Only Ubuntu %s are currently supported.' %
-                  ', '.join(supported_ubuntus))
-      return 1
-
-    if platform.machine() not in ('i686', 'x86_64'):
-      write_error('Only x86 architectures are currently supported.')
-      return 1
-
-  if os.geteuid() != 0 and not args.quick_check:
-    print 'Running as non-root user.'
-    print 'You might have to enter your password one or more times'
-    print 'for \'sudo\'.'
-    print
-
-  compute_dynamic_package_lists()
-
-  packages = (_packages_dev + _packages_lib + _packages_dbg + _packages_lib32 +
-              _packages_arm + _packages_nacl)
-  def packages_key(pkg):
-    s = pkg.rsplit(':', 1)
-    if len(s) == 1:
-      return (s, '')
-    return s
-  packages = sorted(set(packages), key=packages_key)
-
-  if args.quick_check:
-    return quick_check(packages)
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
diff --git a/build/extract_from_cab.py b/build/extract_from_cab.py
deleted file mode 100755
index 080370c..0000000
--- a/build/extract_from_cab.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Extracts a single file from a CAB archive."""
-
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-def run_quiet(*args):
-  """Run 'expand' suppressing noisy output. Returns returncode from process."""
-  popen = subprocess.Popen(args, stdout=subprocess.PIPE)
-  out, _ = popen.communicate()
-  if popen.returncode:
-    # expand emits errors to stdout, so if we fail, then print that out.
-    print out
-  return popen.returncode
-
-def main():
-  if len(sys.argv) != 4:
-    print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
-    return 1
-
-  [cab_path, archived_file, output_dir] = sys.argv[1:]
-
-  # Expand.exe does its work in a fixed-named temporary directory created within
-  # the given output directory. This is a problem for concurrent extractions, so
-  # create a unique temp dir within the desired output directory to work around
-  # this limitation.
-  temp_dir = tempfile.mkdtemp(dir=output_dir)
-
-  try:
-    # Invoke the Windows expand utility to extract the file.
-    level = run_quiet('expand', cab_path, '-F:' + archived_file, temp_dir)
-    if level == 0:
-      # Move the output file into place, preserving expand.exe's behavior of
-      # paving over any preexisting file.
-      output_file = os.path.join(output_dir, archived_file)
-      try:
-        os.remove(output_file)
-      except OSError:
-        pass
-      os.rename(os.path.join(temp_dir, archived_file), output_file)
-  finally:
-    shutil.rmtree(temp_dir, True)
-
-  if level != 0:
-    return level
-
-  # The expand utility preserves the modification date and time of the archived
-  # file. Touch the extracted file. This helps build systems that compare the
-  # modification times of input and output files to determine whether to do an
-  # action.
-  os.utime(os.path.join(output_dir, archived_file), None)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/find_depot_tools.py b/build/find_depot_tools.py
deleted file mode 100755
index 5c496e7..0000000
--- a/build/find_depot_tools.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Small utility function to find depot_tools and add it to the python path.
-
-Will throw an ImportError exception if depot_tools can't be found since it
-imports breakpad.
-
-This can also be used as a standalone script to print out the depot_tools
-directory location.
-"""
-
-import os
-import sys
-
-
-# Path to //src
-SRC = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
-
-
-def IsRealDepotTools(path):
-  expanded_path = os.path.expanduser(path)
-  return os.path.isfile(os.path.join(expanded_path, 'gclient.py'))
-
-
-def add_depot_tools_to_path():
-  """Search for depot_tools and add it to sys.path."""
-  # First, check if we have a DEPS'd in "depot_tools".
-  deps_depot_tools = os.path.join(SRC, 'third_party', 'depot_tools')
-  if IsRealDepotTools(deps_depot_tools):
-    # Put the pinned version at the start of the sys.path, in case there
-    # are other non-pinned versions already on the sys.path.
-    sys.path.insert(0, deps_depot_tools)
-    return deps_depot_tools
-
-  # Then look if depot_tools is already in PYTHONPATH.
-  for i in sys.path:
-    if i.rstrip(os.sep).endswith('depot_tools') and IsRealDepotTools(i):
-      return i
-  # Then look if depot_tools is in PATH, common case.
-  for i in os.environ['PATH'].split(os.pathsep):
-    if IsRealDepotTools(i):
-      sys.path.append(i.rstrip(os.sep))
-      return i
-  # Rare case: it's not even in PATH; look upward, up to the root.
-  root_dir = os.path.dirname(os.path.abspath(__file__))
-  previous_dir = os.path.abspath(__file__)
-  while root_dir and root_dir != previous_dir:
-    i = os.path.join(root_dir, 'depot_tools')
-    if IsRealDepotTools(i):
-      sys.path.append(i)
-      return i
-    previous_dir = root_dir
-    root_dir = os.path.dirname(root_dir)
-  print >> sys.stderr, 'Failed to find depot_tools'
-  return None
-
-DEPOT_TOOLS_PATH = add_depot_tools_to_path()
-
-# pylint: disable=W0611
-import breakpad
-
-
-def main():
-  if DEPOT_TOOLS_PATH is None:
-    return 1
-  print DEPOT_TOOLS_PATH
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/find_isolated_tests.py b/build/find_isolated_tests.py
deleted file mode 100755
index c5b3ab7..0000000
--- a/build/find_isolated_tests.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Scans build output directory for .isolated files, calculates their SHA1
-hashes, stores final list in JSON document and then removes *.isolated files
-found (to ensure no stale *.isolated stay around on the next build).
-
-Used to figure out what tests were build in isolated mode to trigger these
-tests to run on swarming.
-
-For more info see:
-https://sites.google.com/a/chromium.org/dev/developers/testing/isolated-testing
-"""
-
-import glob
-import hashlib
-import json
-import optparse
-import os
-import re
-import sys
-
-
-def hash_file(filepath):
-  """Calculates the hash of a file without reading it all in memory at once."""
-  digest = hashlib.sha1()
-  with open(filepath, 'rb') as f:
-    while True:
-      chunk = f.read(1024*1024)
-      if not chunk:
-        break
-      digest.update(chunk)
-  return digest.hexdigest()
-
-
-def main():
-  parser = optparse.OptionParser(
-      usage='%prog --build-dir <path> --output-json <path>',
-      description=sys.modules[__name__].__doc__)
-  parser.add_option(
-      '--build-dir',
-      help='Path to a directory to search for *.isolated files.')
-  parser.add_option(
-      '--output-json',
-      help='File to dump JSON results into.')
-
-  options, _ = parser.parse_args()
-  if not options.build_dir:
-    parser.error('--build-dir option is required')
-  if not options.output_json:
-    parser.error('--output-json option is required')
-
-  result = {}
-
-  # Get the file hash values and output the pair.
-  pattern = os.path.join(options.build_dir, '*.isolated')
-  for filepath in sorted(glob.glob(pattern)):
-    test_name = os.path.splitext(os.path.basename(filepath))[0]
-    if re.match(r'^.+?\.\d$', test_name):
-      # It's a split .isolated file, e.g. foo.0.isolated. Ignore these.
-      continue
-
-    # TODO(csharp): Remove deletion once the isolate tracked dependencies are
-    # inputs for the isolated files.
-    sha1_hash = hash_file(filepath)
-    os.remove(filepath)
-    result[test_name] = sha1_hash
-
-  with open(options.output_json, 'wb') as f:
-    json.dump(result, f)
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/fix_gn_headers.py b/build/fix_gn_headers.py
deleted file mode 100755
index 01ff764..0000000
--- a/build/fix_gn_headers.py
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Fix header files missing in GN.
-
-This script takes the missing header files from check_gn_headers.py, and
-try to fix them by adding them to the GN files.
-Manual cleaning up is likely required afterwards.
-"""
-
-import argparse
-import os
-import re
-import subprocess
-import sys
-
-
-def GitGrep(pattern):
-  p = subprocess.Popen(
-      ['git', 'grep', '-En', pattern, '--', '*.gn', '*.gni'],
-      stdout=subprocess.PIPE)
-  out, _ = p.communicate()
-  return out, p.returncode
-
-
-def ValidMatches(basename, cc, grep_lines):
-  """Filter out 'git grep' matches with header files already."""
-  matches = []
-  for line in grep_lines:
-    gnfile, linenr, contents = line.split(':')
-    linenr = int(linenr)
-    new = re.sub(cc, basename, contents)
-    lines = open(gnfile).read().splitlines()
-    assert contents in lines[linenr - 1]
-    # Skip if it's already there. It could be before or after the match.
-    if lines[linenr] == new:
-      continue
-    if lines[linenr - 2] == new:
-      continue
-    print '    ', gnfile, linenr, new
-    matches.append((gnfile, linenr, new))
-  return matches
-
-
-def AddHeadersNextToCC(headers, skip_ambiguous=True):
-  """Add header files next to the corresponding .cc files in GN files.
-
-  When skip_ambiguous is True, skip if multiple .cc files are found.
-  Returns unhandled headers.
-
-  Manual cleaning up is likely required, especially if not skip_ambiguous.
-  """
-  edits = {}
-  unhandled = []
-  for filename in headers:
-    filename = filename.strip()
-    if not (filename.endswith('.h') or filename.endswith('.hh')):
-      continue
-    basename = os.path.basename(filename)
-    print filename
-    cc = r'\b' + os.path.splitext(basename)[0] + r'\.(cc|cpp|mm)\b'
-    out, returncode = GitGrep('(/|")' + cc + '"')
-    if returncode != 0 or not out:
-      unhandled.append(filename)
-      continue
-
-    matches = ValidMatches(basename, cc, out.splitlines())
-
-    if len(matches) == 0:
-      continue
-    if len(matches) > 1:
-      print '\n[WARNING] Ambiguous matching for', filename
-      for i in enumerate(matches, 1):
-        print '%d: %s' % (i[0], i[1])
-      print
-      if skip_ambiguous:
-        continue
-
-      picked = raw_input('Pick the matches ("2,3" for multiple): ')
-      try:
-        matches = [matches[int(i) - 1] for i in picked.split(',')]
-      except (ValueError, IndexError):
-        continue
-
-    for match in matches:
-      gnfile, linenr, new = match
-      print '  ', gnfile, linenr, new
-      edits.setdefault(gnfile, {})[linenr] = new
-
-  for gnfile in edits:
-    lines = open(gnfile).read().splitlines()
-    for l in sorted(edits[gnfile].keys(), reverse=True):
-      lines.insert(l, edits[gnfile][l])
-    open(gnfile, 'w').write('\n'.join(lines) + '\n')
-
-  return unhandled
-
-
-def AddHeadersToSources(headers, skip_ambiguous=True):
-  """Add header files to the sources list in the first GN file.
-
-  The target GN file is the first one up the parent directories.
-  This usually does the wrong thing for _test files if the test and the main
-  target are in the same .gn file.
-  When skip_ambiguous is True, skip if multiple sources arrays are found.
-
-  "git cl format" afterwards is required. Manually cleaning up duplicated items
-  is likely required.
-  """
-  for filename in headers:
-    filename = filename.strip()
-    print filename
-    dirname = os.path.dirname(filename)
-    while not os.path.exists(os.path.join(dirname, 'BUILD.gn')):
-      dirname = os.path.dirname(dirname)
-    rel = filename[len(dirname) + 1:]
-    gnfile = os.path.join(dirname, 'BUILD.gn')
-
-    lines = open(gnfile).read().splitlines()
-    matched = [i for i, l in enumerate(lines) if ' sources = [' in l]
-    if skip_ambiguous and len(matched) > 1:
-      print '[WARNING] Multiple sources in', gnfile
-      continue
-
-    if len(matched) < 1:
-      continue
-    print '  ', gnfile, rel
-    index = matched[0]
-    lines.insert(index + 1, '"%s",' % rel)
-    open(gnfile, 'w').write('\n'.join(lines) + '\n')
-
-
-def RemoveHeader(headers, skip_ambiguous=True):
-  """Remove non-existing headers in GN files.
-
-  When skip_ambiguous is True, skip if multiple matches are found.
-  """
-  edits = {}
-  unhandled = []
-  for filename in headers:
-    filename = filename.strip()
-    if not (filename.endswith('.h') or filename.endswith('.hh')):
-      continue
-    basename = os.path.basename(filename)
-    print filename
-    out, returncode = GitGrep('(/|")' + basename + '"')
-    if returncode != 0 or not out:
-      unhandled.append(filename)
-      print '  Not found'
-      continue
-
-    grep_lines = out.splitlines()
-    matches = []
-    for line in grep_lines:
-      gnfile, linenr, contents = line.split(':')
-      print '    ', gnfile, linenr, contents
-      linenr = int(linenr)
-      lines = open(gnfile).read().splitlines()
-      assert contents in lines[linenr - 1]
-      matches.append((gnfile, linenr, contents))
-
-    if len(matches) == 0:
-      continue
-    if len(matches) > 1:
-      print '\n[WARNING] Ambiguous matching for', filename
-      for i in enumerate(matches, 1):
-        print '%d: %s' % (i[0], i[1])
-      print
-      if skip_ambiguous:
-        continue
-
-      picked = raw_input('Pick the matches ("2,3" for multiple): ')
-      try:
-        matches = [matches[int(i) - 1] for i in picked.split(',')]
-      except (ValueError, IndexError):
-        continue
-
-    for match in matches:
-      gnfile, linenr, contents = match
-      print '  ', gnfile, linenr, contents
-      edits.setdefault(gnfile, set()).add(linenr)
-
-  for gnfile in edits:
-    lines = open(gnfile).read().splitlines()
-    for l in sorted(edits[gnfile], reverse=True):
-      lines.pop(l - 1)
-    open(gnfile, 'w').write('\n'.join(lines) + '\n')
-
-  return unhandled
-
-
-def main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument('input_file', help="missing or non-existing headers, "
-                      "output of check_gn_headers.py")
-  parser.add_argument('--prefix',
-                      help="only handle path name with this prefix")
-  parser.add_argument('--remove', action='store_true',
-                      help="treat input_file as non-existing headers")
-
-  args, _extras = parser.parse_known_args()
-
-  headers = open(args.input_file).readlines()
-
-  if args.prefix:
-    headers = [i for i in headers if i.startswith(args.prefix)]
-
-  if args.remove:
-    RemoveHeader(headers, False)
-  else:
-    unhandled = AddHeadersNextToCC(headers)
-    AddHeadersToSources(unhandled)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/fuchsia/OWNERS b/build/fuchsia/OWNERS
deleted file mode 100644
index 22e1b69..0000000
--- a/build/fuchsia/OWNERS
+++ /dev/null
@@ -1,9 +0,0 @@
-jamesr@chromium.org
-kmarshall@chromium.org
-scottmg@chromium.org
-sergeyu@chromium.org
-thakis@chromium.org
-wez@chromium.org
-
-# TEAM: cr-fuchsia@chromium.org
-# COMPONENT: Internals>PlatformIntegration
diff --git a/build/fuchsia/__init__.py b/build/fuchsia/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/build/fuchsia/__init__.py
+++ /dev/null
diff --git a/build/fuchsia/boot_data.py b/build/fuchsia/boot_data.py
deleted file mode 100644
index 50275b0..0000000
--- a/build/fuchsia/boot_data.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Functions used to provision Fuchsia boot images."""
-
-import common
-import logging
-import os
-import subprocess
-import tempfile
-import time
-import uuid
-
-_SSH_CONFIG_TEMPLATE = """
-Host *
-  CheckHostIP no
-  StrictHostKeyChecking no
-  ForwardAgent no
-  ForwardX11 no
-  UserKnownHostsFile {known_hosts}
-  User fuchsia
-  IdentitiesOnly yes
-  IdentityFile {identity}
-  ServerAliveInterval 1
-  ServerAliveCountMax 1
-  ControlMaster auto
-  ControlPersist 1m
-  ControlPath /tmp/ssh-%r@%h:%p"""
-
-FVM_TYPE_QCOW = 'qcow'
-FVM_TYPE_SPARSE = 'sparse'
-
-
-def _TargetCpuToSdkBinPath(target_arch):
-  """Returns the path to the SDK 'target' file directory for |target_cpu|."""
-
-  return os.path.join(common.SDK_ROOT, 'target', target_arch)
-
-
-def _ProvisionSSH(output_dir):
-  """Provisions the key files used by the SSH daemon, and generates a
-  configuration file used by clients for connecting to SSH.
-
-  Returns a tuple with:
-  #0: the client configuration file
-  #1: a list of file path pairs: (<path in image>, <path on build filesystem>).
-  """
-
-  host_key_path = output_dir + '/ssh_key'
-  host_pubkey_path = host_key_path + '.pub'
-  id_key_path = output_dir + '/id_ed25519'
-  id_pubkey_path = id_key_path + '.pub'
-  known_hosts_path = output_dir + '/known_hosts'
-  ssh_config_path = GetSSHConfigPath(output_dir)
-
-  logging.debug('Generating SSH credentials.')
-  if not os.path.isfile(host_key_path):
-    subprocess.check_call(['ssh-keygen', '-t', 'ed25519', '-h', '-f',
-                           host_key_path, '-P', '', '-N', ''],
-                          stdout=open(os.devnull))
-  if not os.path.isfile(id_key_path):
-    subprocess.check_call(['ssh-keygen', '-t', 'ed25519', '-f', id_key_path,
-                           '-P', '', '-N', ''], stdout=open(os.devnull))
-
-  with open(ssh_config_path, "w") as ssh_config:
-    ssh_config.write(
-        _SSH_CONFIG_TEMPLATE.format(identity=id_key_path,
-                                    known_hosts=known_hosts_path))
-
-  if os.path.exists(known_hosts_path):
-    os.remove(known_hosts_path)
-
-  return (
-      ssh_config_path,
-      (('ssh/ssh_host_ed25519_key', host_key_path),
-       ('ssh/ssh_host_ed25519_key.pub', host_pubkey_path),
-       ('ssh/authorized_keys', id_pubkey_path))
-  )
-
-
-def _MakeQcowDisk(output_dir, disk_path):
-  """Creates a QEMU copy-on-write version of |disk_path| in the output
-  directory."""
-
-  qimg_path = os.path.join(common.SDK_ROOT, 'qemu', 'bin', 'qemu-img')
-  output_path = os.path.join(output_dir,
-                             os.path.basename(disk_path) + '.qcow2')
-  subprocess.check_call([qimg_path, 'create', '-q', '-f', 'qcow2',
-                         '-b', disk_path, output_path])
-  return output_path
-
-
-def GetTargetFile(target_arch, filename):
-  """Computes a path to |filename| in the Fuchsia target directory specific to
-  |target_arch|."""
-
-  return os.path.join(_TargetCpuToSdkBinPath(target_arch), filename)
-
-
-def GetSSHConfigPath(output_dir):
-  return output_dir + '/ssh_config'
-
-
-def ConfigureDataFVM(output_dir, output_type):
-  """Builds the FVM image for the /data volume and prepopulates it
-  with SSH keys.
-
-  output_dir: Path to the output directory which will contain the FVM file.
-  output_type: If FVM_TYPE_QCOW, then returns a path to the qcow2 FVM file,
-               used for QEMU.
-
-               If FVM_TYPE_SPARSE, then returns a path to the
-               sparse/compressed FVM file."""
-
-  logging.debug('Building /data partition FVM file.')
-  with tempfile.NamedTemporaryFile() as data_file:
-    # Build up the minfs partition data and install keys into it.
-    ssh_config, ssh_data = _ProvisionSSH(output_dir)
-    with tempfile.NamedTemporaryFile() as manifest:
-      for dest, src in ssh_data:
-        manifest.write('%s=%s\n' % (dest, src))
-      manifest.flush()
-      minfs_path = os.path.join(common.SDK_ROOT, 'tools', 'minfs')
-      subprocess.check_call([minfs_path, '%s@1G' % data_file.name, 'create'])
-      subprocess.check_call([minfs_path, data_file.name, 'manifest',
-                             manifest.name])
-
-      # Wrap the minfs partition in a FVM container.
-      fvm_path = os.path.join(common.SDK_ROOT, 'tools', 'fvm')
-      fvm_output_path = os.path.join(output_dir, 'fvm.data.blk')
-      if os.path.exists(fvm_output_path):
-        os.remove(fvm_output_path)
-
-      if output_type == FVM_TYPE_SPARSE:
-        cmd = [fvm_path, fvm_output_path, 'sparse', '--compress', 'lz4',
-               '--data', data_file.name]
-      else:
-        cmd = [fvm_path, fvm_output_path, 'create', '--data', data_file.name]
-
-      logging.debug(' '.join(cmd))
-      subprocess.check_call(cmd)
-
-      if output_type == FVM_TYPE_SPARSE:
-        return fvm_output_path
-      elif output_type == FVM_TYPE_QCOW:
-        return _MakeQcowDisk(output_dir, fvm_output_path)
-      else:
-        raise Exception('Unknown output_type: %r' % output_type)
-
-
-def GetNodeName(output_dir):
-  """Returns the cached Zircon node name, or generates one if it doesn't
-  already exist. The node name is used by Discover to find the prior
-  deployment on the LAN."""
-
-  nodename_file = os.path.join(output_dir, 'nodename')
-  if not os.path.exists(nodename_file):
-    nodename = uuid.uuid4()
-    f = open(nodename_file, 'w')
-    f.write(str(nodename))
-    f.flush()
-    f.close()
-    return str(nodename)
-  else:
-    f = open(nodename_file, 'r')
-    return f.readline()
-
-
-def GetKernelArgs(output_dir):
-  return ['devmgr.epoch=%d' % time.time(),
-          'zircon.nodename=' + GetNodeName(output_dir)]
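
For reference, the deleted boot_data.py combined on-demand ssh-keygen provisioning with a cached uuid4 node name. A minimal, self-contained sketch of those two patterns (the paths are placeholders and the ssh-keygen flags are reduced to the essentials rather than copied verbatim):

    import os
    import subprocess
    import uuid

    def provision_ssh_keys(output_dir):
        # Generate host and client identity keys only if they are missing.
        for name in ('ssh_key', 'id_ed25519'):
            key_path = os.path.join(output_dir, name)
            if not os.path.isfile(key_path):
                subprocess.check_call(
                    ['ssh-keygen', '-t', 'ed25519', '-f', key_path, '-N', ''])

    def get_node_name(output_dir):
        # Cache a random node name so later runs find the same deployment.
        nodename_file = os.path.join(output_dir, 'nodename')
        if not os.path.exists(nodename_file):
            with open(nodename_file, 'w') as f:
                f.write(str(uuid.uuid4()))
        with open(nodename_file) as f:
            return f.readline().strip()
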
diff --git a/build/fuchsia/common.py b/build/fuchsia/common.py
deleted file mode 100644
index 51a6841..0000000
--- a/build/fuchsia/common.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-DIR_SOURCE_ROOT = os.path.abspath(
-    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
-SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'fuchsia-sdk', 'sdk')
-
-def EnsurePathExists(path):
-  """Checks that the file |path| exists on the filesystem and returns the path
-  if it does, raising an exception otherwise."""
-
-  if not os.path.exists(path):
-    raise IOError('Missing file: ' + path)
-
-  return path
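
The deleted common.py was only SDK_ROOT plus a small guard helper; the pattern it encouraged was to wrap every SDK path in an existence check so a missing SDK file fails loudly rather than through a late, cryptic tool error. A tiny sketch (the example path in the comment is hypothetical):

    import os

    def ensure_path_exists(path):
        # Fail immediately with a clear message if the file is missing.
        if not os.path.exists(path):
            raise IOError('Missing file: ' + path)
        return path

    # e.g. kernel = ensure_path_exists(
    #          os.path.join(sdk_root, 'target', 'x64', 'zircon.bin'))
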
diff --git a/build/fuchsia/common_args.py b/build/fuchsia/common_args.py
deleted file mode 100644
index 689634b..0000000
--- a/build/fuchsia/common_args.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import sys
-
-from device_target import DeviceTarget
-from qemu_target import QemuTarget
-
-
-def AddCommonArgs(arg_parser):
-  """Adds command line arguments to |arg_parser| for options which are shared
-  across test and executable target types."""
-
-  common_args = arg_parser.add_argument_group('common', 'Common arguments')
-  common_args.add_argument('--package',
-                           type=os.path.realpath, required=True,
-                           help='Path to the package to execute.')
-  common_args.add_argument('--package-name', required=True,
-                           help='Name of the package to execute, defined in ' +
-                                'package metadata.')
-  common_args.add_argument('--package-manifest',
-                           type=os.path.realpath, required=True,
-                           help='Path to the Fuchsia package manifest file.')
-  common_args.add_argument('--output-directory',
-                           type=os.path.realpath, required=True,
-                           help=('Path to the directory in which build files '
-                                 'are located (must include build type).'))
-  common_args.add_argument('--target-cpu', required=True,
-                           help='GN target_cpu setting for the build.')
-  common_args.add_argument('--device', '-d', action='store_true', default=False,
-                           help='Run on hardware device instead of QEMU.')
-  common_args.add_argument('--host', help='The IP of the target device. ' +
-                           'Optional.')
-  common_args.add_argument('--port', '-p', type=int, default=22,
-                           help='The port of the SSH service running on the ' +
-                                'device. Optional.')
-  common_args.add_argument('--ssh-config', '-F',
-                           help='The path to the SSH configuration used for '
-                                'connecting to the target device.')
-  common_args.add_argument('--include-system-logs', default=True, type=bool,
-                           help='Do not show system log data.')
-  common_args.add_argument('--verbose', '-v', default=False,
-                           action='store_true',
-                           help='Enable debug-level logging.')
-
-
-def ConfigureLogging(args):
-  """Configures the logging level based on command line |args|."""
-
-  logging.basicConfig(level=(logging.DEBUG if args.verbose else logging.INFO),
-                      format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
-
-  # The test server spawner is too noisy with INFO level logging, so tweak
-  # its verbosity a bit by adjusting its logging level.
-  logging.getLogger('chrome_test_server_spawner').setLevel(
-      logging.DEBUG if args.verbose else logging.WARN)
-
-  # Verbose SCP output can be useful at times but oftentimes is just too noisy.
-  # Only enable it if -vv is passed.
-  logging.getLogger('ssh').setLevel(
-      logging.DEBUG if args.verbose else logging.WARN)
-
-
-def GetDeploymentTargetForArgs(args):
-  """Constructs a deployment target object using parameters taken from
-  command line arguments."""
-
-  if not args.device:
-    return QemuTarget(args.output_directory, args.target_cpu)
-  else:
-    return DeviceTarget(args.output_directory, args.target_cpu,
-                        args.host, args.port, args.ssh_config)
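
The logging arrangement in ConfigureLogging is worth keeping in mind if these helpers are resurrected elsewhere: the root logger level follows --verbose, while the noisy 'ssh' and test-server-spawner loggers are clamped to WARN unless verbose logging is requested. A standalone sketch of that setup:

    import logging

    verbose = False  # stands in for args.verbose
    logging.basicConfig(
        level=logging.DEBUG if verbose else logging.INFO,
        format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
    for noisy in ('chrome_test_server_spawner', 'ssh'):
        logging.getLogger(noisy).setLevel(
            logging.DEBUG if verbose else logging.WARN)
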
diff --git a/build/fuchsia/create_runner_script.py b/build/fuchsia/create_runner_script.py
deleted file mode 100755
index d96c2cb..0000000
--- a/build/fuchsia/create_runner_script.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Creates a script to run a Fushsia executable by delegating to
-build/fuchsia/(exe|test)_runner.py.
-"""
-
-import argparse
-import os
-import re
-import sys
-
-
-SCRIPT_TEMPLATE = """\
-#!/usr/bin/env python
-#
-# This file was generated by build/fuchsia/create_runner_script.py
-
-import os
-import sys
-
-def main():
-  script_directory = os.path.dirname(__file__)
-
-  def ResolvePath(path):
-    \"\"\"Returns an absolute filepath given a path relative to this script.
-    \"\"\"
-    return os.path.abspath(os.path.join(script_directory, path))
-
-  runner_path = ResolvePath('{runner_path}')
-  runner_args = {runner_args}
-  runner_path_args = {runner_path_args}
-  for arg, path in runner_path_args:
-    runner_args.extend([arg, ResolvePath(path)])
-
-  os.execv(runner_path,
-           [runner_path] + runner_args + sys.argv[1:])
-
-if __name__ == '__main__':
-  sys.exit(main())
-"""
-
-
-def main(args):
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--runner-script',
-                      help='Name of the runner script to use.')
-  parser.add_argument('--script-output-path',
-                      help='Output path for executable script.')
-  parser.add_argument('--test-runner-path',
-                      help='Path to test_runner.py (optional).')
-  group = parser.add_argument_group('Test runner path arguments.')
-  group.add_argument('--output-directory')
-  group.add_argument('--package')
-  group.add_argument('--package-manifest')
-  args, runner_args = parser.parse_known_args(args)
-
-  def RelativizePathToScript(path):
-    """Returns the path relative to the output script directory."""
-    return os.path.relpath(path, os.path.dirname(args.script_output_path))
-
-  runner_path = args.test_runner_path or os.path.join(
-      os.path.dirname(__file__), args.runner_script)
-  runner_path = RelativizePathToScript(runner_path)
-
-  runner_path_args = []
-  runner_path_args.append(
-      ('--output-directory', RelativizePathToScript(args.output_directory)))
-  runner_path_args.append(
-      ('--package', RelativizePathToScript(args.package)))
-  runner_path_args.append(
-      ('--package-manifest', RelativizePathToScript(args.package_manifest)))
-
-  with open(args.script_output_path, 'w') as script:
-    script.write(SCRIPT_TEMPLATE.format(
-        runner_path=str(runner_path),
-        runner_args=repr(runner_args),
-        runner_path_args=repr(runner_path_args)))
-
-  # Sets the mode of the generated script so that it is executable by the
-  # current user.
-  os.chmod(args.script_output_path, 0750)
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
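
The generator above bakes relative paths into a thin wrapper that re-resolves them against its own location before exec'ing the real runner. A simplified sketch of that generate-a-wrapper pattern (the template below is a stand-in, not the original SCRIPT_TEMPLATE):

    import os

    TEMPLATE = """\
    #!/usr/bin/env python
    import os, sys
    here = os.path.dirname(__file__)
    runner = os.path.abspath(os.path.join(here, {runner_path!r}))
    os.execv(runner, [runner] + {runner_args!r} + sys.argv[1:])
    """

    def write_wrapper(out_path, runner_path, runner_args):
        with open(out_path, 'w') as f:
            f.write(TEMPLATE.format(runner_path=runner_path,
                                    runner_args=runner_args))
        os.chmod(out_path, 0o750)  # executable, matching the original 0750
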
diff --git a/build/fuchsia/device_target.py b/build/fuchsia/device_target.py
deleted file mode 100644
index 34bc99f..0000000
--- a/build/fuchsia/device_target.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Implements commands for running and interacting with Fuchsia on devices."""
-
-import boot_data
-import logging
-import os
-import subprocess
-import target
-import time
-import uuid
-
-from common import SDK_ROOT, EnsurePathExists
-
-CONNECT_RETRY_COUNT = 20
-CONNECT_RETRY_WAIT_SECS = 1
-
-class DeviceTarget(target.Target):
-  def __init__(self, output_dir, target_cpu, host=None, port=None,
-               ssh_config=None):
-    """output_dir: The directory which will contain the files that are
-                   generated to support the deployment.
-    target_cpu: The CPU architecture of the deployment target. Can be
-                "x64" or "arm64".
-    host: The address of the deployment target device.
-    port: The port of the SSH service on the deployment target device.
-    ssh_config: The path to SSH configuration data."""
-
-    super(DeviceTarget, self).__init__(output_dir, target_cpu)
-
-    self._port = 22
-    self._auto = not host or not ssh_config
-    self._new_instance = True
-
-    if self._auto:
-      self._ssh_config_path = EnsurePathExists(
-          boot_data.GetSSHConfigPath(output_dir))
-    else:
-      self._ssh_config_path = os.path.expanduser(ssh_config)
-      self._host = host
-      if port:
-        self._port = port
-      self._new_instance = False
-
-  def __Discover(self, node_name):
-    """Returns the IP address and port of a Fuchsia instance discovered on
-    the local area network."""
-
-    netaddr_path = os.path.join(SDK_ROOT, 'tools', 'netaddr')
-    command = [netaddr_path, '--fuchsia', '--nowait', node_name]
-    logging.debug(' '.join(command))
-    proc = subprocess.Popen(command,
-                            stdout=subprocess.PIPE,
-                            stderr=open(os.devnull, 'w'))
-    proc.wait()
-    if proc.returncode == 0:
-      return proc.stdout.readlines()[0].strip()
-    return None
-
-  def Start(self):
-    if self._auto:
-      logging.debug('Starting automatic device deployment.')
-      node_name = boot_data.GetNodeName(self._output_dir)
-      self._host = self.__Discover(node_name)
-      if self._host and self._WaitUntilReady(retries=0):
-        logging.info('Connected to an already booted device.')
-        self._new_instance = False
-        return
-
-      logging.info('Netbooting Fuchsia. ' +
-                   'Please ensure that your device is in bootloader mode.')
-      bootserver_path = os.path.join(SDK_ROOT, 'tools', 'bootserver')
-      bootserver_command = [
-          bootserver_path,
-          '-1',
-          '--efi',
-          EnsurePathExists(boot_data.GetTargetFile(self._GetTargetSdkArch(),
-                                                   'local.esp.blk')),
-          '--fvm',
-          EnsurePathExists(boot_data.GetTargetFile(self._GetTargetSdkArch(),
-                                                   'fvm.sparse.blk')),
-          '--fvm',
-          EnsurePathExists(
-              boot_data.ConfigureDataFVM(self._output_dir,
-                                         boot_data.FVM_TYPE_SPARSE)),
-          EnsurePathExists(boot_data.GetTargetFile(self._GetTargetSdkArch(),
-                                                   'zircon.bin')),
-          EnsurePathExists(boot_data.GetTargetFile(self._GetTargetSdkArch(),
-                                                   'bootdata-blob.bin')),
-          '--'] + boot_data.GetKernelArgs(self._output_dir)
-      logging.debug(' '.join(bootserver_command))
-      subprocess.check_call(bootserver_command)
-
-      logging.debug('Waiting for device to join network.')
-      for _ in xrange(CONNECT_RETRY_COUNT):
-        self._host = self.__Discover(node_name)
-        if self._host:
-          break
-        time.sleep(CONNECT_RETRY_WAIT_SECS)
-      if not self._host:
-        raise Exception('Couldn\'t connect to device.')
-
-      logging.debug('host=%s, port=%d' % (self._host, self._port))
-
-    self._WaitUntilReady();
-
-  def IsNewInstance(self):
-    return self._new_instance
-
-  def _GetEndpoint(self):
-    return (self._host, self._port)
-
-  def _GetSshConfigPath(self):
-    return self._ssh_config_path
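
The interesting part of DeviceTarget.Start() is the discover-or-netboot flow: try netaddr discovery first, fall back to bootserver, then poll discovery until the device reappears on the network. A standalone sketch of the polling phase (discover() is a hypothetical stand-in for the netaddr-based __Discover helper):

    import time

    CONNECT_RETRY_COUNT = 20
    CONNECT_RETRY_WAIT_SECS = 1

    def wait_for_device(discover, node_name):
        # Poll until the named device shows up on the LAN, or give up.
        for _ in range(CONNECT_RETRY_COUNT):
            host = discover(node_name)
            if host:
                return host
            time.sleep(CONNECT_RETRY_WAIT_SECS)
        raise Exception("Couldn't connect to device.")
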
diff --git a/build/fuchsia/exe_runner.py b/build/fuchsia/exe_runner.py
deleted file mode 100755
index 14c0c70..0000000
--- a/build/fuchsia/exe_runner.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Deploys and executes a packaged Fuchsia executable on a target."""
-
-import argparse
-import logging
-import sys
-
-from common_args import AddCommonArgs, ConfigureLogging, \
-                        GetDeploymentTargetForArgs
-from run_package import RunPackage
-
-
-def main():
-  parser = argparse.ArgumentParser()
-  AddCommonArgs(parser)
-  parser.add_argument('child_args', nargs='*',
-                      help='Arguments for the test process.')
-  args = parser.parse_args()
-  ConfigureLogging(args)
-
-  with GetDeploymentTargetForArgs(args) as target:
-    target.Start()
-    return RunPackage(
-        args.output_directory, target, args.package, args.package_name,
-        args.child_args, args.include_system_logs, args.package_manifest)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/fuchsia/layout_test_proxy/BUILD.gn b/build/fuchsia/layout_test_proxy/BUILD.gn
deleted file mode 100644
index 43ed152..0000000
--- a/build/fuchsia/layout_test_proxy/BUILD.gn
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-assert(is_fuchsia)
-
-import("//testing/test.gni")
-
-# Binary used to proxy TCP connections from a Fuchsia process. Potentially SSH
-# can be used to forward TCP, but this feature is currently broken on Fuchsia,
-# see ZX-1555. layout_test_proxy can be removed once that issue with sshd is
-# fixed and layout tests are updated to use SSH.
-executable("layout_test_proxy") {
-  testonly = true
-  sources = [
-    "layout_test_proxy.cc",
-  ]
-  deps = [
-    "//net",
-    "//net:test_support",
-  ]
-}
-
-fuchsia_executable_runner("layout_test_proxy_runner") {
-  testonly = true
-  exe_target = ":layout_test_proxy"
-}
diff --git a/build/fuchsia/layout_test_proxy/DEPS b/build/fuchsia/layout_test_proxy/DEPS
deleted file mode 100644
index b2f6f8e..0000000
--- a/build/fuchsia/layout_test_proxy/DEPS
+++ /dev/null
@@ -1,3 +0,0 @@
-include_rules = [
-  "+net",
-]
\ No newline at end of file
diff --git a/build/fuchsia/layout_test_proxy/layout_test_proxy.cc b/build/fuchsia/layout_test_proxy/layout_test_proxy.cc
deleted file mode 100644
index 1d14df9..0000000
--- a/build/fuchsia/layout_test_proxy/layout_test_proxy.cc
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "base/command_line.h"
-#include "base/message_loop/message_loop.h"
-#include "base/strings/string_number_conversions.h"
-#include "base/strings/string_split.h"
-#include "net/base/ip_endpoint.h"
-#include "net/test/tcp_socket_proxy.h"
-
-const char kPortsSwitch[] = "ports";
-const char kRemoteAddressSwitch[] = "remote-address";
-
-int main(int argc, char** argv) {
-  base::CommandLine::Init(argc, argv);
-
-  base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
-
-  if (!command_line->HasSwitch(kPortsSwitch)) {
-    LOG(ERROR) << "--" << kPortsSwitch << " was not specified.";
-    return 1;
-  }
-
-  std::vector<std::string> ports_strings =
-      base::SplitString(command_line->GetSwitchValueASCII(kPortsSwitch), ",",
-                        base::TRIM_WHITESPACE, base::SPLIT_WANT_NONEMPTY);
-  if (ports_strings.empty()) {
-    LOG(ERROR) << "At least one port must be specified with --" << kPortsSwitch;
-    return 1;
-  }
-
-  std::vector<int> ports;
-  for (auto& port_string : ports_strings) {
-    int port;
-    if (!base::StringToInt(port_string, &port) || port <= 0 || port > 65535) {
-      LOG(ERROR) << "Invalid value specified for --" << kPortsSwitch << ": "
-                 << port_string;
-      return 1;
-    }
-    ports.push_back(port);
-  }
-
-  if (!command_line->HasSwitch(kRemoteAddressSwitch)) {
-    LOG(ERROR) << "--" << kRemoteAddressSwitch << " was not specified.";
-    return 1;
-  }
-
-  std::string remote_address_str =
-      command_line->GetSwitchValueASCII(kRemoteAddressSwitch);
-  net::IPAddress remote_address;
-  if (!remote_address.AssignFromIPLiteral(remote_address_str)) {
-    LOG(ERROR) << "Invalid value specified for --" << kRemoteAddressSwitch
-               << ": " << remote_address_str;
-    return 1;
-  }
-
-  base::MessageLoopForIO message_loop;
-
-  std::vector<std::unique_ptr<net::TcpSocketProxy>> proxies;
-
-  for (int port : ports) {
-    auto test_server_proxy =
-        std::make_unique<net::TcpSocketProxy>(message_loop.task_runner());
-    if (!test_server_proxy->Initialize(port)) {
-      LOG(ERROR) << "Can't bind proxy to port " << port;
-      return 1;
-    }
-    LOG(INFO) << "Listening on port " << test_server_proxy->local_port();
-    test_server_proxy->Start(net::IPEndPoint(remote_address, port));
-    proxies.push_back(std::move(test_server_proxy));
-  }
-
-  // Run the message loop indefinitely.
-  base::RunLoop().Run();
-
-  return 0;
-}
\ No newline at end of file
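
For reference, the proxy took a comma-separated list of ports plus the address to forward connections to; a typical invocation on the device would have looked something like the line below (the port list is made up, and 192.168.3.2 is only the host address under the QEMU network config further down):

    layout_test_proxy --ports=8000,8080,9000 --remote-address=192.168.3.2
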
diff --git a/build/fuchsia/net_test_server.py b/build/fuchsia/net_test_server.py
deleted file mode 100644
index 2849c44..0000000
--- a/build/fuchsia/net_test_server.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import os
-import re
-import select
-import socket
-import sys
-import subprocess
-import tempfile
-import time
-
-DIR_SOURCE_ROOT = os.path.abspath(
-    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
-sys.path.append(os.path.join(DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common'))
-import chrome_test_server_spawner
-
-PORT_MAP_RE = re.compile('Allocated port (?P<port>\d+) for remote')
-GET_PORT_NUM_TIMEOUT_SECS = 5
-
-
-def _ConnectPortForwardingTask(target, local_port):
-  """Establishes a port forwarding SSH task to a localhost TCP endpoint hosted
-  at port |local_port|. Blocks until port forwarding is established.
-
-  Returns the remote port number."""
-
-  forwarding_flags = ['-O', 'forward',  # Send SSH mux control signal.
-                      '-R', '0:localhost:%d' % local_port,
-                      '-v',   # Get forwarded port info from stderr.
-                      '-NT']  # Don't execute command; don't allocate terminal.
-  task = target.RunCommandPiped([],
-                                ssh_args=forwarding_flags,
-                                stderr=subprocess.PIPE)
-
-  # SSH reports the remote dynamic port number over stderr.
-  # Unfortunately, the output is incompatible with Python's line buffered
-  # input (or vice versa), so we have to build our own buffered input system to
-  # pull bytes over the pipe.
-  poll_obj = select.poll()
-  poll_obj.register(task.stderr, select.POLLIN)
-  line = ''
-  timeout = time.time() + GET_PORT_NUM_TIMEOUT_SECS
-  while time.time() < timeout:
-    poll_result = poll_obj.poll(max(0, timeout - time.time()))
-    if poll_result:
-      next_char = task.stderr.read(1)
-      if not next_char:
-        break
-      line += next_char
-      if line.endswith('\n'):
-        line = line[:-1]
-        logging.debug('ssh: ' + line)
-        matched = PORT_MAP_RE.match(line)
-        if matched:
-          device_port = int(matched.group('port'))
-          logging.debug('Port forwarding established (local=%d, device=%d)' %
-                        (local_port, device_port))
-          task.wait()
-          return device_port
-        line = ''
-
-  raise Exception('Could not establish a port forwarding connection.')
-
-
-# Implementation of chrome_test_server_spawner.PortForwarder that uses SSH's
-# remote port forwarding feature to forward ports.
-class SSHPortForwarder(chrome_test_server_spawner.PortForwarder):
-  def __init__(self, target):
-    self._target = target
-
-    # Maps the host (server) port to the device port number.
-    self._port_mapping = {}
-
-  def Map(self, port_pairs):
-    for p in port_pairs:
-      _, host_port = p
-      self._port_mapping[host_port] = \
-          _ConnectPortForwardingTask(self._target, host_port)
-
-  def GetDevicePortForHostPort(self, host_port):
-    return self._port_mapping[host_port]
-
-  def Unmap(self, device_port):
-    for host_port, entry in self._port_mapping.iteritems():
-      if entry == device_port:
-        forwarding_args = [
-            '-NT', '-O', 'cancel', '-R',
-            '%d:localhost:%d' % (self._port_mapping[host_port], host_port)]
-        task = self._target.RunCommandPiped([],
-                                            ssh_args=forwarding_args,
-                                            stderr=subprocess.PIPE)
-        task.wait()
-        if task.returncode != 0:
-          raise Exception(
-              'Error %d when unmapping port %d' % (task.returncode,
-                                                   device_port))
-        del self._port_mapping[host_port]
-        return
-
-    raise Exception('Unmap called for unknown port: %d' % device_port)
-
-
-def SetupTestServer(target, test_concurrency):
-  """Provisions a forwarding test server and configures |target| to use it.
-
-  Returns a Popen object for the test server process."""
-
-  logging.debug('Starting test server.')
-  spawning_server = chrome_test_server_spawner.SpawningServer(
-      0, SSHPortForwarder(target), test_concurrency)
-  forwarded_port = _ConnectPortForwardingTask(
-      target, spawning_server.server_port)
-  spawning_server.Start()
-
-  logging.debug('Test server listening for connections (port=%d)' %
-                spawning_server.server_port)
-  logging.debug('Forwarded port is %d' % forwarded_port)
-
-  config_file = tempfile.NamedTemporaryFile(delete=True)
-
-  # Clean up the config JSON to only pass ports. See https://crbug.com/810209 .
-  config_file.write(json.dumps({
-    'name': 'testserver',
-    'address': '127.0.0.1',
-    'spawner_url_base': 'http://localhost:%d' % forwarded_port
-  }))
-
-  config_file.flush()
-  target.PutFile(config_file.name, '/data/net-test-server-config')
-
-  return spawning_server
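
The remote port number is recovered by scanning ssh's verbose stderr for OpenSSH's "Allocated port ... for remote" message. A runnable sketch of just that parsing step (the sample line is representative, not captured from a real session):

    import re

    PORT_MAP_RE = re.compile(r'Allocated port (?P<port>\d+) for remote')

    sample = 'Allocated port 32901 for remote forward to localhost:8000'
    device_port = int(PORT_MAP_RE.match(sample).group('port'))
    print(device_port)  # -> 32901
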
diff --git a/build/fuchsia/qemu_target.py b/build/fuchsia/qemu_target.py
deleted file mode 100644
index aa356d3..0000000
--- a/build/fuchsia/qemu_target.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Implements commands for running and interacting with Fuchsia on QEMU."""
-
-import boot_data
-import logging
-import target
-import os
-import platform
-import socket
-import subprocess
-import time
-
-from common import SDK_ROOT, EnsurePathExists
-
-
-# Virtual networking configuration data for QEMU.
-GUEST_NET = '192.168.3.0/24'
-GUEST_IP_ADDRESS = '192.168.3.9'
-HOST_IP_ADDRESS = '192.168.3.2'
-GUEST_MAC_ADDRESS = '52:54:00:63:5e:7b'
-
-
-def _GetAvailableTcpPort():
-  """Finds a (probably) open port by opening and closing a listen socket."""
-  sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-  sock.bind(("", 0))
-  port = sock.getsockname()[1]
-  sock.close()
-  return port
-
-
-class QemuTarget(target.Target):
-  def __init__(self, output_dir, target_cpu,
-               ram_size_mb=2048):
-    """output_dir: The directory which will contain the files that are
-                   generated to support the QEMU deployment.
-    target_cpu: The emulated target CPU architecture.
-                Can be 'x64' or 'arm64'."""
-    super(QemuTarget, self).__init__(output_dir, target_cpu)
-    self._qemu_process = None
-    self._ram_size_mb = ram_size_mb
-
-  def __enter__(self):
-    return self
-
-  # Used by the context manager to ensure that QEMU is killed when the Python
-  # process exits.
-  def __exit__(self, exc_type, exc_val, exc_tb):
-    if self.IsStarted():
-      self.Shutdown()
-
-  def Start(self):
-    qemu_path = os.path.join(SDK_ROOT, 'qemu', 'bin',
-                             'qemu-system-' + self._GetTargetSdkArch())
-    kernel_args = boot_data.GetKernelArgs(self._output_dir)
-
-    # TERM=dumb tells the guest OS to not emit ANSI commands that trigger
-    # noisy ANSI spew from the user's terminal emulator.
-    kernel_args.append('TERM=dumb')
-
-    qemu_command = [qemu_path,
-        '-m', str(self._ram_size_mb),
-        '-nographic',
-        '-kernel', EnsurePathExists(
-            boot_data.GetTargetFile(self._GetTargetSdkArch(),
-                                    'zircon.bin')),
-        '-initrd', EnsurePathExists(
-            boot_data.GetTargetFile(self._GetTargetSdkArch(),
-                                    'bootdata-blob.bin')),
-        '-smp', '4',
-
-        # Attach the blobstore and data volumes. Use snapshot mode to discard
-        # any changes.
-        '-snapshot',
-        '-drive', 'file=%s,format=qcow2,if=none,id=data,snapshot=on' %
-                    EnsurePathExists(os.path.join(self._output_dir,
-                                                  'fvm.blk.qcow2')),
-        '-drive', 'file=%s,format=qcow2,if=none,id=blobstore,snapshot=on' %
-            EnsurePathExists(
-                boot_data.ConfigureDataFVM(self._output_dir,
-                                           boot_data.FVM_TYPE_QCOW)),
-        '-device', 'virtio-blk-pci,drive=data',
-        '-device', 'virtio-blk-pci,drive=blobstore',
-
-        # Use stdio for the guest OS only; don't attach the QEMU interactive
-        # monitor.
-        '-serial', 'stdio',
-        '-monitor', 'none',
-
-        '-append', ' '.join(kernel_args)
-      ]
-
-    # Configure the machine & CPU to emulate, based on the target architecture.
-    # Enable lightweight virtualization (KVM) if the host and guest OS run on
-    # the same architecture.
-    if self._target_cpu == 'arm64':
-      qemu_command.extend([
-          '-machine','virt',
-          '-cpu', 'cortex-a53',
-      ])
-      netdev_type = 'virtio-net-pci'
-      if platform.machine() == 'aarch64':
-        qemu_command.append('-enable-kvm')
-    else:
-      qemu_command.extend([
-          '-machine', 'q35',
-          '-cpu', 'host,migratable=no',
-      ])
-      netdev_type = 'e1000'
-      if platform.machine() == 'x86_64':
-        qemu_command.append('-enable-kvm')
-
-    # Configure virtual network. It is used in the tests to connect to
-    # testserver running on the host.
-    netdev_config = 'user,id=net0,net=%s,dhcpstart=%s,host=%s' % \
-            (GUEST_NET, GUEST_IP_ADDRESS, HOST_IP_ADDRESS)
-
-    self._host_ssh_port = _GetAvailableTcpPort()
-    netdev_config += ",hostfwd=tcp::%s-:22" % self._host_ssh_port
-    qemu_command.extend([
-      '-netdev', netdev_config,
-      '-device', '%s,netdev=net0,mac=%s' % (netdev_type, GUEST_MAC_ADDRESS),
-    ])
-
-    # We pass a separate stdin stream to qemu. Sharing stdin across processes
-    # leads to flakiness due to the OS prematurely killing the stream and the
-    # Python script panicking and aborting.
-    # The precise root cause is still nebulous, but this fix works.
-    # See crbug.com/741194.
-    logging.debug('Launching QEMU.')
-    logging.debug(' '.join(qemu_command))
-
-    stdio_flags = {'stdin': open(os.devnull),
-                   'stdout': open(os.devnull),
-                   'stderr': open(os.devnull)}
-    self._qemu_process = subprocess.Popen(qemu_command, **stdio_flags)
-    self._WaitUntilReady();
-
-  def Shutdown(self):
-    logging.info('Shutting down QEMU.')
-    self._qemu_process.kill()
-
-  def GetQemuStdout(self):
-    return self._qemu_process.stdout
-
-  def _GetEndpoint(self):
-    return ('localhost', self._host_ssh_port)
-
-  def _GetSshConfigPath(self):
-    return boot_data.GetSSHConfigPath(self._output_dir)
-
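
_GetAvailableTcpPort() is the usual bind-to-port-zero trick for picking the host side of the hostfwd SSH mapping; it is inherently racy but adequate here. A self-contained version:

    import socket

    def get_available_tcp_port():
        # Ask the OS for any free port by binding to port 0, then release it.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind(('', 0))
        port = sock.getsockname()[1]
        sock.close()  # small race: another process could grab it before QEMU
        return port

    print(get_available_tcp_port())
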
diff --git a/build/fuchsia/qemu_target_test.py b/build/fuchsia/qemu_target_test.py
deleted file mode 100755
index cefd8f1..0000000
--- a/build/fuchsia/qemu_target_test.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/python
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import qemu_target
-import shutil
-import subprocess
-import tempfile
-import time
-import unittest
-
-TEST_PAYLOAD = "Let's get this payload across the finish line!"
-
-tmpdir = tempfile.mkdtemp()
-
-# Register the target with the context manager so that it always gets
-# torn down on process exit. Otherwise there might be lingering QEMU instances
-# if Python crashes or is interrupted.
-with qemu_target.QemuTarget(tmpdir, 'x64') as target:
-  class TestQemuTarget(unittest.TestCase):
-    @classmethod
-    def setUpClass(cls):
-      target.Start()
-
-    @classmethod
-    def tearDownClass(cls):
-      target.Shutdown()
-      shutil.rmtree(tmpdir)
-
-    def testCopyBidirectional(self):
-      tmp_path = tmpdir + "/payload"
-      with open(tmp_path, "w") as tmpfile:
-        tmpfile.write(TEST_PAYLOAD)
-      target.PutFile(tmp_path, '/tmp/payload')
-
-      tmp_path_roundtrip = tmp_path + ".roundtrip"
-      target.GetFile('/tmp/payload', tmp_path_roundtrip)
-      with open(tmp_path_roundtrip) as roundtrip:
-        self.assertEqual(TEST_PAYLOAD, roundtrip.read())
-
-    def testRunCommand(self):
-      self.assertEqual(0, target.RunCommand(['true']))
-
-      # This is a known bug: https://fuchsia.atlassian.net/browse/NET-349
-      self.assertEqual(1, target.RunCommand(['false']))
-
-    def testRunCommandPiped(self):
-      proc = target.RunCommandPiped(['cat'],
-                                    stdin=subprocess.PIPE,
-                                    stdout=subprocess.PIPE)
-      proc.stdin.write(TEST_PAYLOAD)
-      proc.stdin.flush()
-      proc.stdin.close()
-      self.assertEqual(TEST_PAYLOAD, proc.stdout.readline())
-      proc.kill()
-
-
-  if __name__ == '__main__':
-      unittest.main()
diff --git a/build/fuchsia/remote_cmd.py b/build/fuchsia/remote_cmd.py
deleted file mode 100644
index 67d107f..0000000
--- a/build/fuchsia/remote_cmd.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Helper functions for remotely executing and copying files over a SSH
-connection."""
-
-import logging
-import os
-import subprocess
-import sys
-
-_SSH = ['ssh']
-_SCP = ['scp', '-C']  # Use gzip compression.
-_SSH_LOGGER = logging.getLogger('ssh')
-
-COPY_TO_TARGET = 0
-COPY_FROM_TARGET = 1
-
-
-def _IsLinkLocalIPv6(hostname):
-  return hostname.startswith('fe80::')
-
-
-def RunSsh(config_path, host, port, command, silent):
-  """Executes an SSH command on the remote host and blocks until completion.
-
-  config_path: Full path to SSH configuration.
-  host: The hostname or IP address of the remote host.
-  port: The port to connect to.
-  command: A list of strings containing the command and its arguments.
-  silent: If true, suppresses all output from 'ssh'.
-
-  Returns the exit code from the remote command."""
-
-  ssh_command = _SSH + ['-F', config_path,
-                        host,
-                        '-p', str(port)] + command
-  _SSH_LOGGER.debug('ssh exec: ' + ' '.join(ssh_command))
-  if silent:
-    devnull = open(os.devnull, 'w')
-    return subprocess.call(ssh_command, stderr=devnull, stdout=devnull)
-  else:
-    return subprocess.call(ssh_command)
-
-
-def RunPipedSsh(config_path, host, port, command = None, ssh_args = None,
-                **kwargs):
-  """Executes an SSH command on the remote host and returns a process object
-  with access to the command's stdio streams. Does not block.
-
-  config_path: Full path to SSH configuration.
-  host: The hostname or IP address of the remote host.
-  port: The port to connect to.
-  command: A list of strings containing the command and its arguments.
-  ssh_args: Arguments that will be passed to SSH.
-  kwargs: A dictionary of parameters to be passed to subprocess.Popen().
-          The parameters can be used to override stdin and stdout, for example.
-
-  Returns a Popen object for the command."""
-
-  if not command:
-    command = []
-  if not ssh_args:
-    ssh_args = []
-
-  ssh_command = _SSH + ['-F', config_path,
-                        host,
-                        '-p', str(port)] + ssh_args + ['--'] + command
-  _SSH_LOGGER.debug(' '.join(ssh_command))
-  return subprocess.Popen(ssh_command, **kwargs)
-
-
-def RunScp(config_path, host, port, sources, dest, direction, recursive=False):
-  """Copies a file to or from a remote host using SCP and blocks until
-  completion.
-
-  config_path: Full path to SSH configuration.
-  host: The hostname or IP address of the remote host.
-  port: The port to connect to.
-  sources: Paths of the files to be copied.
-  dest: The path that |sources| will be copied to.
-  direction: Indicates whether the file should be copied to
-             or from the remote side.
-             Valid values are COPY_TO_TARGET or COPY_FROM_TARGET.
-  recursive: If true, performs a recursive copy.
-
-  Function will raise an assertion if a failure occurred."""
-
-  scp_command = _SCP[:]
-  if ':' in host:
-    scp_command.append('-6')
-    host = '[' + host + ']'
-  if _SSH_LOGGER.getEffectiveLevel() == logging.DEBUG:
-    scp_command.append('-v')
-  if recursive:
-    scp_command.append('-r')
-
-  if direction == COPY_TO_TARGET:
-    dest = "%s:%s" % (host, dest)
-  else:
-    sources = ["%s:%s" % (host, source) for source in sources]
-
-  scp_command += ['-F', config_path, '-P', str(port)]
-  scp_command += sources
-  scp_command += [dest]
-
-  _SSH_LOGGER.debug(' '.join(scp_command))
-  subprocess.check_call(scp_command, stdout=open(os.devnull, 'w'))
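
RunScp()'s main subtlety is argument assembly: IPv6 hosts get -6 and square brackets, and the copy direction decides which side receives the host: prefix. A runnable sketch of that assembly (host, config path and file names are placeholders):

    COPY_TO_TARGET, COPY_FROM_TARGET = 0, 1

    def build_scp_command(config_path, host, port, sources, dest, direction):
        cmd = ['scp', '-C']            # -C enables compression
        if ':' in host:                # bare IPv6 literal
            cmd.append('-6')
            host = '[' + host + ']'
        if direction == COPY_TO_TARGET:
            dest = '%s:%s' % (host, dest)
        else:
            sources = ['%s:%s' % (host, s) for s in sources]
        return cmd + ['-F', config_path, '-P', str(port)] + sources + [dest]

    print(build_scp_command('out/ssh_config', 'fe80::1', 22,
                            ['payload'], '/tmp/payload', COPY_TO_TARGET))
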
diff --git a/build/fuchsia/run_package.py b/build/fuchsia/run_package.py
deleted file mode 100644
index 940bb1c..0000000
--- a/build/fuchsia/run_package.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Contains a helper function for deploying and executing a packaged
-executable on a Target."""
-
-import common
-import json
-import logging
-import multiprocessing
-import os
-import shutil
-import subprocess
-import tempfile
-import threading
-import uuid
-import select
-
-from symbolizer import FilterStream
-
-FAR = os.path.join(common.SDK_ROOT, 'tools', 'far')
-PM = os.path.join(common.SDK_ROOT, 'tools', 'pm')
-
-# Amount of time to wait for the termination of the system log output thread.
-_JOIN_TIMEOUT_SECS = 5
-
-
-def _AttachKernelLogReader(target):
-  """Attaches a kernel log reader as a long-running SSH task."""
-
-  logging.info('Attaching kernel logger.')
-  return target.RunCommandPiped(['dlog', '-f'], stdin=open(os.devnull, 'r'),
-                                stdout=subprocess.PIPE)
-
-
-def _ReadMergedLines(streams):
-  """Creates a generator which merges the buffered line output from |streams|.
-  The generator is terminated when the primary (first in sequence) stream
-  signals EOF. Absolute output ordering is not guaranteed."""
-
-  assert len(streams) > 0
-  poll = select.poll()
-  streams_by_fd = {}
-  primary_fd = streams[0].fileno()
-  for s in streams:
-    poll.register(s.fileno(), select.POLLIN)
-    streams_by_fd[s.fileno()] = s
-
-  try:
-    while primary_fd != None:
-      events = poll.poll(1)
-      for fileno, event in events:
-        if event & select.POLLIN:
-          yield streams_by_fd[fileno].readline()
-
-        elif event & select.POLLHUP:
-          poll.unregister(fileno)
-          del streams_by_fd[fileno]
-
-          if fileno == primary_fd:
-            primary_fd = None
-  finally:
-    for fd_to_cleanup, _ in streams_by_fd.iteritems():
-      poll.unregister(fd_to_cleanup)
-
-
-def DrainStreamToStdout(stream, quit_event):
-  """Outputs the contents of |stream| until |quit_event| is set."""
-
-  poll = select.poll()
-  poll.register(stream.fileno(), select.POLLIN)
-  try:
-    while not quit_event.is_set():
-      events = poll.poll(1)
-      for fileno, event in events:
-        if event & select.POLLIN:
-          print stream.readline().rstrip()
-        elif event & select.POLLHUP:
-          break
-
-  finally:
-    poll.unregister(stream.fileno())
-
-
-def RunPackage(output_dir, target, package_path, package_name, run_args,
-               system_logging, symbolizer_config=None):
-  """Copies the Fuchsia package at |package_path| to the target,
-  executes it with |run_args|, and symbolizes its output.
-
-  output_dir: The path containing the build output files.
-  target: The deployment Target object that will run the package.
-  package_path: The path to the .far package file.
-  package_name: The name of app specified by package metadata.
-  run_args: The arguments which will be passed to the Fuchsia process.
-  system_logging: If true, connects a system log reader to the target.
-  symbolizer_config: A newline delimited list of source files contained
-                     in the package. Omitting this parameter will disable
-                     symbolization.
-
-  Returns the exit code of the remote package process."""
-
-
-  system_logger = _AttachKernelLogReader(target) if system_logging else None
-  package_copied = False
-  try:
-    if system_logger:
-      # Spin up a thread to asynchronously dump the system log to stdout
-      # for easier diagnoses of early, pre-execution failures.
-      log_output_quit_event = multiprocessing.Event()
-      log_output_thread = threading.Thread(
-          target=lambda: DrainStreamToStdout(system_logger.stdout,
-                                             log_output_quit_event))
-      log_output_thread.daemon = True
-      log_output_thread.start()
-
-    logging.info('Copying package to target.')
-    install_path = os.path.join('/data', os.path.basename(package_path))
-    target.PutFile(package_path, install_path)
-    package_copied = True
-
-    logging.info('Installing package.')
-    p = target.RunCommandPiped(['pm', 'install', install_path],
-                               stderr=subprocess.PIPE)
-    output = p.stderr.readlines()
-    p.wait()
-
-    if p.returncode != 0:
-      # Don't error out if the package already exists on the device.
-      if len(output) != 1 or 'ErrAlreadyExists' not in output[0]:
-        raise Exception('Error while installing: %s' % '\n'.join(output))
-
-    if system_logger:
-      log_output_quit_event.set()
-      log_output_thread.join(timeout=_JOIN_TIMEOUT_SECS)
-
-    logging.info('Running application.')
-    command = ['run', package_name] + run_args
-    process = target.RunCommandPiped(command,
-                                     stdin=open(os.devnull, 'r'),
-                                     stdout=subprocess.PIPE,
-                                     stderr=subprocess.STDOUT)
-
-    if system_logger:
-      task_output = _ReadMergedLines([process.stdout, system_logger.stdout])
-    else:
-      task_output = process.stdout
-
-    if symbolizer_config:
-      # Decorate the process output stream with the symbolizer.
-      output = FilterStream(task_output, package_name, symbolizer_config,
-                            output_dir)
-    else:
-      logging.warn('Symbolization is DISABLED.')
-      output = process.stdout
-
-    for next_line in output:
-      print next_line.rstrip()
-
-    process.wait()
-    if process.returncode == 0:
-      logging.info('Process exited normally with status code 0.')
-    else:
-      # The test runner returns an error status code if *any* tests fail,
-      # so we should proceed anyway.
-      logging.warning('Process exited with status code %d.' %
-                      process.returncode)
-
-  finally:
-    if system_logger:
-      logging.info('Terminating kernel log reader.')
-      log_output_quit_event.set()
-      log_output_thread.join()
-      system_logger.kill()
-
-    if package_copied:
-      logging.info('Removing package source from device.')
-      target.RunCommand(['rm', install_path])
-
-
-  return process.returncode
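
_ReadMergedLines() and DrainStreamToStdout() both rely on select.poll() to interleave output without blocking on either stream. A self-contained demonstration of the same pattern, fed from an ordinary pipe instead of an SSH subprocess stream (POSIX only, as with the original):

    import os
    import select

    read_fd, write_fd = os.pipe()
    os.write(write_fd, b'kernel: example log line\n')
    os.close(write_fd)                 # closing the writer signals POLLHUP

    stream = os.fdopen(read_fd)
    poll = select.poll()
    poll.register(stream.fileno(), select.POLLIN)
    done = False
    while not done:
        for fd, event in poll.poll(1):
            if event & select.POLLIN:
                print(stream.readline().rstrip())
            if event & select.POLLHUP:
                done = True
    poll.unregister(stream.fileno())
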
diff --git a/build/fuchsia/sdk.sha1 b/build/fuchsia/sdk.sha1
deleted file mode 100644
index 5a25936..0000000
--- a/build/fuchsia/sdk.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6eec9421e1d7d76b0fdf4d43c2f87d5ad9f75428
\ No newline at end of file
diff --git a/build/fuchsia/symbolizer.py b/build/fuchsia/symbolizer.py
deleted file mode 100644
index 67c487d..0000000
--- a/build/fuchsia/symbolizer.py
+++ /dev/null
@@ -1,230 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import re
-import subprocess
-
-# Matches the coarse syntax of a backtrace entry.
-_BACKTRACE_PREFIX_RE = re.compile(r'(\[[0-9.]+\] )?bt#(?P<frame_id>\d+): ')
-
-# Matches the specific fields of a backtrace entry.
-# Back-trace line matcher/parser assumes that 'pc' is always present, and
-# expects that 'sp' and ('binary','pc_offset') may also be provided.
-_BACKTRACE_ENTRY_RE = re.compile(
-    r'pc 0(?:x[0-9a-f]+)?' +
-    r'(?: sp 0x[0-9a-f]+)?' +
-    r'(?: \((?P<binary>\S+),(?P<pc_offset>0x[0-9a-f]+)\))?$')
-
-
-def _GetUnstrippedPath(path):
-  """If there is a binary located at |path|, returns a path to its unstripped
-  source.
-
-  Returns None if |path| isn't a binary or doesn't exist in the lib.unstripped
-  or exe.unstripped directories."""
-
-  if path.endswith('.so'):
-    maybe_unstripped_path = os.path.normpath(
-        os.path.join(path, os.path.pardir, 'lib.unstripped',
-                     os.path.basename(path)))
-  else:
-    maybe_unstripped_path = os.path.normpath(
-        os.path.join(path, os.path.pardir, 'exe.unstripped',
-                     os.path.basename(path)))
-
-  if not os.path.exists(maybe_unstripped_path):
-    return None
-
-  with open(maybe_unstripped_path, 'rb') as f:
-    file_tag = f.read(4)
-  if file_tag != '\x7fELF':
-    logging.warn('Expected an ELF binary: ' + maybe_unstripped_path)
-    return None
-
-  return maybe_unstripped_path
-
-
-def FilterStream(stream, package_name, manifest_path, output_dir):
-  """Looks for backtrace lines from an iterable |stream| and symbolizes them.
-  Yields a stream of strings with symbolized entries replaced."""
-
-  return _SymbolizerFilter(package_name,
-                           manifest_path,
-                           output_dir).SymbolizeStream(stream)
-
-
-class _SymbolizerFilter(object):
-  """Adds backtrace symbolization capabilities to a process output stream."""
-
-  def __init__(self, package_name, manifest_path, output_dir):
-    self._symbols_mapping = {}
-    self._output_dir = output_dir
-    self._package_name = package_name
-
-    # Compute remote/local path mappings using the manifest data.
-    for next_line in open(manifest_path):
-      target, source = next_line.strip().split('=')
-      stripped_binary_path = _GetUnstrippedPath(os.path.join(output_dir,
-                                                             source))
-      if not stripped_binary_path:
-        continue
-
-      self._symbols_mapping[os.path.basename(target)] = stripped_binary_path
-      self._symbols_mapping[target] = stripped_binary_path
-      if target == 'bin/app':
-        self._symbols_mapping[package_name] = stripped_binary_path
-      logging.debug('Symbols: %s -> %s' % (source, target))
-
-  def _SymbolizeEntries(self, entries):
-    """Symbolizes the parsed backtrace |entries| by calling addr2line.
-
-    Returns a set of (frame_id, result) pairs."""
-
-    filename_re = re.compile(r'at ([-._a-zA-Z0-9/+]+):(\d+)')
-
-    # Use addr2line to symbolize all the |pc_offset|s in |entries| in one go.
-    # Entries with no |debug_binary| are also processed here, so that we get
-    # consistent output in that case, with the cannot-symbolize case.
-    addr2line_output = None
-    if entries[0].has_key('debug_binary'):
-      addr2line_args = (['addr2line', '-Cipf', '-p',
-                        '--exe=' + entries[0]['debug_binary']] +
-                        map(lambda entry: entry['pc_offset'], entries))
-      addr2line_output = subprocess.check_output(addr2line_args).splitlines()
-      assert addr2line_output
-
-    results = {}
-    for entry in entries:
-      raw, frame_id = entry['raw'], entry['frame_id']
-      prefix = '#%s: ' % frame_id
-
-      if not addr2line_output:
-        # Either there was no addr2line output, or too little of it.
-        filtered_line = raw
-      else:
-        output_line = addr2line_output.pop(0)
-
-        # Relativize path to the current working (output) directory if we see
-        # a filename.
-        def RelativizePath(m):
-          relpath = os.path.relpath(os.path.normpath(m.group(1)))
-          return 'at ' + relpath + ':' + m.group(2)
-        filtered_line = filename_re.sub(RelativizePath, output_line)
-
-        if '??' in filtered_line.split():
-          # If symbolization fails just output the raw backtrace.
-          filtered_line = raw
-        else:
-          # Release builds may inline things, resulting in "(inlined by)" lines.
-          inlined_by_prefix = " (inlined by)"
-          while (addr2line_output and
-                 addr2line_output[0].startswith(inlined_by_prefix)):
-            inlined_by_line = \
-                '\n' + (' ' * len(prefix)) + addr2line_output.pop(0)
-            filtered_line += filename_re.sub(RelativizePath, inlined_by_line)
-
-      results[entry['frame_id']] = prefix + filtered_line
-
-    return results
-
-  def _LookupDebugBinary(self, entry):
-    """Looks up the binary listed in |entry| in the |_symbols_mapping|.
-    Returns the corresponding host-side binary's filename, or None."""
-
-    binary = entry['binary']
-    if not binary:
-      return None
-
-    app_prefix = 'app:'
-    if binary.startswith(app_prefix):
-      binary = binary[len(app_prefix):]
-
-    # We change directory into /system/ before running the target executable, so
-    # all paths are relative to "/system/", and will typically start with "./".
-    # Some crashes still use the full filesystem path, so cope with that, too.
-    pkg_prefix = '/pkg/'
-    cwd_prefix = './'
-    if binary.startswith(cwd_prefix):
-      binary = binary[len(cwd_prefix):]
-    elif binary.startswith(pkg_prefix):
-      binary = binary[len(pkg_prefix):]
-    # Allow other paths to pass-through; sometimes neither prefix is present.
-
-    if binary in self._symbols_mapping:
-      return self._symbols_mapping[binary]
-
-    # |binary| may be truncated by the crashlogger, so if there is a unique
-    # match for the truncated name in |symbols_mapping|, use that instead.
-    matches = filter(lambda x: x.startswith(binary),
-                               self._symbols_mapping.keys())
-    if len(matches) == 1:
-      return self._symbols_mapping[matches[0]]
-
-    return None
-
-  def _SymbolizeBacktrace(self, backtrace):
-    """Group |backtrace| entries according to the associated binary, and locate
-    the path to the debug symbols for that binary, if any."""
-
-    batches = {}
-
-    for entry in backtrace:
-      debug_binary = self._LookupDebugBinary(entry)
-      if debug_binary:
-        entry['debug_binary'] = debug_binary
-      batches.setdefault(debug_binary, []).append(entry)
-
-    # Run _SymbolizeEntries on each batch and collate the results.
-    symbolized = {}
-    for batch in batches.itervalues():
-      symbolized.update(self._SymbolizeEntries(batch))
-
-    # Map each entry to its symbolized form, by frame-id, and return the list.
-    return map(lambda entry: symbolized[entry['frame_id']], backtrace)
-
-  def SymbolizeStream(self, stream):
-    """Creates a symbolized logging stream object using the output from
-    |stream|."""
-
-    # A buffer of backtrace entries awaiting symbolization, stored as dicts:
-    # raw: The original back-trace line that followed the prefix.
-    # frame_id: backtrace frame number (starting at 0).
-    # binary: path to executable code corresponding to the current frame.
-    # pc_offset: memory offset within the executable.
-    backtrace_entries = []
-
-    # Read from the stream until we hit EOF.
-    for line in stream:
-      line = line.rstrip()
-
-      # Look for the back-trace prefix, otherwise just emit the line.
-      matched = _BACKTRACE_PREFIX_RE.match(line)
-      if not matched:
-        yield line
-        continue
-      backtrace_line = line[matched.end():]
-
-      # If this was the end of a back-trace then symbolize and emit it.
-      frame_id = matched.group('frame_id')
-      if backtrace_line == 'end':
-        if backtrace_entries:
-          for processed in self._SymbolizeBacktrace(backtrace_entries):
-            yield processed
-        backtrace_entries = []
-        continue
-
-      # Parse the program-counter offset, etc into |backtrace_entries|.
-      matched = _BACKTRACE_ENTRY_RE.match(backtrace_line)
-      if matched:
-        # |binary| and |pc_offset| will be None if not present.
-        backtrace_entries.append(
-            {'raw': backtrace_line, 'frame_id': frame_id,
-             'binary': matched.group('binary'),
-             'pc_offset': matched.group('pc_offset')})
-      else:
-        backtrace_entries.append(
-            {'raw': backtrace_line, 'frame_id': frame_id,
-             'binary': None, 'pc_offset': None})
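
The two regexes above split a crashlogger line into a frame id plus an optional (binary, pc offset) pair, which is what drives the addr2line batches. A runnable sketch of what they extract (the sample line is representative, not taken from a real crash):

    import re

    PREFIX_RE = re.compile(r'(\[[0-9.]+\] )?bt#(?P<frame_id>\d+): ')
    ENTRY_RE = re.compile(
        r'pc 0(?:x[0-9a-f]+)?'
        r'(?: sp 0x[0-9a-f]+)?'
        r'(?: \((?P<binary>\S+),(?P<pc_offset>0x[0-9a-f]+)\))?$')

    line = '[12.345] bt#03: pc 0x1c0a5f3f sp 0x4a3b2f00 (app:/pkg/bin/app,0x2f3f)'
    prefix = PREFIX_RE.match(line)
    entry = ENTRY_RE.match(line[prefix.end():])
    print(prefix.group('frame_id'), entry.group('binary'), entry.group('pc_offset'))
    # -> 03 app:/pkg/bin/app 0x2f3f
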
diff --git a/build/fuchsia/target.py b/build/fuchsia/target.py
deleted file mode 100644
index 8eb5fcf..0000000
--- a/build/fuchsia/target.py
+++ /dev/null
@@ -1,168 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import remote_cmd
-import subprocess
-import sys
-import tempfile
-import time
-
-_SHUTDOWN_CMD = ['dm', 'poweroff']
-_ATTACH_MAX_RETRIES = 10
-_ATTACH_RETRY_INTERVAL = 1
-
-
-class FuchsiaTargetException(Exception):
-  def __init__(self, message):
-    super(FuchsiaTargetException, self).__init__(message)
-
-
-class Target(object):
-  """Base class representing a Fuchsia deployment target."""
-
-  def __init__(self, output_dir, target_cpu):
-    self._output_dir = output_dir
-    self._started = False
-    self._dry_run = False
-    self._target_cpu = target_cpu
-
-  # Functions used by the Python context manager for teardown.
-  def __enter__(self):
-    return self
-  def __exit__(self, exc_type, exc_val, exc_tb):
-    return self
-
-  def Start(self):
-    """Handles the instantiation and connection process for the Fuchsia
-    target instance."""
-
-    pass
-
-  def IsStarted(self):
-    """Returns True if the Fuchsia target instance is ready to accept
-    commands."""
-
-    return self._started
-
-  def IsNewInstance(self):
-    """Returns True if the connected target instance is newly provisioned."""
-
-    return True
-
-  def RunCommandPiped(self, command, **kwargs):
-    """Starts a remote command and immediately returns a Popen object for the
-    command. The caller may interact with the streams, inspect the status code,
-    wait on command termination, etc.
-
-    command: A list of strings representing the command and arguments.
-    kwargs: A dictionary of parameters to be passed to subprocess.Popen().
-            The parameters can be used to override stdin and stdout, for
-            example.
-
-    Returns: a Popen object.
-
-    Note: method does not block."""
-
-    self._AssertIsStarted()
-    logging.debug('running (non-blocking) \'%s\'.' % ' '.join(command))
-    host, port = self._GetEndpoint()
-    return remote_cmd.RunPipedSsh(self._GetSshConfigPath(), host, port, command,
-                                  **kwargs)
-
-  def RunCommand(self, command, silent=False):
-    """Executes a remote command and waits for it to finish executing.
-
-    Returns the exit code of the command."""
-
-    self._AssertIsStarted()
-    logging.debug('running \'%s\'.' % ' '.join(command))
-    host, port = self._GetEndpoint()
-    return remote_cmd.RunSsh(self._GetSshConfigPath(), host, port, command,
-                             silent)
-
-  def PutFile(self, source, dest, recursive=False):
-    """Copies a file from the local filesystem to the target filesystem.
-
-    source: The path of the file being copied.
-    dest: The path on the remote filesystem which will be copied to.
-    recursive: If true, performs a recursive copy."""
-
-    assert type(source) is str
-    self.PutFiles([source], dest, recursive)
-
-  def PutFiles(self, sources, dest, recursive=False):
-    """Copies files from the local filesystem to the target filesystem.
-
-    sources: List of local file paths to copy from, or a single path.
-    dest: The path on the remote filesystem which will be copied to.
-    recursive: If true, performs a recursive copy."""
-
-    assert type(sources) is tuple or type(sources) is list
-    self._AssertIsStarted()
-    host, port = self._GetEndpoint()
-    logging.debug('copy local:%s => remote:%s' % (sources, dest))
-    command = remote_cmd.RunScp(self._GetSshConfigPath(), host, port,
-                                sources, dest, remote_cmd.COPY_TO_TARGET,
-                                recursive)
-
-  def GetFile(self, source, dest):
-    """Copies a file from the target filesystem to the local filesystem.
-
-    source: The path of the file being copied.
-    dest: The path on the local filesystem which will be copied to."""
-    assert type(source) is str
-    self.GetFiles([source], dest)
-
-  def GetFiles(self, sources, dest):
-    """Copies files from the target filesystem to the local filesystem.
-
-    sources: List of remote file paths to copy.
-    dest: The path on the local filesystem which will be copied to."""
-    assert type(sources) is tuple or type(sources) is list
-    self._AssertIsStarted()
-    host, port = self._GetEndpoint()
-    logging.debug('copy remote:%s => local:%s' % (sources, dest))
-    return remote_cmd.RunScp(self._GetSshConfigPath(), host, port,
-                             sources, dest, remote_cmd.COPY_FROM_TARGET)
-
-  def _GetEndpoint(self):
-    """Returns a (host, port) tuple for the SSH connection to the target."""
-    raise NotImplementedError
-
-  def _GetTargetSdkArch(self):
-    """Returns the Fuchsia SDK architecture name for the target CPU."""
-    if self._target_cpu == 'arm64':
-      return 'aarch64'
-    elif self._target_cpu == 'x64':
-      return 'x86_64'
-    raise FuchsiaTargetException('Unknown target_cpu:' + self._target_cpu)
-
-  def _AssertIsStarted(self):
-    assert self.IsStarted()
-
-  def _WaitUntilReady(self, retries=_ATTACH_MAX_RETRIES):
-    logging.info('Connecting to Fuchsia using SSH.')
-    for _ in xrange(retries+1):
-      host, port = self._GetEndpoint()
-      if remote_cmd.RunSsh(self._GetSshConfigPath(), host, port, ['true'],
-                           True) == 0:
-        logging.info('Connected!')
-        self._started = True
-        return True
-      time.sleep(_ATTACH_RETRY_INTERVAL)
-    logging.error('Timeout limit reached.')
-    raise FuchsiaTargetException('Couldn\'t connect using SSH.')
-
-  def _GetSshConfigPath(self, path):
-    raise NotImplementedError
-
-  def _GetTargetSdkArch(self):
-    """Returns the Fuchsia SDK architecture name for the target CPU."""
-    if self._target_cpu == 'arm64':
-      return 'aarch64'
-    elif self._target_cpu == 'x64':
-      return 'x86_64'
-    raise Exception('Unknown target_cpu %s:' % self._target_cpu)
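
For reference, the connection logic above amounts to retrying a trivial remote
command until the device answers. A minimal sketch of that idea against a plain
`ssh` client (the helper name and retry values are illustrative only, not part
of the deleted module):

  import subprocess
  import time

  def wait_until_ready(host, port, retries=30, interval=5):
      # Probe with a no-op command until SSH accepts the connection.
      for _ in range(retries + 1):
          if subprocess.call(['ssh', '-p', str(port), host, 'true']) == 0:
              return True
          time.sleep(interval)
      raise RuntimeError("Couldn't connect to %s:%d over SSH." % (host, port))
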
diff --git a/build/fuchsia/test_runner.py b/build/fuchsia/test_runner.py
deleted file mode 100755
index e3b21a3..0000000
--- a/build/fuchsia/test_runner.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Deploys and runs a test package on a Fuchsia target."""
-
-import argparse
-import json
-import logging
-import os
-import socket
-import subprocess
-import sys
-import tempfile
-import time
-
-from common_args import AddCommonArgs, ConfigureLogging, GetDeploymentTargetForArgs
-from net_test_server import SetupTestServer
-from run_package import RunPackage
-
-DEFAULT_TEST_CONCURRENCY = 4
-TEST_RESULT_PATH = '/data/test_summary.json'
-TEST_FILTER_PATH = '/data/test_filter.txt'
-
-def main():
-  parser = argparse.ArgumentParser()
-  AddCommonArgs(parser)
-  parser.add_argument('--gtest_filter',
-                      help='GTest filter to use in place of any default.')
-  parser.add_argument('--gtest_repeat',
-                      help='GTest repeat value to use. This also disables the '
-                           'test launcher timeout.')
-  parser.add_argument('--gtest_break_on_failure', action='store_true',
-                      default=False,
-                      help='Should GTest break on failure; useful with '
-                           '--gtest_repeat.')
-  parser.add_argument('--single-process-tests', action='store_true',
-                      default=False,
-                      help='Runs the tests and the launcher in the same '
-                           'process. Useful for debugging.')
-  parser.add_argument('--test-launcher-batch-limit',
-                      type=int,
-                      help='Sets the limit of test batch to run in a single '
-                      'process.')
-  # --test-launcher-filter-file is specified relative to --output-directory,
-  # so specifying type=os.path.* will break it.
-  parser.add_argument('--test-launcher-filter-file',
-                      default=None,
-                      help='Override default filter file passed to target test '
-                      'process. Set an empty path to disable filtering.')
-  parser.add_argument('--test-launcher-jobs',
-                      type=int,
-                      help='Sets the number of parallel test jobs.')
-  parser.add_argument('--test-launcher-summary-output',
-                      help='Where the test launcher will output its json.')
-  parser.add_argument('--enable-test-server', action='store_true',
-                      default=False,
-                      help='Enable Chrome test server spawner.')
-  parser.add_argument('child_args', nargs='*',
-                      help='Arguments for the test process.')
-  args = parser.parse_args()
-  ConfigureLogging(args)
-
-  child_args = ['--test-launcher-retry-limit=0']
-  if args.single_process_tests:
-    child_args.append('--single-process-tests')
-  if args.test_launcher_batch_limit:
-    child_args.append('--test-launcher-batch-limit=%d' %
-                       args.test_launcher_batch_limit)
-
-  test_concurrency = args.test_launcher_jobs \
-      if args.test_launcher_jobs else DEFAULT_TEST_CONCURRENCY
-  child_args.append('--test-launcher-jobs=%d' % test_concurrency)
-
-  if args.gtest_filter:
-    child_args.append('--gtest_filter=' + args.gtest_filter)
-  if args.gtest_repeat:
-    child_args.append('--gtest_repeat=' + args.gtest_repeat)
-    child_args.append('--test-launcher-timeout=-1')
-  if args.gtest_break_on_failure:
-    child_args.append('--gtest_break_on_failure')
-  if args.child_args:
-    child_args.extend(args.child_args)
-
-  if args.test_launcher_summary_output:
-    child_args.append('--test-launcher-summary-output=' + TEST_RESULT_PATH)
-
-  with GetDeploymentTargetForArgs(args) as target:
-    target.Start()
-
-    if args.test_launcher_filter_file:
-      target.PutFile(args.test_launcher_filter_file, TEST_FILTER_PATH)
-      child_args.append('--test-launcher-filter-file=' + TEST_FILTER_PATH)
-
-    forwarder = None
-    if args.enable_test_server:
-      test_server = SetupTestServer(target, test_concurrency)
-
-    returncode = RunPackage(
-        args.output_directory, target, args.package, args.package_name,
-        child_args, args.include_system_logs, args.package_manifest)
-
-    if forwarder:
-      forwarder.terminate()
-      forwarder.wait()
-
-    if args.test_launcher_summary_output:
-      target.GetFile(TEST_RESULT_PATH, args.test_launcher_summary_output)
-
-    return returncode
-
-
-if __name__ == '__main__':
-  sys.exit(main())
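
The runner above never hands host paths to the device: it stages the filter
file onto the target, points the launcher at the on-device copy, and pulls the
JSON summary back afterwards. A small sketch of that round trip (function and
argument names are illustrative; `target` stands in for the deployment target
object shown above):

  def run_with_files(target, run, filter_file=None, summary_output=None):
      device_filter = '/data/test_filter.txt'
      device_summary = '/data/test_summary.json'
      extra_args = []
      if filter_file:
          # Stage the host-side filter file onto the device first.
          target.PutFile(filter_file, device_filter)
          extra_args.append('--test-launcher-filter-file=' + device_filter)
      returncode = run(extra_args)
      if summary_output:
          # Copy the launcher's JSON summary back to the host.
          target.GetFile(device_summary, summary_output)
      return returncode
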
diff --git a/build/fuchsia/update_sdk.py b/build/fuchsia/update_sdk.py
deleted file mode 100755
index e325352..0000000
--- a/build/fuchsia/update_sdk.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Updates the Fuchsia SDK to the given revision. Should be used in a 'hooks_os'
-entry so that it only runs when .gclient's target_os includes 'fuchsia'."""
-
-import os
-import shutil
-import subprocess
-import sys
-import tarfile
-import tempfile
-
-SDK_HASH_FILE = os.path.join(os.path.dirname(__file__), 'sdk.sha1')
-
-REPOSITORY_ROOT = os.path.abspath(os.path.join(
-    os.path.dirname(__file__), '..', '..'))
-sys.path.append(os.path.join(REPOSITORY_ROOT, 'build'))
-
-import find_depot_tools
-
-SDK_SUBDIRS = ["arch", "pkg", "qemu", "sysroot", "target",
-               "toolchain_libs", "tools"]
-
-
-def EnsureDirExists(path):
-  if not os.path.exists(path):
-    print 'Creating directory %s' % path
-    os.makedirs(path)
-
-
-# Removes previous SDK from the specified path if it's detected there.
-def Cleanup(path):
-  hash_file = os.path.join(path, '.hash')
-  if os.path.exists(hash_file):
-    print 'Removing old SDK from %s.' % path
-    for d in SDK_SUBDIRS:
-      to_remove = os.path.join(path, d)
-      if os.path.isdir(to_remove):
-        shutil.rmtree(to_remove)
-    os.remove(hash_file)
-
-
-# Updates the modification timestamps of |path| and its contents to the
-# current time.
-def UpdateTimestampsRecursive(path):
-  for root, dirs, files in os.walk(path):
-    for f in files:
-      os.utime(os.path.join(root, f), None)
-    for d in dirs:
-      os.utime(os.path.join(root, d), None)
-
-
-def main():
-  if len(sys.argv) != 1:
-    print >>sys.stderr, 'usage: %s' % sys.argv[0]
-    return 1
-
-  # Previously the SDK was unpacked in //third_party/fuchsia-sdk instead of
-  # //third_party/fuchsia-sdk/sdk. Remove the old files if they are still
-  # there.
-  Cleanup(os.path.join(REPOSITORY_ROOT, 'third_party', 'fuchsia-sdk'))
-
-  with open(SDK_HASH_FILE, 'r') as f:
-    sdk_hash = f.read().strip()
-
-  if not sdk_hash:
-    print >>sys.stderr, 'No SHA1 found in %s' % SDK_HASH_FILE
-    return 1
-
-  output_dir = os.path.join(REPOSITORY_ROOT, 'third_party', 'fuchsia-sdk',
-                            'sdk')
-
-  hash_filename = os.path.join(output_dir, '.hash')
-  if os.path.exists(hash_filename):
-    with open(hash_filename, 'r') as f:
-      if f.read().strip() == sdk_hash:
-        # Nothing to do.
-        return 0
-
-  print 'Downloading SDK %s...' % sdk_hash
-
-  if os.path.isdir(output_dir):
-    shutil.rmtree(output_dir)
-
-  fd, tmp = tempfile.mkstemp()
-  os.close(fd)
-
-  try:
-    bucket = 'gs://fuchsia/sdk/linux-amd64/'
-    cmd = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'),
-           'cp', bucket + sdk_hash, tmp]
-    subprocess.check_call(cmd)
-    with open(tmp, 'rb') as f:
-      EnsureDirExists(output_dir)
-      tarfile.open(mode='r:gz', fileobj=f).extractall(path=output_dir)
-  finally:
-    os.remove(tmp)
-
-  with open(hash_filename, 'w') as f:
-    f.write(sdk_hash)
-
-  UpdateTimestampsRecursive(output_dir)
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
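
The update is made idempotent by a `.hash` sentinel: the SDK is only fetched
when the pinned SHA1 differs from the one recorded next to the unpacked files.
That idiom, reduced to a minimal sketch (names illustrative):

  import os

  def needs_update(output_dir, wanted_hash):
      hash_file = os.path.join(output_dir, '.hash')
      if not os.path.exists(hash_file):
          return True
      with open(hash_file, 'r') as f:
          return f.read().strip() != wanted_hash

  def record_hash(output_dir, new_hash):
      # Written only after a successful download and extraction.
      with open(os.path.join(output_dir, '.hash'), 'w') as f:
          f.write(new_hash)
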
diff --git a/build/gdb-add-index b/build/gdb-add-index
deleted file mode 100755
index 73367c8..0000000
--- a/build/gdb-add-index
+++ /dev/null
@@ -1,184 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# Saves the gdb index for a given binary and its shared library dependencies.
-#
-# This will run gdb index in parallel on a number of binaries using SIGUSR1
-# as the communication mechanism to simulate a semaphore. Because of the
-# nature of this technique, using "set -e" is very difficult. The SIGUSR1
-# terminates a "wait" with an error which we need to interpret.
-#
-# When modifying this code, most of the real logic is in the index_one_file
-# function. The rest is cleanup + semaphore plumbing.
-
-function usage_exit {
-  echo "Usage: $0 [-f] [-r] [-n] <paths-to-binaries>..."
-  echo "  -f forces replacement of an existing index."
-  echo "  -r removes the index section."
-  echo "  -n don't extract the dependencies of each binary with lld."
-  echo "       e.g., $0 -n out/Debug/lib.unstripped/lib*"
-  echo
-  echo "  Set TOOLCHAIN_PREFIX to use a non-default set of binutils."
-  exit 1
-}
-
-# Cleanup temp directory and ensure all child jobs are dead-dead.
-function on_exit {
-  trap "" EXIT USR1  # Avoid reentrancy.
-
-  local jobs=$(jobs -p)
-  if [ -n "$jobs" ]; then
-    echo -n "Killing outstanding index jobs..."
-    kill -KILL $(jobs -p)
-    wait
-    echo "done"
-  fi
-
-  if [ -d "$directory" ]; then
-    echo -n "Removing temp directory $directory..."
-    rm -rf "$directory"
-    echo done
-  fi
-}
-
-# Add index to one binary.
-function index_one_file {
-  local file=$1
-  local basename=$(basename "$file")
-  local should_index_this_file="${should_index}"
-
-  local readelf_out=$(${TOOLCHAIN_PREFIX}readelf -S "$file")
-  if [[ $readelf_out =~ "gdb_index" ]]; then
-    if $remove_index; then
-      ${TOOLCHAIN_PREFIX}objcopy --remove-section .gdb_index "$file"
-      echo "Removed index from $basename."
-    else
-      echo "Skipped $basename -- already contains index."
-      should_index_this_file=false
-    fi
-  fi
-
-  if $should_index_this_file; then
-    local start=$(date +"%s%N")
-    echo "Adding index to $basename..."
-
-    ${TOOLCHAIN_PREFIX}gdb -batch "$file" -ex "save gdb-index $directory" \
-      -ex "quit"
-    local index_file="$directory/$basename.gdb-index"
-    if [ -f "$index_file" ]; then
-      ${TOOLCHAIN_PREFIX}objcopy --add-section .gdb_index="$index_file" \
-        --set-section-flags .gdb_index=readonly "$file" "$file"
-      local finish=$(date +"%s%N")
-      local elapsed=$(((finish - start) / 1000000))
-      echo "   ...$basename indexed. [${elapsed}ms]"
-    else
-      echo "   ...$basename unindexable."
-    fi
-  fi
-}
-
-# Functions that, when combined, concurrently index all files in the
-# files_to_index array, which is declared in the main body of the script.
-function async_index {
-  # Start a background subshell to run the index command.
-  {
-    index_one_file $1
-    kill -SIGUSR1 $$  # $$ resolves to the parent script.
-    exit 129  # See comment above wait loop at bottom.
-  } &
-}
-
-cur_file_num=0
-function index_next {
-  if ((cur_file_num >= ${#files_to_index[@]})); then
-    return
-  fi
-
-  async_index "${files_to_index[cur_file_num]}"
-  ((cur_file_num += 1)) || true
-}
-
-########
-### Main body of the script.
-
-remove_index=false
-should_index=true
-should_index_deps=true
-files_to_index=()
-while (($# > 0)); do
-  case "$1" in
-    -h)
-      usage_exit
-      ;;
-    -f)
-      remove_index=true
-      ;;
-    -r)
-      remove_index=true
-      should_index=false
-      ;;
-    -n)
-      should_index_deps=false
-      ;;
-    -*)
-      echo "Invalid option: $1" >&2
-      usage_exit
-      ;;
-    *)
-      if [[ ! -f "$1" ]]; then
-        echo "Path $1 does not exist."
-        exit 1
-      fi
-      files_to_index+=("$1")
-      ;;
-  esac
-  shift
-done
-
-if ((${#files_to_index[@]} == 0)); then
-  usage_exit
-fi
-
-dependencies=()
-if $should_index_deps; then
-  for file in "${files_to_index[@]}"; do
-      # Append the shared library dependencies of this file that
-      # have the same dirname. The dirname is a signal that these
-      # shared libraries were part of the same build as the binary.
-      dependencies+=( \
-        $(ldd "$file" 2>/dev/null \
-          | grep $(dirname "$file") \
-          | sed "s/.*[ \t]\(.*\) (.*/\1/") \
-      )
-  done
-fi
-files_to_index+=("${dependencies[@]}")
-
-# Ensure we clean up on exit.
-trap on_exit EXIT INT
-
-# We're good to go! Create temp directory for index files.
-directory=$(mktemp -d)
-echo "Made temp directory $directory."
-
-# Start concurrent indexing.
-trap index_next USR1
-
-# 4 is an arbitrary default. When changing, remember we are likely IO bound
-# so basing this off the number of cores is not sensible.
-index_tasks=${INDEX_TASKS:-4}
-for ((i = 0; i < index_tasks; i++)); do
-  index_next
-done
-
-# Do a wait loop. Bash waits that terminate due to a trap have an exit
-# code > 128. We also ensure that our subshell's "normal" exit occurs with
-# an exit code > 128. This allows us to consider a > 128 exit code as
-# an indication that the loop should continue. Unfortunately, it also means
-# we cannot use set -e since technically the "wait" is failing.
-wait
-while (($? > 128)); do
-  wait
-done
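
The SIGUSR1-as-semaphore plumbing above exists only to cap the number of gdb
processes running at once. The same bounded parallelism can be sketched in a
few lines of Python with concurrent.futures (the wrapper below is illustrative
only; the gdb and objcopy invocations mirror the ones in the script):

  import os
  import subprocess
  import tempfile
  from concurrent.futures import ThreadPoolExecutor

  def add_index(binary, toolchain_prefix=''):
      scratch = tempfile.mkdtemp()
      # Have gdb write <basename>.gdb-index into the scratch directory.
      subprocess.check_call([toolchain_prefix + 'gdb', '-batch', binary,
                             '-ex', 'save gdb-index ' + scratch, '-ex', 'quit'])
      index_file = os.path.join(scratch, os.path.basename(binary) + '.gdb-index')
      # Merge the index back into the binary as a read-only section.
      subprocess.check_call([toolchain_prefix + 'objcopy',
                             '--add-section', '.gdb_index=' + index_file,
                             '--set-section-flags', '.gdb_index=readonly',
                             binary, binary])

  def index_all(binaries, jobs=4):
      # IO-bound, so a small fixed job count is used rather than one per core.
      with ThreadPoolExecutor(max_workers=jobs) as pool:
          list(pool.map(add_index, binaries))
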
diff --git a/build/gen.py b/build/gen.py
new file mode 100755
index 0000000..cdb1d44
--- /dev/null
+++ b/build/gen.py
@@ -0,0 +1,748 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates build.ninja that will build GN."""
+
+import contextlib
+import errno
+import logging
+import optparse
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import tempfile
+
+BOOTSTRAP_DIR = os.path.dirname(os.path.abspath(__file__))
+REPO_ROOT = os.path.dirname(BOOTSTRAP_DIR)
+GN_ROOT = os.path.join(REPO_ROOT, 'tools', 'gn')
+
+is_win = sys.platform.startswith('win')
+is_linux = sys.platform.startswith('linux')
+is_mac = sys.platform.startswith('darwin')
+is_aix = sys.platform.startswith('aix')
+is_posix = is_linux or is_mac or is_aix
+
+
+def main(argv):
+  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
+  parser.add_option('-d', '--debug', action='store_true',
+                    help='Do a debug build. Defaults to release build.')
+  parser.add_option('-v', '--verbose', action='store_true',
+                    help='Log more details')
+  options, args = parser.parse_args(argv)
+
+  if args:
+    parser.error('Unrecognized command line arguments: %s.' % ', '.join(args))
+
+  logging.basicConfig(level=logging.DEBUG if options.verbose else logging.ERROR)
+
+  out_dir = os.path.join(REPO_ROOT, 'out')
+  if not os.path.isdir(out_dir):
+    os.makedirs(out_dir)
+  write_gn_ninja(os.path.join(out_dir, 'build.ninja'), options)
+  return 0
+
+
+def write_generic_ninja(path, static_libraries, executables,
+                        cc, cxx, ar, ld,
+                        cflags=[], cflags_cc=[], ldflags=[],
+                        include_dirs=[], solibs=[]):
+  ninja_header_lines = [
+    'cc = ' + cc,
+    'cxx = ' + cxx,
+    'ar = ' + ar,
+    'ld = ' + ld,
+    '',
+  ]
+
+  if is_win:
+    template_filename = 'build_vs.ninja.template'
+  elif is_mac:
+    template_filename = 'build_mac.ninja.template'
+  elif is_aix:
+    template_filename = 'build_aix.ninja.template'
+  else:
+    template_filename = 'build.ninja.template'
+
+  with open(os.path.join(BOOTSTRAP_DIR, template_filename)) as f:
+    ninja_template = f.read()
+
+  if is_win:
+    executable_ext = '.exe'
+    library_ext = '.lib'
+    object_ext = '.obj'
+  else:
+    executable_ext = ''
+    library_ext = '.a'
+    object_ext = '.o'
+
+  def escape_path_ninja(path):
+      return path.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:')
+
+  def src_to_obj(path):
+    return escape_path_ninja('%s' % os.path.splitext(path)[0] + object_ext)
+
+  def library_to_a(library):
+    return '%s%s' % (library, library_ext)
+
+  ninja_lines = []
+  def build_source(src_file, settings):
+    ninja_lines.extend([
+        'build %s: %s %s' % (src_to_obj(src_file),
+                             settings['tool'],
+                             escape_path_ninja(
+                                 os.path.join(REPO_ROOT, src_file))),
+        '  includes = %s' % ' '.join(
+            ['-I' + escape_path_ninja(dirname) for dirname in
+             include_dirs + settings.get('include_dirs', [])]),
+        '  cflags = %s' % ' '.join(cflags + settings.get('cflags', [])),
+        '  cflags_cc = %s' %
+            ' '.join(cflags_cc + settings.get('cflags_cc', [])),
+    ])
+
+  for library, settings in static_libraries.iteritems():
+    for src_file in settings['sources']:
+      build_source(src_file, settings)
+
+    ninja_lines.append('build %s: alink_thin %s' % (
+        library_to_a(library),
+        ' '.join([src_to_obj(src_file) for src_file in settings['sources']])))
+
+  for executable, settings in executables.iteritems():
+    for src_file in settings['sources']:
+      build_source(src_file, settings)
+
+    ninja_lines.extend([
+      'build %s%s: link %s | %s' % (
+          executable, executable_ext,
+          ' '.join([src_to_obj(src_file) for src_file in settings['sources']]),
+          ' '.join([library_to_a(library) for library in settings['libs']])),
+      '  ldflags = %s' % ' '.join(ldflags),
+      '  solibs = %s' % ' '.join(solibs),
+      '  libs = %s' % ' '.join(
+          [library_to_a(library) for library in settings['libs']]),
+    ])
+
+  ninja_lines.append('')  # Make sure the file ends with a newline.
+
+  with open(path, 'w') as f:
+    f.write('\n'.join(ninja_header_lines))
+    f.write(ninja_template)
+    f.write('\n'.join(ninja_lines))
+
+def write_gn_ninja(path, options):
+  if is_win:
+    cc = os.environ.get('CC', 'cl.exe')
+    cxx = os.environ.get('CXX', 'cl.exe')
+    ld = os.environ.get('LD', 'link.exe')
+    ar = os.environ.get('AR', 'lib.exe')
+  elif is_aix:
+    cc = os.environ.get('CC', 'gcc')
+    cxx = os.environ.get('CXX', 'c++')
+    ld = os.environ.get('LD', cxx)
+    ar = os.environ.get('AR', 'ar -X64')
+  else:
+    cc = os.environ.get('CC', 'cc')
+    cxx = os.environ.get('CXX', 'c++')
+    ld = cxx
+    ar = os.environ.get('AR', 'ar')
+
+  cflags = os.environ.get('CFLAGS', '').split()
+  cflags_cc = os.environ.get('CXXFLAGS', '').split()
+  ldflags = os.environ.get('LDFLAGS', '').split()
+  include_dirs = [REPO_ROOT, os.path.join(REPO_ROOT, 'src')]
+  libs = []
+
+  # //base/allocator/allocator_extension.cc needs this macro defined,
+  # otherwise there would be link errors.
+  cflags.extend(['-DNO_TCMALLOC', '-D__STDC_FORMAT_MACROS'])
+
+  if is_posix:
+    if options.debug:
+      cflags.extend(['-O0', '-g'])
+    else:
+      # The linux::ppc64 BE binary doesn't "work" when
+      # optimization level is set to 2 (0 works fine).
+      # Note that the current bootstrap script has no way to detect host_cpu.
+      # This can be easily fixed once we start building using a GN binary,
+      # as the optimization flag can then just be set using the
+      # logic inside //build/toolchain.
+      cflags.extend(['-O2', '-g0'])
+
+    cflags.extend([
+        '-D_FILE_OFFSET_BITS=64',
+        '-D__STDC_CONSTANT_MACROS', '-D__STDC_FORMAT_MACROS',
+        '-pthread',
+        '-pipe',
+        '-fno-exceptions'
+    ])
+    cflags_cc.extend(['-std=c++14', '-Wno-c++11-narrowing'])
+    if is_aix:
+      cflags.extend(['-maix64'])
+      ldflags.extend(['-maix64 -Wl,-bbigtoc'])
+  elif is_win:
+    if not options.debug:
+      cflags.extend(['/Ox', '/DNDEBUG', '/GL'])
+      ldflags.extend(['/LTCG', '/OPT:REF', '/OPT:ICF'])
+
+    cflags.extend([
+        '/FS',
+        '/Gy',
+        '/W3', '/wd4244',
+        '/Zi',
+        '/DWIN32_LEAN_AND_MEAN', '/DNOMINMAX',
+        '/D_CRT_SECURE_NO_DEPRECATE', '/D_SCL_SECURE_NO_DEPRECATE',
+        '/D_WIN32_WINNT=0x0A00', '/DWINVER=0x0A00',
+        '/DUNICODE', '/D_UNICODE',
+    ])
+    cflags_cc.extend([
+        '/GR-',
+        '/D_HAS_EXCEPTIONS=0',
+    ])
+
+    ldflags.extend(['/MACHINE:x64'])
+
+  static_libraries = {
+      'base': {'sources': [], 'tool': 'cxx', 'include_dirs': []},
+      'dynamic_annotations': {'sources': [], 'tool': 'cc', 'include_dirs': []},
+      'gn_lib': {'sources': [], 'tool': 'cxx', 'include_dirs': []},
+  }
+
+  executables = {
+      'gn': {'sources': ['tools/gn/gn_main.cc'],
+             'tool': 'cxx', 'include_dirs': [], 'libs': []},
+  }
+
+  for name in os.listdir(GN_ROOT):
+    if not name.endswith('.cc'):
+      continue
+    if name.endswith('_unittest.cc'):
+      continue
+    if name == 'run_all_unittests.cc':
+      continue
+    if name == 'test_with_scheduler.cc':
+      continue
+    if name == 'gn_main.cc':
+      continue
+    full_path = os.path.join(GN_ROOT, name)
+    static_libraries['gn_lib']['sources'].append(
+        os.path.relpath(full_path, REPO_ROOT))
+
+  static_libraries['dynamic_annotations']['sources'].extend([
+      'base/third_party/dynamic_annotations/dynamic_annotations.c',
+      'base/third_party/superfasthash/superfasthash.c',
+  ])
+  static_libraries['base']['sources'].extend([
+      'base/allocator/allocator_check.cc',
+      'base/allocator/allocator_extension.cc',
+      'base/at_exit.cc',
+      'base/base_paths.cc',
+      'base/base_switches.cc',
+      'base/callback_helpers.cc',
+      'base/callback_internal.cc',
+      'base/command_line.cc',
+      'base/debug/activity_tracker.cc',
+      'base/debug/alias.cc',
+      'base/debug/crash_logging.cc',
+      'base/debug/dump_without_crashing.cc',
+      'base/debug/stack_trace.cc',
+      'base/debug/task_annotator.cc',
+      'base/debug/thread_heap_usage_tracker.cc',
+      'base/environment.cc',
+      'base/feature_list.cc',
+      'base/files/file.cc',
+      'base/files/file_enumerator.cc',
+      'base/files/file_path.cc',
+      'base/files/file_path_constants.cc',
+      'base/files/file_tracing.cc',
+      'base/files/file_util.cc',
+      'base/files/important_file_writer.cc',
+      'base/files/memory_mapped_file.cc',
+      'base/files/scoped_file.cc',
+      'base/hash.cc',
+      'base/json/json_parser.cc',
+      'base/json/json_reader.cc',
+      'base/json/json_string_value_serializer.cc',
+      'base/json/json_writer.cc',
+      'base/json/string_escape.cc',
+      'base/lazy_instance_helpers.cc',
+      'base/location.cc',
+      'base/logging.cc',
+      'base/md5.cc',
+      'base/memory/platform_shared_memory_region.cc',
+      'base/memory/read_only_shared_memory_region.cc',
+      'base/memory/ref_counted.cc',
+      'base/memory/ref_counted_memory.cc',
+      'base/memory/shared_memory_mapping.cc',
+      'base/memory/shared_memory_handle.cc',
+      'base/memory/shared_memory_tracker.cc',
+      'base/memory/weak_ptr.cc',
+      'base/message_loop/incoming_task_queue.cc',
+      'base/message_loop/message_loop.cc',
+      'base/message_loop/message_loop_current.cc',
+      'base/message_loop/message_loop_task_runner.cc',
+      'base/message_loop/message_pump.cc',
+      'base/message_loop/message_pump_default.cc',
+      'base/message_loop/watchable_io_message_pump_posix.cc',
+      'base/metrics/bucket_ranges.cc',
+      'base/metrics/dummy_histogram.cc',
+      'base/metrics/field_trial.cc',
+      'base/metrics/field_trial_param_associator.cc',
+      'base/metrics/field_trial_params.cc',
+      'base/metrics/histogram.cc',
+      'base/metrics/histogram_base.cc',
+      'base/metrics/histogram_functions.cc',
+      'base/metrics/histogram_samples.cc',
+      'base/metrics/histogram_snapshot_manager.cc',
+      'base/metrics/metrics_hashes.cc',
+      'base/metrics/persistent_histogram_allocator.cc',
+      'base/metrics/persistent_memory_allocator.cc',
+      'base/metrics/persistent_sample_map.cc',
+      'base/metrics/sample_map.cc',
+      'base/metrics/sample_vector.cc',
+      'base/metrics/sparse_histogram.cc',
+      'base/metrics/statistics_recorder.cc',
+      'base/observer_list_threadsafe.cc',
+      'base/path_service.cc',
+      'base/pending_task.cc',
+      'base/pickle.cc',
+      'base/process/kill.cc',
+      'base/process/memory.cc',
+      'base/process/process_handle.cc',
+      'base/process/process_iterator.cc',
+      'base/process/process_metrics.cc',
+      'base/rand_util.cc',
+      'base/run_loop.cc',
+      'base/sequence_token.cc',
+      'base/sequence_checker_impl.cc',
+      'base/sequenced_task_runner.cc',
+      'base/sha1.cc',
+      'base/strings/pattern.cc',
+      'base/strings/string_number_conversions.cc',
+      'base/strings/string_piece.cc',
+      'base/strings/string_split.cc',
+      'base/strings/string_util.cc',
+      'base/strings/string_util_constants.cc',
+      'base/strings/stringprintf.cc',
+      'base/strings/utf_string_conversion_utils.cc',
+      'base/strings/utf_string_conversions.cc',
+      'base/synchronization/atomic_flag.cc',
+      'base/synchronization/lock.cc',
+      'base/sys_info.cc',
+      'base/task_runner.cc',
+      'base/task_scheduler/delayed_task_manager.cc',
+      'base/task_scheduler/environment_config.cc',
+      'base/task_scheduler/post_task.cc',
+      'base/task_scheduler/priority_queue.cc',
+      'base/task_scheduler/scheduler_lock_impl.cc',
+      'base/task_scheduler/scheduler_single_thread_task_runner_manager.cc',
+      'base/task_scheduler/scheduler_worker.cc',
+      'base/task_scheduler/scheduler_worker_pool.cc',
+      'base/task_scheduler/scheduler_worker_pool_impl.cc',
+      'base/task_scheduler/scheduler_worker_pool_params.cc',
+      'base/task_scheduler/scheduler_worker_stack.cc',
+      'base/task_scheduler/scoped_set_task_priority_for_current_thread.cc',
+      'base/task_scheduler/sequence.cc',
+      'base/task_scheduler/sequence_sort_key.cc',
+      'base/task_scheduler/service_thread.cc',
+      'base/task_scheduler/task.cc',
+      'base/task_scheduler/task_scheduler.cc',
+      'base/task_scheduler/task_scheduler_impl.cc',
+      'base/task_scheduler/task_tracker.cc',
+      'base/task_scheduler/task_traits.cc',
+      'base/third_party/dmg_fp/dtoa_wrapper.cc',
+      'base/third_party/dmg_fp/g_fmt.cc',
+      'base/third_party/icu/icu_utf.cc',
+      'base/third_party/nspr/prtime.cc',
+      'base/threading/post_task_and_reply_impl.cc',
+      'base/threading/scoped_blocking_call.cc',
+      'base/threading/sequence_local_storage_map.cc',
+      'base/threading/sequenced_task_runner_handle.cc',
+      'base/threading/simple_thread.cc',
+      'base/threading/thread.cc',
+      'base/threading/thread_checker_impl.cc',
+      'base/threading/thread_collision_warner.cc',
+      'base/threading/thread_id_name_manager.cc',
+      'base/threading/thread_local_storage.cc',
+      'base/threading/thread_restrictions.cc',
+      'base/threading/thread_task_runner_handle.cc',
+      'base/time/clock.cc',
+      'base/time/default_clock.cc',
+      'base/time/default_tick_clock.cc',
+      'base/time/tick_clock.cc',
+      'base/time/time.cc',
+      'base/timer/elapsed_timer.cc',
+      'base/timer/timer.cc',
+      'base/trace_event/category_registry.cc',
+      'base/trace_event/event_name_filter.cc',
+      'base/trace_event/heap_profiler_allocation_context.cc',
+      'base/trace_event/heap_profiler_allocation_context_tracker.cc',
+      'base/trace_event/heap_profiler_event_filter.cc',
+      'base/trace_event/heap_profiler_heap_dump_writer.cc',
+      'base/trace_event/heap_profiler_serialization_state.cc',
+      'base/trace_event/heap_profiler_stack_frame_deduplicator.cc',
+      'base/trace_event/heap_profiler_type_name_deduplicator.cc',
+      'base/trace_event/malloc_dump_provider.cc',
+      'base/trace_event/memory_allocator_dump.cc',
+      'base/trace_event/memory_allocator_dump_guid.cc',
+      'base/trace_event/memory_dump_manager.cc',
+      'base/trace_event/memory_dump_provider_info.cc',
+      'base/trace_event/memory_dump_request_args.cc',
+      'base/trace_event/memory_dump_scheduler.cc',
+      'base/trace_event/memory_infra_background_whitelist.cc',
+      'base/trace_event/memory_peak_detector.cc',
+      'base/trace_event/memory_usage_estimator.cc',
+      'base/trace_event/process_memory_dump.cc',
+      'base/trace_event/trace_buffer.cc',
+      'base/trace_event/trace_config.cc',
+      'base/trace_event/trace_config_category_filter.cc',
+      'base/trace_event/trace_event_argument.cc',
+      'base/trace_event/trace_event_filter.cc',
+      'base/trace_event/trace_event_impl.cc',
+      'base/trace_event/trace_event_memory_overhead.cc',
+      'base/trace_event/trace_log.cc',
+      'base/trace_event/trace_log_constants.cc',
+      'base/trace_event/tracing_agent.cc',
+      'base/unguessable_token.cc',
+      'base/value_iterators.cc',
+      'base/values.cc',
+      'base/vlog.cc',
+  ])
+
+  if is_win:
+    static_libraries['base']['sources'].extend([
+        'base/memory/platform_shared_memory_region_win.cc'
+    ])
+  elif is_mac:
+    static_libraries['base']['sources'].extend([
+        'base/memory/platform_shared_memory_region_mac.cc'
+    ])
+  elif is_posix:
+    static_libraries['base']['sources'].extend([
+        'base/memory/platform_shared_memory_region_posix.cc'
+    ])
+
+  if is_posix:
+    static_libraries['base']['sources'].extend([
+        'base/base_paths_posix.cc',
+        'base/debug/debugger_posix.cc',
+        'base/debug/stack_trace_posix.cc',
+        'base/files/file_enumerator_posix.cc',
+        'base/files/file_descriptor_watcher_posix.cc',
+        'base/files/file_posix.cc',
+        'base/files/file_util_posix.cc',
+        'base/files/memory_mapped_file_posix.cc',
+        'base/memory/shared_memory_helper.cc',
+        'base/message_loop/message_pump_libevent.cc',
+        'base/posix/file_descriptor_shuffle.cc',
+        'base/posix/global_descriptors.cc',
+        'base/posix/safe_strerror.cc',
+        'base/process/kill_posix.cc',
+        'base/process/process_handle_posix.cc',
+        'base/process/process_metrics_posix.cc',
+        'base/process/process_posix.cc',
+        'base/rand_util_posix.cc',
+        'base/strings/string16.cc',
+        'base/synchronization/condition_variable_posix.cc',
+        'base/synchronization/lock_impl_posix.cc',
+        'base/sys_info_posix.cc',
+        'base/task_scheduler/task_tracker_posix.cc',
+        'base/threading/platform_thread_internal_posix.cc',
+        'base/threading/platform_thread_posix.cc',
+        'base/threading/thread_local_storage_posix.cc',
+        'base/time/time_conversion_posix.cc',
+    ])
+    static_libraries['libevent'] = {
+        'sources': [
+            'base/third_party/libevent/buffer.c',
+            'base/third_party/libevent/evbuffer.c',
+            'base/third_party/libevent/evdns.c',
+            'base/third_party/libevent/event.c',
+            'base/third_party/libevent/event_tagging.c',
+            'base/third_party/libevent/evrpc.c',
+            'base/third_party/libevent/evutil.c',
+            'base/third_party/libevent/http.c',
+            'base/third_party/libevent/log.c',
+            'base/third_party/libevent/poll.c',
+            'base/third_party/libevent/select.c',
+            'base/third_party/libevent/signal.c',
+            'base/third_party/libevent/strlcpy.c',
+        ],
+        'tool': 'cc',
+        'include_dirs': [],
+        'cflags': cflags + ['-DHAVE_CONFIG_H'],
+    }
+
+  if is_linux or is_aix:
+    static_libraries['xdg_user_dirs'] = {
+        'sources': [
+            'base/third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
+        ],
+        'tool': 'cxx',
+    }
+    static_libraries['base']['sources'].extend([
+        'base/memory/shared_memory_handle_posix.cc',
+        'base/memory/shared_memory_posix.cc',
+        'base/nix/xdg_util.cc',
+        'base/process/internal_linux.cc',
+        'base/process/memory_linux.cc',
+        'base/process/process_handle_linux.cc',
+        'base/process/process_info_linux.cc',
+        'base/process/process_iterator_linux.cc',
+        'base/process/process_linux.cc',
+        'base/process/process_metrics_linux.cc',
+        'base/strings/sys_string_conversions_posix.cc',
+        'base/synchronization/waitable_event_posix.cc',
+        'base/sys_info_linux.cc',
+        'base/time/time_exploded_posix.cc',
+        'base/time/time_now_posix.cc',
+        'base/threading/platform_thread_linux.cc',
+    ])
+    if is_linux:
+      libcxx_root = REPO_ROOT + '/buildtools/third_party/libc++/trunk'
+      libcxxabi_root = REPO_ROOT + '/buildtools/third_party/libc++abi/trunk'
+      cflags_cc.extend([
+          '-nostdinc++',
+          '-isystem' + libcxx_root + '/include',
+          '-isystem' + libcxxabi_root + '/include',
+      ])
+      ldflags.extend(['-nodefaultlibs'])
+      libs.extend([
+          '-lc',
+          '-lgcc_s',
+          '-lm',
+          '-lpthread',
+      ])
+      static_libraries['libc++'] = {
+          'sources': [
+              libcxx_root + '/src/algorithm.cpp',
+              libcxx_root + '/src/any.cpp',
+              libcxx_root + '/src/bind.cpp',
+              libcxx_root + '/src/chrono.cpp',
+              libcxx_root + '/src/condition_variable.cpp',
+              libcxx_root + '/src/debug.cpp',
+              libcxx_root + '/src/exception.cpp',
+              libcxx_root + '/src/functional.cpp',
+              libcxx_root + '/src/future.cpp',
+              libcxx_root + '/src/hash.cpp',
+              libcxx_root + '/src/ios.cpp',
+              libcxx_root + '/src/iostream.cpp',
+              libcxx_root + '/src/locale.cpp',
+              libcxx_root + '/src/memory.cpp',
+              libcxx_root + '/src/mutex.cpp',
+              libcxx_root + '/src/new.cpp',
+              libcxx_root + '/src/optional.cpp',
+              libcxx_root + '/src/random.cpp',
+              libcxx_root + '/src/regex.cpp',
+              libcxx_root + '/src/shared_mutex.cpp',
+              libcxx_root + '/src/stdexcept.cpp',
+              libcxx_root + '/src/string.cpp',
+              libcxx_root + '/src/strstream.cpp',
+              libcxx_root + '/src/system_error.cpp',
+              libcxx_root + '/src/thread.cpp',
+              libcxx_root + '/src/typeinfo.cpp',
+              libcxx_root + '/src/utility.cpp',
+              libcxx_root + '/src/valarray.cpp',
+              libcxx_root + '/src/variant.cpp',
+              libcxx_root + '/src/vector.cpp',
+          ],
+          'tool': 'cxx',
+          'cflags': cflags + [
+              '-D_LIBCPP_NO_EXCEPTIONS',
+              '-D_LIBCPP_BUILDING_LIBRARY',
+              '-DLIBCXX_BUILDING_LIBCXXABI',
+          ]
+      }
+      static_libraries['libc++abi'] = {
+          'sources': [
+              libcxxabi_root + '/src/abort_message.cpp',
+              libcxxabi_root + '/src/cxa_aux_runtime.cpp',
+              libcxxabi_root + '/src/cxa_default_handlers.cpp',
+              libcxxabi_root + '/src/cxa_demangle.cpp',
+              libcxxabi_root + '/src/cxa_exception_storage.cpp',
+              libcxxabi_root + '/src/cxa_guard.cpp',
+              libcxxabi_root + '/src/cxa_handlers.cpp',
+              libcxxabi_root + '/src/cxa_noexception.cpp',
+              libcxxabi_root + '/src/cxa_unexpected.cpp',
+              libcxxabi_root + '/src/cxa_vector.cpp',
+              libcxxabi_root + '/src/cxa_virtual.cpp',
+              libcxxabi_root + '/src/fallback_malloc.cpp',
+              libcxxabi_root + '/src/private_typeinfo.cpp',
+              libcxxabi_root + '/src/stdlib_exception.cpp',
+              libcxxabi_root + '/src/stdlib_stdexcept.cpp',
+              libcxxabi_root + '/src/stdlib_typeinfo.cpp',
+          ],
+          'tool': 'cxx',
+          'cflags': cflags + [
+              '-DLIBCXXABI_SILENT_TERMINATE',
+              '-D_LIBCXXABI_NO_EXCEPTIONS',
+          ]
+      }
+      static_libraries['base']['sources'].extend([
+        'base/allocator/allocator_shim.cc',
+        'base/allocator/allocator_shim_default_dispatch_to_glibc.cc',
+      ])
+      libs.extend(['-lrt', '-latomic'])
+      static_libraries['libevent']['include_dirs'].extend([
+          os.path.join(REPO_ROOT, 'base', 'third_party', 'libevent', 'linux')
+      ])
+      static_libraries['libevent']['sources'].extend([
+         'base/third_party/libevent/epoll.c',
+      ])
+    else:
+      ldflags.extend(['-pthread'])
+      libs.extend(['-lrt'])
+      static_libraries['base']['sources'].extend([
+          'base/process/internal_aix.cc'
+      ])
+      static_libraries['libevent']['include_dirs'].extend([
+          os.path.join(REPO_ROOT, 'base', 'third_party', 'libevent', 'aix')
+      ])
+      static_libraries['libevent']['include_dirs'].extend([
+          os.path.join(REPO_ROOT, 'base', 'third_party', 'libevent', 'compat')
+      ])
+
+  if is_mac:
+    static_libraries['base']['sources'].extend([
+        'base/base_paths_mac.mm',
+        'base/files/file_util_mac.mm',
+        'base/mac/bundle_locations.mm',
+        'base/mac/call_with_eh_frame.cc',
+        'base/mac/call_with_eh_frame_asm.S',
+        'base/mac/foundation_util.mm',
+        'base/mac/mach_logging.cc',
+        'base/mac/scoped_mach_port.cc',
+        'base/mac/scoped_mach_vm.cc',
+        'base/mac/scoped_nsautorelease_pool.mm',
+        'base/memory/shared_memory_handle_mac.cc',
+        'base/memory/shared_memory_mac.cc',
+        'base/message_loop/message_pump_mac.mm',
+        'base/process/process_handle_mac.cc',
+        'base/process/process_info_mac.cc',
+        'base/process/process_iterator_mac.cc',
+        'base/process/process_metrics_mac.cc',
+        'base/strings/sys_string_conversions_mac.mm',
+        'base/synchronization/waitable_event_mac.cc',
+        'base/sys_info_mac.mm',
+        'base/time/time_exploded_posix.cc',
+        'base/time/time_mac.cc',
+        'base/threading/platform_thread_mac.mm',
+    ])
+    static_libraries['libevent']['include_dirs'].extend([
+        os.path.join(REPO_ROOT, 'base', 'third_party', 'libevent', 'mac')
+    ])
+    static_libraries['libevent']['sources'].extend([
+        'base/third_party/libevent/kqueue.c',
+    ])
+
+    libs.extend([
+        '-framework', 'AppKit',
+        '-framework', 'CoreFoundation',
+        '-framework', 'Foundation',
+        '-framework', 'Security',
+    ])
+
+  if is_win:
+    static_libraries['base']['sources'].extend([
+        "base/allocator/partition_allocator/address_space_randomization.cc",
+        'base/allocator/partition_allocator/page_allocator.cc',
+        "base/allocator/partition_allocator/spin_lock.cc",
+        'base/base_paths_win.cc',
+        'base/cpu.cc',
+        'base/debug/close_handle_hook_win.cc',
+        'base/debug/debugger.cc',
+        'base/debug/debugger_win.cc',
+        'base/debug/profiler.cc',
+        'base/debug/stack_trace_win.cc',
+        'base/file_version_info_win.cc',
+        'base/files/file_enumerator_win.cc',
+        'base/files/file_path_watcher_win.cc',
+        'base/files/file_util_win.cc',
+        'base/files/file_win.cc',
+        'base/files/memory_mapped_file_win.cc',
+        'base/guid.cc',
+        'base/logging_win.cc',
+        'base/memory/memory_pressure_monitor_win.cc',
+        'base/memory/shared_memory_handle_win.cc',
+        'base/memory/shared_memory_win.cc',
+        'base/message_loop/message_pump_win.cc',
+        'base/native_library_win.cc',
+        'base/power_monitor/power_monitor_device_source_win.cc',
+        'base/process/kill_win.cc',
+        'base/process/launch_win.cc',
+        'base/process/memory_win.cc',
+        'base/process/process_handle_win.cc',
+        'base/process/process_info_win.cc',
+        'base/process/process_iterator_win.cc',
+        'base/process/process_metrics_win.cc',
+        'base/process/process_win.cc',
+        'base/profiler/native_stack_sampler_win.cc',
+        'base/profiler/win32_stack_frame_unwinder.cc',
+        'base/rand_util_win.cc',
+        'base/strings/sys_string_conversions_win.cc',
+        'base/sync_socket_win.cc',
+        'base/synchronization/condition_variable_win.cc',
+        'base/synchronization/lock_impl_win.cc',
+        'base/synchronization/waitable_event_watcher_win.cc',
+        'base/synchronization/waitable_event_win.cc',
+        'base/sys_info_win.cc',
+        'base/threading/platform_thread_win.cc',
+        'base/threading/thread_local_storage_win.cc',
+        'base/time/time_win.cc',
+        'base/timer/hi_res_timer_manager_win.cc',
+        'base/trace_event/trace_event_etw_export_win.cc',
+        'base/win/core_winrt_util.cc',
+        'base/win/enum_variant.cc',
+        'base/win/event_trace_controller.cc',
+        'base/win/event_trace_provider.cc',
+        'base/win/i18n.cc',
+        'base/win/iat_patch_function.cc',
+        'base/win/iunknown_impl.cc',
+        'base/win/message_window.cc',
+        'base/win/object_watcher.cc',
+        'base/win/pe_image.cc',
+        'base/win/process_startup_helper.cc',
+        'base/win/registry.cc',
+        'base/win/resource_util.cc',
+        'base/win/scoped_bstr.cc',
+        'base/win/scoped_com_initializer.cc',
+        'base/win/scoped_handle.cc',
+        'base/win/scoped_handle_verifier.cc',
+        'base/win/scoped_process_information.cc',
+        'base/win/scoped_variant.cc',
+        'base/win/scoped_winrt_initializer.cc',
+        'base/win/shortcut.cc',
+        'base/win/startup_information.cc',
+        'base/win/wait_chain.cc',
+        'base/win/win_util.cc',
+        'base/win/windows_version.cc',
+        'base/win/wrapped_window_proc.cc',
+    ])
+
+    libs.extend([
+        'advapi32.lib',
+        'dbghelp.lib',
+        'kernel32.lib',
+        'ole32.lib',
+        'shell32.lib',
+        'user32.lib',
+        'userenv.lib',
+        'version.lib',
+        'winmm.lib',
+        'ws2_32.lib',
+        'Shlwapi.lib',
+    ])
+
+  # we just build static libraries that GN needs
+  executables['gn']['libs'].extend(static_libraries.keys())
+
+  write_generic_ninja(path, static_libraries, executables, cc, cxx, ar, ld,
+                      cflags, cflags_cc, ldflags, include_dirs, libs)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
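
Two details of the generator are worth calling out. Paths written into
build.ninja are escaped because ninja treats spaces and colons specially; the
escaping used above behaves like this (standalone copy for illustration):

  def escape_path_ninja(path):
      return path.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:')

  assert escape_path_ninja('out/My Dir/foo.cc') == 'out/My$ Dir/foo.cc'
  assert escape_path_ninja('C:/src/gn') == 'C$:/src/gn'

And since the toolchain is taken from CC, CXX, AR, LD, CFLAGS, CXXFLAGS and
LDFLAGS, a debug build with a specific compiler can be generated with, for
example, `CC=clang CXX=clang++ build/gen.py -d`.
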
diff --git a/build/get_landmines.py b/build/get_landmines.py
deleted file mode 100755
index 8a655e3..0000000
--- a/build/get_landmines.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This file emits the list of reasons why a particular build needs to be clobbered
-(or a list of 'landmines').
-"""
-
-import sys
-
-import landmine_utils
-
-
-host_os = landmine_utils.host_os
-
-
-def print_landmines():
-  """
-  ALL LANDMINES ARE EMITTED FROM HERE.
-  """
-  # DO NOT add landmines as part of a regular CL. Landmines are a last-effort
-  # bandaid fix if a CL that got landed has a build dependency bug and all bots
-  # need to be cleaned up. If you're writing a new CL that causes build
-  # dependency problems, fix the dependency problems instead of adding a
-  # landmine.
-  #
-  # Before adding or changing a landmine consider the consequences of doing so.
-  # Doing so will wipe out every output directory on every Chrome developer's
-  # machine. This can be particularly problematic on Windows where the directory
-  # deletion may well fail (locked files, command prompt in the directory,
-  # etc.), and generated .sln and .vcxproj files will be deleted.
-  #
-  # This output directory deletion will be repeated when going back and forth
-  # across the change that added the landmine, adding to the cost. There are
-  # usually less troublesome alternatives.
-
-  if host_os() == 'win':
-    print 'Compile on cc_unittests fails due to symbols removed in r185063.'
-  if host_os() == 'linux':
-    print 'Builders switching from make to ninja will clobber on this.'
-  if host_os() == 'mac':
-    print 'Switching from bundle to unbundled dylib (issue 14743002).'
-  if host_os() in ('win', 'mac'):
-    print ('Improper dependency for create_nmf.py broke in r240802, '
-           'fixed in r240860.')
-  if host_os() == 'win':
-    print 'Switch to VS2015 Update 3, 14393 SDK'
-  print 'Need to clobber everything due to an IDL change in r154579 (blink)'
-  print 'Need to clobber everything due to gen file moves in r175513 (Blink)'
-  print 'Clobber to get rid of obsolete test plugin after r248358'
-  print 'Clobber to rebuild GN files for V8'
-  print 'Clobber to get rid of stale generated mojom.h files'
-  print 'Need to clobber everything due to build_nexe change in nacl r13424'
-  print '[chromium-dev] PSA: clobber build needed for IDR_INSPECTOR_* compil...'
-  print 'blink_resources.grd changed: crbug.com/400860'
-  print 'ninja dependency cycle: crbug.com/408192'
-  print 'Clobber to fix missing NaCl gyp dependencies (crbug.com/427427).'
-  print 'Another clobber for missing NaCl gyp deps (crbug.com/427427).'
-  print 'Clobber to fix GN not picking up increased ID range (crbug.com/444902)'
-  print 'Remove NaCl toolchains from the output dir (crbug.com/456902)'
-  if host_os() == 'win':
-    print 'Clobber to delete stale generated files (crbug.com/510086)'
-  if host_os() == 'mac':
-    print 'Clobber to get rid of evil libsqlite3.dylib (crbug.com/526208)'
-  if host_os() == 'mac':
-    print 'Clobber to remove libsystem.dylib. See crbug.com/620075'
-  if host_os() == 'mac':
-    print 'Clobber to get past mojo gen build error (crbug.com/679607)'
-  if host_os() == 'win':
-    print 'Clobber Windows to fix strange PCH-not-rebuilt errors.'
-  print 'Clobber all to fix GN breakage (crbug.com/736215)'
-  print 'The Great Blink mv for source files (crbug.com/768828)'
-
-def main():
-  print_landmines()
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
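
The script only prints clobber reasons; the consumer (not part of this change)
typically re-runs it, compares the output with a stamp file in the build
directory, and forces a clean build when the list changes. A rough sketch of
that pattern (entirely illustrative; the real consumer lives elsewhere):

  import os
  import subprocess
  import sys

  def landmines_changed(out_dir, script='build/get_landmines.py'):
      current = subprocess.check_output([sys.executable, script]).decode()
      stamp = os.path.join(out_dir, '.landmines')
      previous = open(stamp).read() if os.path.exists(stamp) else None
      with open(stamp, 'w') as f:
          f.write(current)
      # The very first run just records the stamp; later changes mean clobber.
      return previous is not None and previous != current
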
diff --git a/build/get_syzygy_binaries.py b/build/get_syzygy_binaries.py
deleted file mode 100755
index 09b1199..0000000
--- a/build/get_syzygy_binaries.py
+++ /dev/null
@@ -1,529 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A utility script for downloading versioned Syzygy binaries."""
-
-import hashlib
-import errno
-import json
-import logging
-import optparse
-import os
-import re
-import shutil
-import stat
-import sys
-import subprocess
-import tempfile
-import time
-import zipfile
-
-
-_LOGGER = logging.getLogger(os.path.basename(__file__))
-
-# The relative path where official builds are archived in their GS bucket.
-_SYZYGY_ARCHIVE_PATH = ('/builds/official/%(revision)s')
-
-# A JSON file containing the state of the download directory. If this file and
-# directory state do not agree, then the binaries will be downloaded and
-# installed again.
-_STATE = '.state'
-
-# This matches an integer (an SVN revision number) or a SHA1 value (a GIT hash).
-# The archive exclusively uses lowercase GIT hashes.
-_REVISION_RE = re.compile('^(?:\d+|[a-f0-9]{40})$')
-
-# This matches an MD5 hash.
-_MD5_RE = re.compile('^[a-f0-9]{32}$')
-
-# List of resources to be downloaded and installed. These are tuples with the
-# following format:
-# (basename, logging name, relative installation path, extraction filter)
-_RESOURCES = [
-  ('benchmark.zip', 'benchmark', '', None),
-  ('binaries.zip', 'binaries', 'exe', None),
-  ('symbols.zip', 'symbols', 'exe',
-      lambda x: x.filename.endswith('.dll.pdb'))]
-
-
-# Name of the MS DIA dll that we need to copy to the binaries directory.
-_DIA_DLL_NAME = "msdia140.dll"
-
-
-def _LoadState(output_dir):
-  """Loads the contents of the state file for a given |output_dir|, returning
-  None if it doesn't exist.
-  """
-  path = os.path.join(output_dir, _STATE)
-  if not os.path.exists(path):
-    _LOGGER.debug('No state file found.')
-    return None
-  with open(path, 'rb') as f:
-    _LOGGER.debug('Reading state file: %s', path)
-    try:
-      return json.load(f)
-    except ValueError:
-      _LOGGER.debug('Invalid state file.')
-      return None
-
-
-def _SaveState(output_dir, state, dry_run=False):
-  """Saves the |state| dictionary to the given |output_dir| as a JSON file."""
-  path = os.path.join(output_dir, _STATE)
-  _LOGGER.debug('Writing state file: %s', path)
-  if dry_run:
-    return
-  with open(path, 'wb') as f:
-    f.write(json.dumps(state, sort_keys=True, indent=2))
-
-
-def _Md5(path):
-  """Returns the MD5 hash of the file at |path|, which must exist."""
-  return hashlib.md5(open(path, 'rb').read()).hexdigest()
-
-
-def _StateIsValid(state):
-  """Returns true if the given state structure is valid."""
-  if not isinstance(state, dict):
-    _LOGGER.debug('State must be a dict.')
-    return False
-  r = state.get('revision', None)
-  if not isinstance(r, basestring) or not _REVISION_RE.match(r):
-    _LOGGER.debug('State contains an invalid revision.')
-    return False
-  c = state.get('contents', None)
-  if not isinstance(c, dict):
-    _LOGGER.debug('State must contain a contents dict.')
-    return False
-  for (relpath, md5) in c.iteritems():
-    if not isinstance(relpath, basestring) or len(relpath) == 0:
-      _LOGGER.debug('State contents dict contains an invalid path.')
-      return False
-    if not isinstance(md5, basestring) or not _MD5_RE.match(md5):
-      _LOGGER.debug('State contents dict contains an invalid MD5 digest.')
-      return False
-  return True
-
-
-def _BuildActualState(stored, revision, output_dir):
-  """Builds the actual state using the provided |stored| state as a template.
-  Only examines files listed in the stored state, causing the script to ignore
-  files that have been added to the directories locally. |stored| must be a
-  valid state dictionary.
-  """
-  contents = {}
-  state = { 'revision': revision, 'contents': contents }
-  for relpath, md5 in stored['contents'].iteritems():
-    abspath = os.path.abspath(os.path.join(output_dir, relpath))
-    if os.path.isfile(abspath):
-      m = _Md5(abspath)
-      contents[relpath] = m
-
-  return state
-
-
-def _StatesAreConsistent(stored, actual):
-  """Validates whether two state dictionaries are consistent. Both must be valid
-  state dictionaries. Additional entries in |actual| are ignored.
-  """
-  if stored['revision'] != actual['revision']:
-    _LOGGER.debug('Mismatched revision number.')
-    return False
-  cont_stored = stored['contents']
-  cont_actual = actual['contents']
-  for relpath, md5 in cont_stored.iteritems():
-    if relpath not in cont_actual:
-      _LOGGER.debug('Missing content: %s', relpath)
-      return False
-    if md5 != cont_actual[relpath]:
-      _LOGGER.debug('Modified content: %s', relpath)
-      return False
-  return True
-
-
-def _GetCurrentState(revision, output_dir):
-  """Loads the current state and checks to see if it is consistent. Returns
-  a tuple (state, bool). The returned state will always be valid, even if an
-  invalid state is present on disk.
-  """
-  stored = _LoadState(output_dir)
-  if not _StateIsValid(stored):
-    _LOGGER.debug('State is invalid.')
-    # Return a valid but empty state.
-    return ({'revision': '0', 'contents': {}}, False)
-  actual = _BuildActualState(stored, revision, output_dir)
-  # If the script has been modified consider the state invalid.
-  path = os.path.join(output_dir, _STATE)
-  if os.path.getmtime(__file__) > os.path.getmtime(path):
-    return (stored, False)
-  # Otherwise, explicitly validate the state.
-  if not _StatesAreConsistent(stored, actual):
-    return (stored, False)
-  return (stored, True)
-
-
-def _DirIsEmpty(path):
-  """Returns true if the given directory is empty, false otherwise."""
-  for root, dirs, files in os.walk(path):
-    return not dirs and not files
-
-
-def _RmTreeHandleReadOnly(func, path, exc):
-  """An error handling function for use with shutil.rmtree. This will
-  detect failures to remove read-only files, and will change their properties
-  prior to removing them. This is necessary on Windows as os.remove will return
-  an access error for read-only files, and git repos contain read-only
-  pack/index files.
-  """
-  excvalue = exc[1]
-  if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES:
-    _LOGGER.debug('Removing read-only path: %s', path)
-    os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
-    func(path)
-  else:
-    raise
-
-
-def _RmTree(path):
-  """A wrapper of shutil.rmtree that handles read-only files."""
-  shutil.rmtree(path, ignore_errors=False, onerror=_RmTreeHandleReadOnly)
-
-
-def _CleanState(output_dir, state, dry_run=False):
-  """Cleans up files/directories in |output_dir| that are referenced by
-  the given |state|. Raises an error if there are local changes. Returns a
-  dictionary of files that were deleted.
-  """
-  _LOGGER.debug('Deleting files from previous installation.')
-  deleted = {}
-
-  # Generate a list of files to delete, relative to |output_dir|.
-  contents = state['contents']
-  files = sorted(contents.keys())
-
-  # Try to delete the files. Keep track of directories to delete as well.
-  dirs = {}
-  for relpath in files:
-    fullpath = os.path.join(output_dir, relpath)
-    fulldir = os.path.dirname(fullpath)
-    dirs[fulldir] = True
-    if os.path.exists(fullpath):
-      # If somehow the file has become a directory complain about it.
-      if os.path.isdir(fullpath):
-        raise Exception('Directory exists where file expected: %s' % fullpath)
-
-      # Double check that the file doesn't have local changes. If it does
-      # then refuse to delete it.
-      if relpath in contents:
-        stored_md5 = contents[relpath]
-        actual_md5 = _Md5(fullpath)
-        if actual_md5 != stored_md5:
-          raise Exception('File has local changes: %s' % fullpath)
-
-      # The file is unchanged so it can safely be deleted.
-      _LOGGER.debug('Deleting file "%s".', fullpath)
-      deleted[relpath] = True
-      if not dry_run:
-        os.unlink(fullpath)
-
-  # Sort directories from longest name to shortest. This lets us remove empty
-  # directories from the most nested paths first.
-  dirs = sorted(dirs.keys(), key=lambda x: len(x), reverse=True)
-  for p in dirs:
-    if os.path.exists(p) and _DirIsEmpty(p):
-      _LOGGER.debug('Deleting empty directory "%s".', p)
-      if not dry_run:
-        _RmTree(p)
-
-  return deleted
-
-
-def _FindGsUtil():
-  """Looks for depot_tools and returns the absolute path to gsutil.py."""
-  for path in os.environ['PATH'].split(os.pathsep):
-    path = os.path.abspath(path)
-    git_cl = os.path.join(path, 'git_cl.py')
-    gs_util = os.path.join(path, 'gsutil.py')
-    if os.path.exists(git_cl) and os.path.exists(gs_util):
-      return gs_util
-  return None
-
-
-def _GsUtil(*cmd):
-  """Runs the given command in gsutil with exponential backoff and retries."""
-  gs_util = _FindGsUtil()
-  cmd = [sys.executable, gs_util] + list(cmd)
-
-  retries = 3
-  timeout = 4  # Seconds.
-  while True:
-    _LOGGER.debug('Running %s', cmd)
-    prog = subprocess.Popen(cmd, shell=False)
-    prog.communicate()
-
-    # Stop retrying on success.
-    if prog.returncode == 0:
-      return
-
-    # Raise a permanent failure if retries have been exhausted.
-    if retries == 0:
-      raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode))
-
-    _LOGGER.debug('Sleeping %d seconds and trying again.', timeout)
-    time.sleep(timeout)
-    retries -= 1
-    timeout *= 2
-
-
-def _Download(resource):
-  """Downloads the given GS resource to a temporary file, returning its path."""
-  tmp = tempfile.mkstemp(suffix='syzygy_archive')
-  os.close(tmp[0])
-  tmp_file = tmp[1]
-  url = 'gs://syzygy-archive' + resource
-  if sys.platform == 'cygwin':
-    # Change temporary path to Windows path for gsutil
-    def winpath(path):
-      return subprocess.check_output(['cygpath', '-w', path]).strip()
-    tmp_file = winpath(tmp_file)
-  _GsUtil('cp', url, tmp_file)
-  return tmp[1]
-
-
-def _MaybeCopyDIABinaries(options, contents):
-  """Try to copy the DIA DLL to the binaries exe directory."""
-  toolchain_data_file = os.path.join(os.path.dirname(__file__),
-                                     'win_toolchain.json')
-  if not os.path.exists(toolchain_data_file):
-    _LOGGER.debug('Toolchain JSON data file doesn\'t exist, skipping.')
-    return
-  with open(toolchain_data_file) as temp_f:
-    toolchain_data = json.load(temp_f)
-  if not os.path.isdir(toolchain_data['path']):
-    _LOGGER.error('The toolchain JSON file is invalid.')
-    return
-  dia_sdk_binaries_dir = os.path.join(toolchain_data['path'], 'DIA SDK', 'bin')
-  dia_dll = os.path.join(dia_sdk_binaries_dir, _DIA_DLL_NAME)
-  if not os.path.exists(dia_dll):
-    _LOGGER.debug('%s is missing, skipping.', dia_dll)
-    return
-  dia_dll_dest = os.path.join(options.output_dir, 'exe', _DIA_DLL_NAME)
-  _LOGGER.debug('Copying %s to %s.' % (dia_dll, dia_dll_dest))
-  if not options.dry_run:
-    shutil.copy(dia_dll, dia_dll_dest)
-    contents[os.path.relpath(dia_dll_dest, options.output_dir)] = (
-        _Md5(dia_dll_dest))
-
-
-def _InstallBinaries(options, deleted={}):
-  """Installs Syzygy binaries. This assumes that the output directory has
-  already been cleaned, as it will refuse to overwrite existing files."""
-  contents = {}
-  state = { 'revision': options.revision, 'contents': contents }
-  archive_path = _SYZYGY_ARCHIVE_PATH % { 'revision': options.revision }
-  if options.resources:
-    resources = [(resource, resource, '', None)
-                 for resource in options.resources]
-  else:
-    resources = _RESOURCES
-  for (base, name, subdir, filt) in resources:
-    # Create the output directory if it doesn't exist.
-    fulldir = os.path.join(options.output_dir, subdir)
-    if os.path.isfile(fulldir):
-      raise Exception('File exists where a directory needs to be created: %s' %
-                      fulldir)
-    if not os.path.exists(fulldir):
-      _LOGGER.debug('Creating directory: %s', fulldir)
-      if not options.dry_run:
-        os.makedirs(fulldir)
-
-    # Download and read the archive.
-    resource = archive_path + '/' + base
-    _LOGGER.debug('Retrieving %s archive at "%s".', name, resource)
-    path = _Download(resource)
-
-    _LOGGER.debug('Unzipping %s archive.', name)
-    with open(path, 'rb') as data:
-      archive = zipfile.ZipFile(data)
-      for entry in archive.infolist():
-        if not filt or filt(entry):
-          fullpath = os.path.normpath(os.path.join(fulldir, entry.filename))
-          relpath = os.path.relpath(fullpath, options.output_dir)
-          if os.path.exists(fullpath):
-            # If in a dry-run take into account the fact that the file *would*
-            # have been deleted.
-            if options.dry_run and relpath in deleted:
-              pass
-            else:
-              raise Exception('Path already exists: %s' % fullpath)
-
-          # Extract the file and update the state dictionary.
-          _LOGGER.debug('Extracting "%s".', fullpath)
-          if not options.dry_run:
-            archive.extract(entry.filename, fulldir)
-            md5 = _Md5(fullpath)
-            contents[relpath] = md5
-            if sys.platform == 'cygwin':
-              os.chmod(fullpath, os.stat(fullpath).st_mode | stat.S_IXUSR)
-
-    _LOGGER.debug('Removing temporary file "%s".', path)
-    os.remove(path)
-
-  if options.copy_dia_binaries:
-    # Try to copy the DIA binaries to the binaries directory.
-    _MaybeCopyDIABinaries(options, contents)
-
-  return state
-
-
-def _ParseCommandLine():
-  """Parses the command-line and returns an options structure."""
-  option_parser = optparse.OptionParser()
-  option_parser.add_option('--dry-run', action='store_true', default=False,
-      help='If true then will simply list actions that would be performed.')
-  option_parser.add_option('--force', action='store_true', default=False,
-      help='Force an installation even if the binaries are up to date.')
-  option_parser.add_option('--no-cleanup', action='store_true', default=False,
-      help='Allow installation on non-Windows platforms, and skip the forced '
-           'cleanup step.')
-  option_parser.add_option('--output-dir', type='string',
-      help='The path where the binaries will be replaced. Existing binaries '
-           'will only be overwritten if not up to date.')
-  option_parser.add_option('--overwrite', action='store_true', default=False,
-      help='If specified then the installation will happily delete and rewrite '
-           'the entire output directory, blasting any local changes.')
-  option_parser.add_option('--revision', type='string',
-      help='The SVN revision or GIT hash associated with the required version.')
-  option_parser.add_option('--revision-file', type='string',
-      help='A text file containing an SVN revision or GIT hash.')
-  option_parser.add_option('--resource', type='string', action='append',
-      dest='resources', help='A resource to be downloaded.')
-  option_parser.add_option('--verbose', dest='log_level', action='store_const',
-      default=logging.INFO, const=logging.DEBUG,
-      help='Enables verbose logging.')
-  option_parser.add_option('--quiet', dest='log_level', action='store_const',
-      default=logging.INFO, const=logging.ERROR,
-      help='Disables all output except for errors.')
-  option_parser.add_option('--copy-dia-binaries', action='store_true',
-      default=False, help='If true then the DIA dll will get copied into the '
-                          'binaries directory if it\'s available.')
-  options, args = option_parser.parse_args()
-  if args:
-    option_parser.error('Unexpected arguments: %s' % args)
-  if not options.output_dir:
-    option_parser.error('Must specify --output-dir.')
-  if not options.revision and not options.revision_file:
-    option_parser.error('Must specify one of --revision or --revision-file.')
-  if options.revision and options.revision_file:
-    option_parser.error('Must not specify both --revision and --revision-file.')
-
-  # Configure logging.
-  logging.basicConfig(level=options.log_level)
-
-  # If a revision file has been specified then read it.
-  if options.revision_file:
-    options.revision = open(options.revision_file, 'rb').read().strip()
-    _LOGGER.debug('Parsed revision "%s" from file "%s".',
-                 options.revision, options.revision_file)
-
-  # Ensure that the specified SVN revision or GIT hash is valid.
-  if not _REVISION_RE.match(options.revision):
-    option_parser.error('Must specify a valid SVN or GIT revision.')
-
-  # This just makes output prettier to read.
-  options.output_dir = os.path.normpath(options.output_dir)
-
-  return options
-
-
-def _RemoveOrphanedFiles(options):
-  """This is run on non-Windows systems to remove orphaned files that may have
-  been downloaded by a previous version of this script.
-  """
-  # Reconfigure logging to output info messages. This will allow inspection of
-  # cleanup status on non-Windows buildbots.
-  _LOGGER.setLevel(logging.INFO)
-
-  output_dir = os.path.abspath(options.output_dir)
-
-  # We only want to clean up the folder in 'src/third_party/syzygy', and we
-  # expect to be called with that as an output directory. This is an attempt to
-  # not start deleting random things if the script is run from an alternate
-  # location, or not called from the gclient hooks.
-  expected_syzygy_dir = os.path.abspath(os.path.join(
-      os.path.dirname(__file__), '..', 'third_party', 'syzygy'))
-  expected_output_dir = os.path.join(expected_syzygy_dir, 'binaries')
-  if expected_output_dir != output_dir:
-    _LOGGER.info('Unexpected output directory, skipping cleanup.')
-    return
-
-  if not os.path.isdir(expected_syzygy_dir):
-    _LOGGER.info('Output directory does not exist, skipping cleanup.')
-    return
-
-  def OnError(function, path, excinfo):
-    """Logs error encountered by shutil.rmtree."""
-    _LOGGER.error('Error when running %s(%s)', function, path, exc_info=excinfo)
-
-  _LOGGER.info('Removing orphaned files from %s', expected_syzygy_dir)
-  if not options.dry_run:
-    shutil.rmtree(expected_syzygy_dir, True, OnError)
-
-
-def main():
-  options = _ParseCommandLine()
-
-  if options.dry_run:
-    _LOGGER.debug('Performing a dry-run.')
-
-  # We only care about Windows platforms, as the Syzygy binaries aren't used
-  # elsewhere. However, there was a short period of time where this script
-  # wasn't gated on OS types, and those OSes downloaded and installed binaries.
-  # This will clean up orphaned files on those operating systems.
-  if sys.platform not in ('win32', 'cygwin'):
-    if options.no_cleanup:
-      _LOGGER.debug('Skipping usual cleanup for non-Windows platforms.')
-    else:
-      return _RemoveOrphanedFiles(options)
-
-  # Load the current installation state, and validate it against the
-  # requested installation.
-  state, is_consistent = _GetCurrentState(options.revision, options.output_dir)
-
-  # Decide whether or not an install is necessary.
-  if options.force:
-    _LOGGER.debug('Forcing reinstall of binaries.')
-  elif is_consistent:
-    # Avoid doing any work if the contents of the directory are consistent.
-    _LOGGER.debug('State unchanged, no reinstall necessary.')
-    return
-
-  # Under normal logging this is the only message that will be reported.
-  _LOGGER.info('Installing revision %s Syzygy binaries.',
-               options.revision[0:12])
-
-  # Clean up the old state to begin with.
-  deleted = []
-  if options.overwrite:
-    if os.path.exists(options.output_dir):
-      # If overwrite was specified then take a heavy-handed approach.
-      _LOGGER.debug('Deleting entire installation directory.')
-      if not options.dry_run:
-        _RmTree(options.output_dir)
-  else:
-    # Otherwise only delete things that the previous installation put in place,
-    # and take care to preserve any local changes.
-    deleted = _CleanState(options.output_dir, state, options.dry_run)
-
-  # Install the new binaries. In a dry-run this will actually download the
-  # archives, but it won't write anything to disk.
-  state = _InstallBinaries(options, deleted)
-
-  # Build and save the state for the directory.
-  _SaveState(options.output_dir, state, options.dry_run)
-
-
-if __name__ == '__main__':
-  main()
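
The _GsUtil helper above wraps a flaky network command in a retry loop with
exponential backoff. Below is a minimal, self-contained sketch of that pattern,
assuming the wrapped command is an ordinary subprocess; the command, retry
count and initial delay are illustrative and not taken from the script:

  import subprocess
  import time

  def run_with_backoff(cmd, retries=3, delay=4):
    """Runs |cmd|, retrying with exponentially growing delays on failure."""
    while True:
      ret = subprocess.call(cmd)
      if ret == 0:
        return
      if retries == 0:
        raise RuntimeError('Command %r returned %d.' % (cmd, ret))
      time.sleep(delay)
      retries -= 1
      delay *= 2

  # Hypothetical usage:
  #   run_with_backoff(['gsutil.py', 'cp', 'gs://some-bucket/object', 'out.bin'])
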
diff --git a/build/git-hooks/OWNERS b/build/git-hooks/OWNERS
deleted file mode 100644
index 3e327dc..0000000
--- a/build/git-hooks/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-set noparent
-szager@chromium.org
-cmp@chromium.org
diff --git a/build/git-hooks/pre-commit b/build/git-hooks/pre-commit
deleted file mode 100755
index 41b5963..0000000
--- a/build/git-hooks/pre-commit
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/bin/sh
-
-submodule_diff() {
-  if test -n "$2"; then
-    git diff-tree -r --ignore-submodules=dirty "$1" "$2" | grep -e '^:160000' -e '^:...... 160000' | xargs
-  else
-    git diff-index --cached --ignore-submodules=dirty "$1" | grep -e '^:160000' -e '^:...... 160000' | xargs
-  fi
-}
-
-if git rev-parse --verify --quiet --no-revs MERGE_HEAD; then
-  merge_base=$(git merge-base HEAD MERGE_HEAD)
-  if test -z "$(submodule_diff $merge_base HEAD)"; then
-    # Most up-to-date submodules are in MERGE_HEAD.
-    head_ref=MERGE_HEAD
-  else
-    # Most up-to-date submodules are in HEAD.
-    head_ref=HEAD
-  fi
-else
-  # No merge in progress. Submodules must match HEAD.
-  head_ref=HEAD
-fi
-
-submods=$(submodule_diff $head_ref)
-if test "$submods"; then
-  echo "You are trying to commit changes to the following submodules:" 1>&2
-  echo 1>&2
-  echo $submods | cut -d ' ' -f 6 | sed 's/^/  /g' 1>&2
-  cat <<EOF 1>&2
-
-Submodule commits are not allowed.  Please run:
-
-  git status --ignore-submodules=dirty
-
-and/or:
-
-  git diff-index --cached --ignore-submodules=dirty HEAD
-
-... to see what's in your index.
-
-If you're really and truly trying to roll the version of a submodule, you should
-commit the new version to DEPS, instead.
-EOF
-  exit 1
-fi
-
-gitmodules_diff() {
-  git diff-index --cached "$1" .gitmodules
-}
-
-if [ "$(git ls-files .gitmodules)" ] && [ "$(gitmodules_diff $head_ref)" ]; then
-  cat <<EOF 1>&2
-You are trying to commit a change to .gitmodules.  That is not allowed.
-To make changes to submodule names/paths, edit DEPS.
-EOF
-  exit 1
-fi
-
-exit 0
diff --git a/build/gn_helpers.py b/build/gn_helpers.py
deleted file mode 100644
index a9d1e2e..0000000
--- a/build/gn_helpers.py
+++ /dev/null
@@ -1,351 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Helper functions useful when writing scripts that integrate with GN.
-
-The main functions are ToGNString and FromGNString which convert between
-serialized GN variables and Python variables.
-
-To use in a random python file in the build:
-
-  import os
-  import sys
-
-  sys.path.append(os.path.join(os.path.dirname(__file__),
-                               os.pardir, os.pardir, "build"))
-  import gn_helpers
-
-Where the sequence of parameters to join is the relative path from your source
-file to the build directory."""
-
-class GNException(Exception):
-  pass
-
-
-def ToGNString(value, allow_dicts = True):
-  """Returns a stringified GN equivalent of the Python value.
-
-  allow_dicts indicates if this function will allow converting dictionaries
-  to GN scopes. This is only possible at the top level; you can't nest a
-  GN scope in a list, so this should be set to False for recursive calls."""
-  if isinstance(value, basestring):
-    if value.find('\n') >= 0:
-      raise GNException("Trying to print a string with a newline in it.")
-    return '"' + \
-        value.replace('\\', '\\\\').replace('"', '\\"').replace('$', '\\$') + \
-        '"'
-
-  if isinstance(value, unicode):
-    return ToGNString(value.encode('utf-8'))
-
-  if isinstance(value, bool):
-    if value:
-      return "true"
-    return "false"
-
-  if isinstance(value, list):
-    return '[ %s ]' % ', '.join(ToGNString(v) for v in value)
-
-  if isinstance(value, dict):
-    if not allow_dicts:
-      raise GNException("Attempting to recursively print a dictionary.")
-    result = ""
-    for key in sorted(value):
-      if not isinstance(key, basestring):
-        raise GNException("Dictionary key is not a string.")
-      result += "%s = %s\n" % (key, ToGNString(value[key], False))
-    return result
-
-  if isinstance(value, int):
-    return str(value)
-
-  raise GNException("Unsupported type when printing to GN.")
-
-
-def FromGNString(input_string):
-  """Converts the input string from a GN serialized value to Python values.
-
-  For details on supported types see GNValueParser.Parse() below.
-
-  If your GN script did:
-    something = [ "file1", "file2" ]
-    args = [ "--values=$something" ]
-  The command line would look something like:
-    --values="[ \"file1\", \"file2\" ]"
-  Which when interpreted as a command line gives the value:
-    [ "file1", "file2" ]
-
-  You can parse this into a Python list using GN rules with:
-    input_values = FromGNString(options.values)
-  Although the Python 'ast' module will parse many forms of such input, it
-  will not handle GN escaping properly, nor GN booleans. You should use this
-  function instead.
-
-
-  A NOTE ON STRING HANDLING:
-
-  If you just pass a string on the command line to your Python script, or use
-  string interpolation on a string variable, the strings will not be quoted:
-    str = "asdf"
-    args = [ str, "--value=$str" ]
-  Will yield the command line:
-    asdf --value=asdf
-  The unquoted asdf string will not be valid input to this function, which
-  accepts only quoted strings like GN scripts. In such cases, you can just use
-  the Python string literal directly.
-
-  The main use case for this is other types, in particular lists. When
-  using string interpolation on a list (as in the top example) the embedded
-  strings will be quoted and escaped according to GN rules so the list can be
-  re-parsed to get the same result."""
-  parser = GNValueParser(input_string)
-  return parser.Parse()
-
-
-def FromGNArgs(input_string):
-  """Converts a string with a bunch of gn arg assignments into a Python dict.
-
-  Given a whitespace-separated list of
-
-    <ident> = (integer | string | boolean | <list of the former>)
-
-  gn assignments, this returns a Python dict, i.e.:
-
-    FromGNArgs("foo=true\nbar=1\n") -> { 'foo': True, 'bar': 1 }.
-
-  Only simple types and lists supported; variables, structs, calls
-  and other, more complicated things are not.
-
-  This routine is meant to handle only the simple sorts of values that
-  arise in parsing --args.
-  """
-  parser = GNValueParser(input_string)
-  return parser.ParseArgs()
-
-
-def UnescapeGNString(value):
-  """Given a string with GN escaping, returns the unescaped string.
-
-  Be careful not to feed with input from a Python parsing function like
-  'ast' because it will do Python unescaping, which will be incorrect when
-  fed into the GN unescaper."""
-  result = ''
-  i = 0
-  while i < len(value):
-    if value[i] == '\\':
-      if i < len(value) - 1:
-        next_char = value[i + 1]
-        if next_char in ('$', '"', '\\'):
-          # These are the escaped characters GN supports.
-          result += next_char
-          i += 1
-        else:
-          # Any other backslash is a literal.
-          result += '\\'
-    else:
-      result += value[i]
-    i += 1
-  return result
-
-
-def _IsDigitOrMinus(char):
-  return char in "-0123456789"
-
-
-class GNValueParser(object):
-  """Duplicates GN parsing of values and converts to Python types.
-
-  Normally you would use the wrapper functions FromGNString() or FromGNArgs()
-  defined above.
-
-  If you expect input as a specific type, you can also call one of the Parse*
-  functions directly. All functions throw GNException on invalid input. """
-  def __init__(self, string):
-    self.input = string
-    self.cur = 0
-
-  def IsDone(self):
-    return self.cur == len(self.input)
-
-  def ConsumeWhitespace(self):
-    while not self.IsDone() and self.input[self.cur] in ' \t\n':
-      self.cur += 1
-
-  def Parse(self):
-    """Converts a string representing a printed GN value to the Python type.
-
-    See additional usage notes on FromGNString above.
-
-    - GN booleans ('true', 'false') will be converted to Python booleans.
-
-    - GN numbers ('123') will be converted to Python numbers.
-
-    - GN strings (double-quoted as in '"asdf"') will be converted to Python
-      strings with GN escaping rules. GN string interpolation (embedded
-      variables preceded by $) are not supported and will be returned as
-      literals.
-
-    - GN lists ('[1, "asdf", 3]') will be converted to Python lists.
-
-    - GN scopes ('{ ... }') are not supported."""
-    result = self._ParseAllowTrailing()
-    self.ConsumeWhitespace()
-    if not self.IsDone():
-      raise GNException("Trailing input after parsing:\n  " +
-                        self.input[self.cur:])
-    return result
-
-  def ParseArgs(self):
-    """Converts a whitespace-separated list of ident=literals to a dict.
-
-    See additional usage notes on FromGNArgs, above.
-    """
-    d = {}
-
-    self.ConsumeWhitespace()
-    while not self.IsDone():
-      ident = self._ParseIdent()
-      self.ConsumeWhitespace()
-      if self.input[self.cur] != '=':
-        raise GNException("Unexpected token: " + self.input[self.cur:])
-      self.cur += 1
-      self.ConsumeWhitespace()
-      val = self._ParseAllowTrailing()
-      self.ConsumeWhitespace()
-      d[ident] = val
-
-    return d
-
-  def _ParseAllowTrailing(self):
-    """Internal version of Parse that doesn't check for trailing stuff."""
-    self.ConsumeWhitespace()
-    if self.IsDone():
-      raise GNException("Expected input to parse.")
-
-    next_char = self.input[self.cur]
-    if next_char == '[':
-      return self.ParseList()
-    elif _IsDigitOrMinus(next_char):
-      return self.ParseNumber()
-    elif next_char == '"':
-      return self.ParseString()
-    elif self._ConstantFollows('true'):
-      return True
-    elif self._ConstantFollows('false'):
-      return False
-    else:
-      raise GNException("Unexpected token: " + self.input[self.cur:])
-
-  def _ParseIdent(self):
-    ident = ''
-
-    next_char = self.input[self.cur]
-    if not next_char.isalpha() and not next_char=='_':
-      raise GNException("Expected an identifier: " + self.input[self.cur:])
-
-    ident += next_char
-    self.cur += 1
-
-    next_char = self.input[self.cur]
-    while next_char.isalpha() or next_char.isdigit() or next_char=='_':
-      ident += next_char
-      self.cur += 1
-      next_char = self.input[self.cur]
-
-    return ident
-
-  def ParseNumber(self):
-    self.ConsumeWhitespace()
-    if self.IsDone():
-      raise GNException('Expected number but got nothing.')
-
-    begin = self.cur
-
-    # The first character can include a negative sign.
-    if not self.IsDone() and _IsDigitOrMinus(self.input[self.cur]):
-      self.cur += 1
-    while not self.IsDone() and self.input[self.cur].isdigit():
-      self.cur += 1
-
-    number_string = self.input[begin:self.cur]
-    if not len(number_string) or number_string == '-':
-      raise GNException("Not a valid number.")
-    return int(number_string)
-
-  def ParseString(self):
-    self.ConsumeWhitespace()
-    if self.IsDone():
-      raise GNException('Expected string but got nothing.')
-
-    if self.input[self.cur] != '"':
-      raise GNException('Expected string beginning in a " but got:\n  ' +
-                        self.input[self.cur:])
-    self.cur += 1  # Skip over quote.
-
-    begin = self.cur
-    while not self.IsDone() and self.input[self.cur] != '"':
-      if self.input[self.cur] == '\\':
-        self.cur += 1  # Skip over the backslash.
-        if self.IsDone():
-          raise GNException("String ends in a backslash in:\n  " +
-                            self.input)
-      self.cur += 1
-
-    if self.IsDone():
-      raise GNException('Unterminated string:\n  ' + self.input[begin:])
-
-    end = self.cur
-    self.cur += 1  # Consume trailing ".
-
-    return UnescapeGNString(self.input[begin:end])
-
-  def ParseList(self):
-    self.ConsumeWhitespace()
-    if self.IsDone():
-      raise GNException('Expected list but got nothing.')
-
-    # Skip over opening '['.
-    if self.input[self.cur] != '[':
-      raise GNException("Expected [ for list but got:\n  " +
-                        self.input[self.cur:])
-    self.cur += 1
-    self.ConsumeWhitespace()
-    if self.IsDone():
-      raise GNException("Unterminated list:\n  " + self.input)
-
-    list_result = []
-    previous_had_trailing_comma = True
-    while not self.IsDone():
-      if self.input[self.cur] == ']':
-        self.cur += 1  # Skip over ']'.
-        return list_result
-
-      if not previous_had_trailing_comma:
-        raise GNException("List items not separated by comma.")
-
-      list_result += [ self._ParseAllowTrailing() ]
-      self.ConsumeWhitespace()
-      if self.IsDone():
-        break
-
-      # Consume comma if there is one.
-      previous_had_trailing_comma = self.input[self.cur] == ','
-      if previous_had_trailing_comma:
-        # Consume comma.
-        self.cur += 1
-        self.ConsumeWhitespace()
-
-    raise GNException("Unterminated list:\n  " + self.input)
-
-  def _ConstantFollows(self, constant):
-    """Returns true if the given constant follows immediately at the current
-    location in the input. If it does, the text is consumed and the function
-    returns true. Otherwise, returns false and the current position is
-    unchanged."""
-    end = self.cur + len(constant)
-    if end > len(self.input):
-      return False  # Not enough room.
-    if self.input[self.cur:end] == constant:
-      self.cur = end
-      return True
-    return False
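
A short usage sketch tying together the conversions documented in the
docstrings above. It assumes gn_helpers is importable from the current
directory, and the argument names (is_debug, cpu) are illustrative only:

  import gn_helpers

  # Python -> GN: serialize a list for interpolation into GN code.
  print(gn_helpers.ToGNString([1, 'two', True]))   # [ 1, "two", true ]

  # GN -> Python: parse a value that GN passed back, e.g. via --values="...".
  values = gn_helpers.FromGNString('[ "file1", "file2" ]')
  assert values == ['file1', 'file2']

  # Parse a block of simple ident = literal GN arg assignments.
  args = gn_helpers.FromGNArgs('is_debug=true\ncpu="x64"\n')
  assert args == {'is_debug': True, 'cpu': 'x64'}
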
diff --git a/build/gn_helpers_unittest.py b/build/gn_helpers_unittest.py
deleted file mode 100644
index cc6018a..0000000
--- a/build/gn_helpers_unittest.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import gn_helpers
-import unittest
-
-class UnitTest(unittest.TestCase):
-  def test_ToGNString(self):
-    self.assertEqual(
-        gn_helpers.ToGNString([1, 'two', [ '"thr$\\', True, False, [] ]]),
-        '[ 1, "two", [ "\\"thr\\$\\\\", true, false, [  ] ] ]')
-
-  def test_UnescapeGNString(self):
-    # Backslash followed by a \, $, or " means the following character without
-    # its special meaning. Backslash followed by anything else is a literal.
-    self.assertEqual(
-        gn_helpers.UnescapeGNString('\\as\\$\\\\asd\\"'),
-        '\\as$\\asd"')
-
-  def test_FromGNString(self):
-    self.assertEqual(
-        gn_helpers.FromGNString('[1, -20, true, false,["as\\"", []]]'),
-        [ 1, -20, True, False, [ 'as"', [] ] ])
-
-    with self.assertRaises(gn_helpers.GNException):
-      parser = gn_helpers.GNValueParser('123 456')
-      parser.Parse()
-
-  def test_ParseBool(self):
-    parser = gn_helpers.GNValueParser('true')
-    self.assertEqual(parser.Parse(), True)
-
-    parser = gn_helpers.GNValueParser('false')
-    self.assertEqual(parser.Parse(), False)
-
-  def test_ParseNumber(self):
-    parser = gn_helpers.GNValueParser('123')
-    self.assertEqual(parser.ParseNumber(), 123)
-
-    with self.assertRaises(gn_helpers.GNException):
-      parser = gn_helpers.GNValueParser('')
-      parser.ParseNumber()
-    with self.assertRaises(gn_helpers.GNException):
-      parser = gn_helpers.GNValueParser('a123')
-      parser.ParseNumber()
-
-  def test_ParseString(self):
-    parser = gn_helpers.GNValueParser('"asdf"')
-    self.assertEqual(parser.ParseString(), 'asdf')
-
-    with self.assertRaises(gn_helpers.GNException):
-      parser = gn_helpers.GNValueParser('')  # Empty.
-      parser.ParseString()
-    with self.assertRaises(gn_helpers.GNException):
-      parser = gn_helpers.GNValueParser('asdf')  # Unquoted.
-      parser.ParseString()
-    with self.assertRaises(gn_helpers.GNException):
-      parser = gn_helpers.GNValueParser('"trailing')  # Unterminated.
-      parser.ParseString()
-
-  def test_ParseList(self):
-    parser = gn_helpers.GNValueParser('[1,]')  # Optional end comma OK.
-    self.assertEqual(parser.ParseList(), [ 1 ])
-
-    with self.assertRaises(gn_helpers.GNException):
-      parser = gn_helpers.GNValueParser('')  # Empty.
-      parser.ParseList()
-    with self.assertRaises(gn_helpers.GNException):
-      parser = gn_helpers.GNValueParser('asdf')  # No [].
-      parser.ParseList()
-    with self.assertRaises(gn_helpers.GNException):
-      parser = gn_helpers.GNValueParser('[1, 2')  # Unterminated
-      parser.ParseList()
-    with self.assertRaises(gn_helpers.GNException):
-      parser = gn_helpers.GNValueParser('[1 2]')  # No separating comma.
-      parser.ParseList()
-
-  def test_FromGNArgs(self):
-    # Booleans and numbers should work; whitespace is allowed.
-    self.assertEqual(gn_helpers.FromGNArgs('foo = true\nbar = 1\n'),
-                     {'foo': True, 'bar': 1})
-
-    # Whitespace is not required; strings should also work.
-    self.assertEqual(gn_helpers.FromGNArgs('foo="bar baz"'),
-                     {'foo': 'bar baz'})
-
-    # Lists should work.
-    self.assertEqual(gn_helpers.FromGNArgs('foo=[1, 2, 3]'),
-                     {'foo': [1, 2, 3]})
-
-    # Empty strings should return an empty dict.
-    self.assertEqual(gn_helpers.FromGNArgs(''), {})
-    self.assertEqual(gn_helpers.FromGNArgs(' \n '), {})
-
-    # Non-identifiers should raise an exception.
-    with self.assertRaises(gn_helpers.GNException):
-      gn_helpers.FromGNArgs('123 = true')
-
-    # References to other variables should raise an exception.
-    with self.assertRaises(gn_helpers.GNException):
-      gn_helpers.FromGNArgs('foo = bar')
-
-    # References to functions should raise an exception.
-    with self.assertRaises(gn_helpers.GNException):
-      gn_helpers.FromGNArgs('foo = exec_script("//build/baz.py")')
-
-    # Underscores in identifiers should work.
-    self.assertEqual(gn_helpers.FromGNArgs('_foo = true'),
-                     {'_foo': True})
-    self.assertEqual(gn_helpers.FromGNArgs('foo_bar = true'),
-                     {'foo_bar': True})
-    self.assertEqual(gn_helpers.FromGNArgs('foo_=true'),
-                     {'foo_': True})
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/build/gn_run_binary.py b/build/gn_run_binary.py
deleted file mode 100644
index d7f7165..0000000
--- a/build/gn_run_binary.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Helper script for GN to run an arbitrary binary. See compiled_action.gni.
-
-Run with:
-  python gn_run_binary.py <binary_name> [args ...]
-"""
-
-import subprocess
-import sys
-
-# This script is designed to run binaries produced by the current build. We
-# always prefix it with "./" to avoid picking up system versions that might
-# also be on the path.
-path = './' + sys.argv[1]
-
-# The rest of the arguments are passed directly to the executable.
-args = [path] + sys.argv[2:]
-
-ret = subprocess.call(args)
-if ret != 0:
-  if ret <= -100:
-    # Windows error codes such as 0xC0000005 and 0xC0000409 are much easier to
-    # recognize and differentiate in hex. In order to print them as unsigned
-    # hex we need to add 4 Gig to them.
-    print '%s failed with exit code 0x%08X' % (sys.argv[1], ret + (1 << 32))
-  else:
-    print '%s failed with exit code %d' % (sys.argv[1], ret)
-sys.exit(ret)
diff --git a/build/gyp_chromium b/build/gyp_chromium
deleted file mode 100755
index 4fc62ba..0000000
--- a/build/gyp_chromium
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Simple launcher script for gyp_chromium.py.
-# TODO(sbc): This should probably be a shell script, but for historical
-# reasons (all the python code used to live in this script without a
-# .py extension, and was often run as 'python gyp_chromium') it is
-# currently still python.
-
-execfile(__file__ + '.py')
diff --git a/build/gyp_chromium.py b/build/gyp_chromium.py
deleted file mode 100644
index ab2e470..0000000
--- a/build/gyp_chromium.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This script is now only used by the closure_compilation builders."""
-
-import argparse
-import glob
-import gyp_environment
-import os
-import shlex
-import sys
-
-script_dir = os.path.dirname(os.path.realpath(__file__))
-chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
-
-sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
-import gyp
-
-
-def ProcessGypDefinesItems(items):
-  """Converts a list of strings to a list of key-value pairs."""
-  result = []
-  for item in items:
-    tokens = item.split('=', 1)
-    # Some GYP variables have hyphens, which we don't support.
-    if len(tokens) == 2:
-      result += [(tokens[0], tokens[1])]
-    else:
-      # No value supplied, treat it as a boolean and set it. Note that we
-      # use the string '1' here so we have a consistent definition whether
-      # you do 'foo=1' or 'foo'.
-      result += [(tokens[0], '1')]
-  return result
-
-
-def GetSupplementalFiles():
-  return []
-
-
-def GetGypVars(_):
-  """Returns a dictionary of all GYP vars."""
-  # GYP defines from the environment.
-  env_items = ProcessGypDefinesItems(
-      shlex.split(os.environ.get('GYP_DEFINES', '')))
-
-  # GYP defines from the command line.
-  parser = argparse.ArgumentParser()
-  parser.add_argument('-D', dest='defines', action='append', default=[])
-  cmdline_input_items = parser.parse_known_args()[0].defines
-  cmdline_items = ProcessGypDefinesItems(cmdline_input_items)
-
-  return dict(env_items + cmdline_items)
-
-
-def main():
-  gyp_environment.SetEnvironment()
-
-  print 'Updating projects from gyp files...'
-  sys.stdout.flush()
-  sys.exit(gyp.main(sys.argv[1:] + [
-      '--check',
-      '--no-circular-check',
-      '-I', os.path.join(script_dir, 'common.gypi'),
-      '-D', 'gyp_output_dir=out']))
-
-if __name__ == '__main__':
-  sys.exit(main())
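
ProcessGypDefinesItems above normalizes GYP_DEFINES-style tokens into key/value
pairs, and GetGypVars merges the environment and command-line results with the
command line winning on collisions. A brief sketch of that merge, assuming
ProcessGypDefinesItems is in scope (it is defined in the deleted file above)
and using hypothetical define names:

  # Tokens as they would appear in GYP_DEFINES or after -D on the command line.
  env_items = ProcessGypDefinesItems(['component=shared_library', 'disable_nacl'])
  cmdline_items = ProcessGypDefinesItems(['component=static_library'])

  # Later pairs win on key collisions, so the command line overrides the env.
  merged = dict(env_items + cmdline_items)
  assert merged == {'component': 'static_library', 'disable_nacl': '1'}
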
diff --git a/build/gyp_environment.py b/build/gyp_environment.py
deleted file mode 100644
index 51b9136..0000000
--- a/build/gyp_environment.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Sets up various automatic gyp environment variables. These are used by
-gyp_chromium and landmines.py which run at different stages of runhooks. To
-make sure settings are consistent between them, all setup should happen here.
-"""
-
-import gyp_helper
-import os
-import sys
-import vs_toolchain
-
-def SetEnvironment():
-  """Sets defaults for GYP_* variables."""
-  gyp_helper.apply_chromium_gyp_env()
-
-  # Default to ninja on linux and windows, but only if no generator has
-  # explicitly been set.
-  # Also default to ninja on mac, but only when not building chrome/ios.
-  # . -f / --format has precedence over the env var, no need to check for it
-  # . set the env var only if it hasn't been set yet
-  # . chromium.gyp_env has been applied to os.environ at this point already
-  if sys.platform.startswith(('linux', 'win', 'freebsd', 'darwin')) and \
-      not os.environ.get('GYP_GENERATORS'):
-    os.environ['GYP_GENERATORS'] = 'ninja'
-
-  vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
diff --git a/build/gyp_helper.py b/build/gyp_helper.py
deleted file mode 100644
index c840f2d..0000000
--- a/build/gyp_helper.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file helps gyp_chromium and landmines correctly set up the gyp
-# environment from chromium.gyp_env on disk
-
-import os
-
-SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-CHROME_SRC = os.path.dirname(SCRIPT_DIR)
-
-
-def apply_gyp_environment_from_file(file_path):
-  """Reads in a *.gyp_env file and applies the valid keys to os.environ."""
-  if not os.path.exists(file_path):
-    return
-  with open(file_path, 'rU') as f:
-    file_contents = f.read()
-  try:
-    file_data = eval(file_contents, {'__builtins__': None}, None)
-  except SyntaxError, e:
-    e.filename = os.path.abspath(file_path)
-    raise
-  supported_vars = (
-      'CC',
-      'CC_wrapper',
-      'CC.host_wrapper',
-      'CHROMIUM_GYP_FILE',
-      'CHROMIUM_GYP_SYNTAX_CHECK',
-      'CXX',
-      'CXX_wrapper',
-      'CXX.host_wrapper',
-      'GYP_DEFINES',
-      'GYP_GENERATOR_FLAGS',
-      'GYP_CROSSCOMPILE',
-      'GYP_GENERATOR_OUTPUT',
-      'GYP_GENERATORS',
-      'GYP_INCLUDE_FIRST',
-      'GYP_INCLUDE_LAST',
-      'GYP_MSVS_VERSION',
-  )
-  for var in supported_vars:
-    file_val = file_data.get(var)
-    if file_val:
-      if var in os.environ:
-        behavior = 'replaces'
-        if var == 'GYP_DEFINES':
-          result = file_val + ' ' + os.environ[var]
-          behavior = 'merges with, and individual components override,'
-        else:
-          result = os.environ[var]
-        print 'INFO: Environment value for "%s" %s value in %s' % (
-            var, behavior, os.path.abspath(file_path)
-        )
-        string_padding = max(len(var), len(file_path), len('result'))
-        print '      %s: %s' % (var.rjust(string_padding), os.environ[var])
-        print '      %s: %s' % (file_path.rjust(string_padding), file_val)
-        os.environ[var] = result
-      else:
-        os.environ[var] = file_val
-
-
-def apply_chromium_gyp_env():
-  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
-    # Update the environment based on chromium.gyp_env
-    path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
-    apply_gyp_environment_from_file(path)
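
apply_gyp_environment_from_file above reads a *.gyp_env file as a Python
dictionary literal, evaluated with builtins disabled, and copies whitelisted
keys into os.environ. A minimal sketch of that restricted-eval step; the file
contents shown are an illustrative assumption, and ast.literal_eval would be a
stricter alternative:

  # Hypothetical chromium.gyp_env contents:
  #   { 'GYP_DEFINES': 'component=shared_library', 'GYP_GENERATORS': 'ninja' }

  def load_gyp_env(path):
    """Evaluates the file as a Python literal with builtins disabled."""
    with open(path) as f:
      contents = f.read()
    return eval(contents, {'__builtins__': None}, None)
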
diff --git a/build/gypi_to_gn.py b/build/gypi_to_gn.py
deleted file mode 100644
index 2a3a72a..0000000
--- a/build/gypi_to_gn.py
+++ /dev/null
@@ -1,192 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Converts a given gypi file to a python scope and writes the result to stdout.
-
-USING THIS SCRIPT IN CHROMIUM
-
-Forking Python to run this script in the middle of GN is slow, especially on
-Windows, and it makes both the GYP and GN files harder to follow. You can't
-use "git grep" to find files in the GN build any more, and tracking everything
-in GYP down requires a level of indirection. Any calls will have to be removed
-and cleaned up once the GYP-to-GN transition is complete.
-
-As a result, we only use this script when the list of files is large and
-frequently-changing. In these cases, having one canonical list outweighs the
-downsides.
-
-As of this writing, the GN build is basically complete. It's likely that all
-large and frequently changing targets where this is appropriate use this
-mechanism already. And since we hope to turn down the GYP build soon, the time
-horizon is also relatively short. As a result, it is likely that no additional
-uses of this script should ever be added to the build. During this later part
-of the transition period, we should be focusing more and more on the absolute
-readability of the GN build.
-
-
-HOW TO USE
-
-It is assumed that the file contains a toplevel dictionary, and this script
-will return that dictionary as a GN "scope" (see example below). This script
-does not know anything about GYP and it will not expand variables or execute
-conditions.
-
-It will strip conditions blocks.
-
-A variables block at the top level will be flattened so that the variables
-appear in the root dictionary. This way they can be returned to the GN code.
-
-Say your_file.gypi looked like this:
-  {
-     'sources': [ 'a.cc', 'b.cc' ],
-     'defines': [ 'ENABLE_DOOM_MELON' ],
-  }
-
-You would call it like this:
-  gypi_values = exec_script("//build/gypi_to_gn.py",
-                            [ rebase_path("your_file.gypi") ],
-                            "scope",
-                            [ "your_file.gypi" ])
-
-Notes:
- - The rebase_path call converts the gypi file from being relative to the
-   current build file to being system absolute for calling the script, which
-   will have a different current directory than this file.
-
- - The "scope" parameter tells GN to interpret the result as a series of GN
-   variable assignments.
-
- - The last file argument to exec_script tells GN that the given file is a
-   dependency of the build so Ninja can automatically re-run GN if the file
-   changes.
-
-Read the values into a target like this:
-  component("mycomponent") {
-    sources = gypi_values.sources
-    defines = gypi_values.defines
-  }
-
-Sometimes your .gypi file will include paths relative to a different
-directory than the current .gn file. In this case, you can rebase them to
-be relative to the current directory.
-  sources = rebase_path(gypi_values.sources, ".",
-                        "//path/gypi/input/values/are/relative/to")
-
-This script tolerates a toplevel dictionary with or without a 'variables'
-section. If the toplevel dictionary just contains one item called 'variables',
-it will be collapsed away and the result will be the contents of that
-dictionary. Some
-.gypi files are written with or without this, depending on how they expect to
-be embedded into a .gyp file.
-
-This script also has the ability to replace certain substrings in the input.
-Generally this is used to emulate GYP variable expansion. If you passed the
-argument "--replace=<(foo)=bar" then all instances of "<(foo)" in strings in
-the input will be replaced with "bar":
-
-  gypi_values = exec_script("//build/gypi_to_gn.py",
-                            [ rebase_path("your_file.gypi"),
-                              "--replace=<(foo)=bar"],
-                            "scope",
-                            [ "your_file.gypi" ])
-
-"""
-
-import gn_helpers
-from optparse import OptionParser
-import sys
-
-def LoadPythonDictionary(path):
-  file_string = open(path).read()
-  try:
-    file_data = eval(file_string, {'__builtins__': None}, None)
-  except SyntaxError, e:
-    e.filename = path
-    raise
-  except Exception, e:
-    raise Exception("Unexpected error while reading %s: %s" % (path, str(e)))
-
-  assert isinstance(file_data, dict), "%s does not eval to a dictionary" % path
-
-  # Flatten any variables to the top level.
-  if 'variables' in file_data:
-    file_data.update(file_data['variables'])
-    del file_data['variables']
-
-  # Strip all elements that this script can't process.
-  elements_to_strip = [
-    'conditions',
-    'target_conditions',
-    'target_defaults',
-    'targets',
-    'includes',
-    'actions',
-  ]
-  for element in elements_to_strip:
-    if element in file_data:
-      del file_data[element]
-
-  return file_data
-
-
-def ReplaceSubstrings(values, search_for, replace_with):
-  """Recursively replaces substrings in a value.
-
-  Replaces all occurrences of "search_for" with "replace_with" in all
-  strings occurring in "values". This is done by recursively iterating into
-  lists as well as the keys and values of dictionaries."""
-  if isinstance(values, str):
-    return values.replace(search_for, replace_with)
-
-  if isinstance(values, list):
-    return [ReplaceSubstrings(v, search_for, replace_with) for v in values]
-
-  if isinstance(values, dict):
-    # For dictionaries, do the search for both the key and values.
-    result = {}
-    for key, value in values.items():
-      new_key = ReplaceSubstrings(key, search_for, replace_with)
-      new_value = ReplaceSubstrings(value, search_for, replace_with)
-      result[new_key] = new_value
-    return result
-
-  # Assume everything else is unchanged.
-  return values
-
-def main():
-  parser = OptionParser()
-  parser.add_option("-r", "--replace", action="append",
-    help="Replaces substrings. If passed a=b, replaces all substrs a with b.")
-  (options, args) = parser.parse_args()
-
-  if len(args) != 1:
-    raise Exception("Need one argument which is the .gypi file to read.")
-
-  data = LoadPythonDictionary(args[0])
-  if options.replace:
-    # Do replacements for all specified patterns.
-    for replace in options.replace:
-      split = replace.split('=')
-      # Allow "foo=" to replace with nothing.
-      if len(split) == 1:
-        split.append('')
-      assert len(split) == 2, "Replacement must be of the form 'key=value'."
-      data = ReplaceSubstrings(data, split[0], split[1])
-
-  # Sometimes .gypi files use the GYP syntax with percents at the end of the
-  # variable name (to indicate not to overwrite a previously-defined value):
-  #   'foo%': 'bar',
-  # Convert these to regular variables.
-  # Iterate over a copy of the keys since the loop mutates the dictionary.
-  for key in data.keys():
-    if len(key) > 1 and key[len(key) - 1] == '%':
-      data[key[:-1]] = data[key]
-      del data[key]
-
-  print gn_helpers.ToGNString(data)
-
-if __name__ == '__main__':
-  try:
-    main()
-  except Exception, e:
-    print str(e)
-    sys.exit(1)
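
Putting the pieces together, here is a sketch of what the script emits for the
docstring's example .gypi file, assuming gn_helpers is importable; the printed
output is what GN consumes as a scope via exec_script():

  import gn_helpers

  # The docstring's example .gypi contents, after LoadPythonDictionary().
  gypi = {
    'sources': ['a.cc', 'b.cc'],
    'defines': ['ENABLE_DOOM_MELON'],
  }

  print(gn_helpers.ToGNString(gypi))
  # defines = [ "ENABLE_DOOM_MELON" ]
  # sources = [ "a.cc", "b.cc" ]
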
diff --git a/build/install-build-deps-android.sh b/build/install-build-deps-android.sh
deleted file mode 100755
index 15fea85..0000000
--- a/build/install-build-deps-android.sh
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Script to install everything needed to build chromium on android, including
-# items requiring sudo privileges.
-# See https://www.chromium.org/developers/how-tos/android-build-instructions
-
-args="$@"
-
-if ! uname -m | egrep -q "i686|x86_64"; then
-  echo "Only x86 architectures are currently supported" >&2
-  exit
-fi
-
-# Exit if any commands fail.
-set -e
-
-lsb_release=$(lsb_release --codename --short)
-
-# Install first the default Linux build deps.
-"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \
-  --no-syms --lib32 --no-arm --no-chromeos-fonts --no-nacl --no-prompt "${args}"
-
-# Fix deps
-sudo apt-get -f install
-
-# common
-sudo apt-get -y install lib32z1 lighttpd python-pexpect xvfb x11-utils
-
-# Some binaries in the Android SDK require 32-bit libraries on the host.
-# See https://developer.android.com/sdk/installing/index.html?pkg=tools
-sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386
-
-# Required for apk-patch-size-estimator
-sudo apt-get -y install bsdiff
-
-# Do our own error handling for java.
-set +e
-
-function IsJava8() {
-  # Arg is either "java" or "javac"
-  $1 -version 2>&1 | grep -q '1\.8'
-}
-
-if ! (IsJava8 java && IsJava8 javac); then
-  sudo apt-get -y install openjdk-8-jre openjdk-8-jdk
-fi
-
-# There can be several reasons why java8 is not default despite being installed.
-# Just show an error and exit.
-if ! (IsJava8 java && IsJava8 javac); then
-  echo
-  echo "Automatic java installation failed."
-  echo '`java -version` reports:'
-  java -version
-  echo
-  echo '`javac -version` reports:'
-  javac -version
-  echo
-  echo "Please ensure that JDK 8 is installed and resolves first in your PATH."
-  echo -n '`which java` reports: '
-  which java
-  echo -n '`which javac` reports: '
-  which javac
-  echo
-  echo "You might also try running:"
-  echo "    sudo update-java-alternatives -s java-1.8.0-openjdk-amd64"
-  exit 1
-fi
-
-echo "install-build-deps-android.sh complete."
diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh
deleted file mode 100755
index b110558..0000000
--- a/build/install-build-deps.sh
+++ /dev/null
@@ -1,722 +0,0 @@
-#!/bin/bash -e
-
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Script to install everything needed to build chromium (well, ideally, anyway)
-# See https://chromium.googlesource.com/chromium/src/+/master/docs/linux_build_instructions.md
-
-usage() {
-  echo "Usage: $0 [--options]"
-  echo "Options:"
-  echo "--[no-]syms: enable or disable installation of debugging symbols"
-  echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot"
-  echo "--[no-]arm: enable or disable installation of arm cross toolchain"
-  echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
-       "fonts"
-  echo "--[no-]nacl: enable or disable installation of prerequisites for"\
-       "building standalone NaCl and all its toolchains"
-  echo "--no-prompt: silently select standard options/defaults"
-  echo "--quick-check: quickly try to determine if dependencies are installed"
-  echo "               (this avoids interactive prompts and sudo commands,"
-  echo "               so might not be 100% accurate)"
-  echo "--unsupported: attempt installation even on unsupported systems"
-  echo "Script will prompt interactively if options not given."
-  exit 1
-}
-
-# Waits for the user to press 'Y' or 'N'. Either uppercase or lowercase is
-# accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has
-# been provided to yes_no(), the function also accepts RETURN as a user input.
-# The parameter specifies the exit code that should be returned in that case.
-# The function will echo the user's selection followed by a newline character.
-# Users can abort the function by pressing CTRL-C. This will call "exit 1".
-yes_no() {
-  if [ 0 -ne "${do_default-0}" ] ; then
-    [ $1 -eq 0 ] && echo "Y" || echo "N"
-    return $1
-  fi
-  local c
-  while :; do
-    c="$(trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT
-         stty -echo iuclc -icanon 2>/dev/null
-         dd count=1 bs=1 2>/dev/null | od -An -tx1)"
-    case "$c" in
-      " 0a") if [ -n "$1" ]; then
-               [ $1 -eq 0 ] && echo "Y" || echo "N"
-               return $1
-             fi
-             ;;
-      " 79") echo "Y"
-             return 0
-             ;;
-      " 6e") echo "N"
-             return 1
-             ;;
-      "")    echo "Aborted" >&2
-             exit 1
-             ;;
-      *)     # The user pressed an unrecognized key. As we are not echoing
-             # any incorrect user input, alert the user by ringing the bell.
-             (tput bel) 2>/dev/null
-             ;;
-    esac
-  done
-}
-
-# Checks whether a particular package is available in the repos.
-# USAGE: $ package_exists <package name>
-package_exists() {
-  # 'apt-cache search' takes a regex string, so e.g. the +'s in packages like
-  # "libstdc++" need to be escaped.
-  local escaped="$(echo $1 | sed 's/[\~\+\.\:-]/\\&/g')"
-  [ ! -z "$(apt-cache search --names-only "${escaped}" | \
-            awk '$1 == "'$1'" { print $1; }')" ]
-}
-
-# These default to on because (some) bots need them and it keeps things
-# simple for the bot setup if all bots just run the script in its default
-# mode.  Developers who don't want stuff they don't need installed on their
-# own workstations can pass --no-arm --no-nacl when running the script.
-do_inst_arm=1
-do_inst_nacl=1
-
-while test "$1" != ""
-do
-  case "$1" in
-  --syms)                   do_inst_syms=1;;
-  --no-syms)                do_inst_syms=0;;
-  --lib32)                  do_inst_lib32=1;;
-  --arm)                    do_inst_arm=1;;
-  --no-arm)                 do_inst_arm=0;;
-  --chromeos-fonts)         do_inst_chromeos_fonts=1;;
-  --no-chromeos-fonts)      do_inst_chromeos_fonts=0;;
-  --nacl)                   do_inst_nacl=1;;
-  --no-nacl)                do_inst_nacl=0;;
-  --no-prompt)              do_default=1
-                            do_quietly="-qq --assume-yes"
-    ;;
-  --quick-check)            do_quick_check=1;;
-  --unsupported)            do_unsupported=1;;
-  *) usage;;
-  esac
-  shift
-done
-
-if test "$do_inst_arm" = "1"; then
-  do_inst_lib32=1
-fi
-
-# Check for lsb_release command in $PATH
-if ! which lsb_release > /dev/null; then
-  echo "ERROR: lsb_release not found in \$PATH" >&2
-  exit 1;
-fi
-
-distro_codename=$(lsb_release --codename --short)
-distro_id=$(lsb_release --id --short)
-supported_codenames="(trusty|xenial|artful|bionic)"
-supported_ids="(Debian)"
-if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
-  if [[ ! $distro_codename =~ $supported_codenames &&
-        ! $distro_id =~ $supported_ids ]]; then
-    echo -e "ERROR: The only supported distros are\n" \
-      "\tUbuntu 14.04 LTS (trusty)\n" \
-      "\tUbuntu 16.04 LTS (xenial)\n" \
-      "\tUbuntu 17.10 (artful)\n" \
-      "\tUbuntu 18.04 LTS (bionic)\n" \
-      "\tDebian 8 (jessie) or later" >&2
-    exit 1
-  fi
-
-  if ! uname -m | egrep -q "i686|x86_64"; then
-    echo "Only x86 architectures are currently supported" >&2
-    exit
-  fi
-fi
-
-if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then
-  echo "Running as non-root user."
-  echo "You might have to enter your password one or more times for 'sudo'."
-  echo
-fi
-
-# Packages needed for chromeos only
-chromeos_dev_list="libbluetooth-dev libxkbcommon-dev"
-
-if package_exists realpath; then
-  chromeos_dev_list="${chromeos_dev_list} realpath"
-fi
-
-# Packages needed for development
-dev_list="\
-  bison
-  bzip2
-  cdbs
-  curl
-  dbus-x11
-  dpkg-dev
-  elfutils
-  devscripts
-  fakeroot
-  flex
-  g++
-  git-core
-  git-svn
-  gperf
-  libappindicator-dev
-  libappindicator3-dev
-  libasound2-dev
-  libbrlapi-dev
-  libbz2-dev
-  libcairo2-dev
-  libcap-dev
-  libcups2-dev
-  libcurl4-gnutls-dev
-  libdrm-dev
-  libelf-dev
-  libffi-dev
-  libgbm-dev
-  libglib2.0-dev
-  libglu1-mesa-dev
-  libgnome-keyring-dev
-  libgtk2.0-dev
-  libgtk-3-dev
-  libkrb5-dev
-  libnspr4-dev
-  libnss3-dev
-  libpam0g-dev
-  libpci-dev
-  libpulse-dev
-  libsctp-dev
-  libspeechd-dev
-  libsqlite3-dev
-  libssl-dev
-  libudev-dev
-  libwww-perl
-  libxslt1-dev
-  libxss-dev
-  libxt-dev
-  libxtst-dev
-  locales
-  openbox
-  p7zip
-  patch
-  perl
-  pkg-config
-  python
-  python-cherrypy3
-  python-crypto
-  python-dev
-  python-numpy
-  python-opencv
-  python-openssl
-  python-psutil
-  python-yaml
-  rpm
-  ruby
-  subversion
-  wdiff
-  x11-utils
-  xcompmgr
-  xz-utils
-  zip
-  $chromeos_dev_list
-"
-
-# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
-# NaCl binaries.
-if file -L /sbin/init | grep -q 'ELF 64-bit'; then
-  dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
-fi
-
-# Run-time libraries required by chromeos only
-chromeos_lib_list="libpulse0 libbz2-1.0"
-
-# Full list of required run-time libraries
-lib_list="\
-  libappindicator1
-  libappindicator3-1
-  libasound2
-  libatk1.0-0
-  libc6
-  libcairo2
-  libcap2
-  libcups2
-  libexpat1
-  libffi6
-  libfontconfig1
-  libfreetype6
-  libglib2.0-0
-  libgnome-keyring0
-  libgtk2.0-0
-  libgtk-3-0
-  libpam0g
-  libpango1.0-0
-  libpci3
-  libpcre3
-  libpixman-1-0
-  libspeechd2
-  libstdc++6
-  libsqlite3-0
-  libwayland-egl1-mesa
-  libx11-6
-  libx11-xcb1
-  libxau6
-  libxcb1
-  libxcomposite1
-  libxcursor1
-  libxdamage1
-  libxdmcp6
-  libxext6
-  libxfixes3
-  libxi6
-  libxinerama1
-  libxrandr2
-  libxrender1
-  libxtst6
-  zlib1g
-  $chromeos_lib_list
-"
-
-# Debugging symbols for all of the run-time libraries
-dbg_list="\
-  libc6-dbg
-  libffi6-dbg
-  libgtk2.0-0-dbg
-  libpcre3-dbg
-  libxau6-dbg
-  libxcb1-dbg
-  libxcomposite1-dbg
-  libxdmcp6-dbg
-  libxext6-dbg
-  libxinerama1-dbg
-  zlib1g-dbg
-"
-
-if package_exists libstdc++6-6-dbg; then
-  dbg_list="${dbg_list} libstdc++6-6-dbg"
-elif package_exists libstdc++6-4.9-dbg; then
-  dbg_list="${dbg_list} libstdc++6-4.9-dbg"
-else
-  dbg_list="${dbg_list} libstdc++6-4.8-dbg"
-fi
-if package_exists libgtk-3-0-dbgsym; then
-  dbg_list="${dbg_list} libgtk-3-0-dbgsym"
-elif package_exists libgtk-3-0-dbg; then
-  dbg_list="${dbg_list} libgtk-3-0-dbg"
-fi
-if package_exists libatk1.0-0-dbgsym; then
-  dbg_list="${dbg_list} libatk1.0-0-dbgsym"
-elif package_exists libatk1.0-dbg; then
-  dbg_list="${dbg_list} libatk1.0-dbg"
-fi
-if package_exists libcairo2-dbgsym; then
-  dbg_list="${dbg_list} libcairo2-dbgsym"
-elif package_exists libcairo2-dbg; then
-  dbg_list="${dbg_list} libcairo2-dbg"
-fi
-if package_exists libfontconfig1-dbgsym; then
-  dbg_list="${dbg_list} libfontconfig1-dbgsym"
-else
-  dbg_list="${dbg_list} libfontconfig1-dbg"
-fi
-if package_exists libxdamage1-dbgsym; then
-  dbg_list="${dbg_list} libxdamage1-dbgsym"
-elif package_exists libxdamage1-dbg; then
-  dbg_list="${dbg_list} libxdamage1-dbg"
-fi
-if package_exists libpango1.0-dev-dbgsym; then
-  dbg_list="${dbg_list} libpango1.0-dev-dbgsym"
-elif package_exists libpango1.0-0-dbg; then
-  dbg_list="${dbg_list} libpango1.0-0-dbg"
-fi
-if package_exists libx11-6-dbg; then
-  dbg_list="${dbg_list} libx11-6-dbg"
-fi
-if package_exists libx11-xcb1-dbg; then
-  dbg_list="${dbg_list} libx11-xcb1-dbg"
-fi
-if package_exists libxfixes3-dbg; then
-  dbg_list="${dbg_list} libxfixes3-dbg"
-fi
-if package_exists libxi6-dbg; then
-  dbg_list="${dbg_list} libxi6-dbg"
-fi
-if package_exists libxrandr2-dbg; then
-  dbg_list="${dbg_list} libxrandr2-dbg"
-fi
-if package_exists libxrender1-dbg; then
-  dbg_list="${dbg_list} libxrender1-dbg"
-fi
-if package_exists libxtst6-dbg; then
-  dbg_list="${dbg_list} libxtst6-dbg"
-fi
-if package_exists libglib2.0-0-dbgsym; then
-  dbg_list="${dbg_list} libglib2.0-0-dbgsym"
-elif package_exists libglib2.0-0-dbg; then
-  dbg_list="${dbg_list} libglib2.0-0-dbg"
-fi
-if package_exists libxcursor1-dbgsym; then
-  dbg_list="${dbg_list} libxcursor1-dbgsym"
-elif package_exists libxcursor1-dbg; then
-  dbg_list="${dbg_list} libxcursor1-dbg"
-fi
-if package_exists libsqlite3-0-dbgsym; then
-  dbg_list="${dbg_list} libsqlite3-0-dbgsym"
-else
-  dbg_list="${dbg_list} libsqlite3-0-dbg"
-fi
-if package_exists libpixman-1-0-dbgsym; then
-  dbg_list="${dbg_list} libpixman-1-0-dbgsym"
-else
-  dbg_list="${dbg_list} libpixman-1-0-dbg"
-fi
-
-# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf
-lib32_list="linux-libc-dev:i386 libpci3:i386"
-
-# 32-bit libraries needed for a 32-bit build
-lib32_list="$lib32_list libx11-xcb1:i386"
-
-# arm cross toolchain packages needed to build chrome on armhf
-EM_REPO="deb http://emdebian.org/tools/debian/ jessie main"
-EM_SOURCE=$(cat <<EOF
-# Repo added by Chromium $0
-${EM_REPO}
-# deb-src http://emdebian.org/tools/debian/ jessie main
-EOF
-)
-EM_ARCHIVE_KEY_FINGER="084C6C6F39159EDB67969AA87DE089671804772E"
-GPP_ARM_PACKAGE="g++-arm-linux-gnueabihf"
-case $distro_codename in
-  jessie)
-    eval $(apt-config shell APT_SOURCESDIR 'Dir::Etc::sourceparts/d')
-    CROSSTOOLS_LIST="${APT_SOURCESDIR}/crosstools.list"
-    arm_list="libc6-dev:armhf
-              linux-libc-dev:armhf"
-    if test "$do_inst_arm" = "1"; then
-      if $(dpkg-query -W ${GPP_ARM_PACKAGE} &>/dev/null); then
-        arm_list+=" ${GPP_ARM_PACKAGE}"
-      else
-        echo "The Debian Cross-toolchains repository is necessary to"
-        echo "cross-compile Chromium for arm."
-        echo -n "Do you want me to add it for you (y/N) "
-        if yes_no 1; then
-          gpg --keyserver pgp.mit.edu --recv-keys ${EM_ARCHIVE_KEY_FINGER}
-          gpg -a --export ${EM_ARCHIVE_KEY_FINGER} | sudo apt-key add -
-          if ! grep "^${EM_REPO}" "${CROSSTOOLS_LIST}" &>/dev/null; then
-            echo "${EM_SOURCE}" | sudo tee -a "${CROSSTOOLS_LIST}" >/dev/null
-          fi
-          arm_list+=" ${GPP_ARM_PACKAGE}"
-        fi
-      fi
-    fi
-    ;;
-  # All necessary ARM packages are available on the default repos on
-  # Debian 9 and later.
-  *)
-    arm_list="libc6-dev-armhf-cross
-              linux-libc-dev-armhf-cross
-              ${GPP_ARM_PACKAGE}"
-    ;;
-esac
-
-# Workaround for a dependency issue on Ubuntu/Trusty: http://crbug.com/435056
-case $distro_codename in
-  trusty)
-    arm_list+=" g++-4.8-multilib-arm-linux-gnueabihf
-                gcc-4.8-multilib-arm-linux-gnueabihf"
-    ;;
-  xenial|artful|bionic)
-    arm_list+=" g++-5-multilib-arm-linux-gnueabihf
-                gcc-5-multilib-arm-linux-gnueabihf
-                gcc-arm-linux-gnueabihf"
-    ;;
-esac
-
-# Packages to build NaCl, its toolchains, and its ports.
-naclports_list="ant autoconf bison cmake gawk intltool xutils-dev xsltproc"
-nacl_list="\
-  g++-mingw-w64-i686
-  lib32z1-dev
-  libasound2:i386
-  libcap2:i386
-  libelf-dev:i386
-  libfontconfig1:i386
-  libglib2.0-0:i386
-  libgpm2:i386
-  libgtk2.0-0:i386
-  libgtk-3-0:i386
-  libncurses5:i386
-  lib32ncurses5-dev
-  libnss3:i386
-  libpango1.0-0:i386
-  libssl-dev:i386
-  libtinfo-dev
-  libtinfo-dev:i386
-  libtool
-  libxcomposite1:i386
-  libxcursor1:i386
-  libxdamage1:i386
-  libxi6:i386
-  libxrandr2:i386
-  libxss1:i386
-  libxtst6:i386
-  texinfo
-  xvfb
-  ${naclports_list}
-"
-
-if package_exists libssl1.1; then
-  nacl_list="${nacl_list} libssl1.1:i386"
-elif package_exists libssl1.0.2; then
-  nacl_list="${nacl_list} libssl1.0.2:i386"
-else
-  nacl_list="${nacl_list} libssl1.0.0:i386"
-fi
-
-# Some package names have changed over time
-if package_exists libpng16-16; then
-  lib_list="${lib_list} libpng16-16"
-else
-  lib_list="${lib_list} libpng12-0"
-fi
-if package_exists libnspr4-dbg; then
-  dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg"
-  lib_list="${lib_list} libnspr4 libnss3"
-else
-  dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg"
-  lib_list="${lib_list} libnspr4-0d libnss3-1d"
-fi
-if package_exists libjpeg-dev; then
-  dev_list="${dev_list} libjpeg-dev"
-else
-  dev_list="${dev_list} libjpeg62-dev"
-fi
-if package_exists libudev1; then
-  dev_list="${dev_list} libudev1"
-  nacl_list="${nacl_list} libudev1:i386"
-else
-  dev_list="${dev_list} libudev0"
-  nacl_list="${nacl_list} libudev0:i386"
-fi
-if package_exists libbrlapi0.6; then
-  dev_list="${dev_list} libbrlapi0.6"
-else
-  dev_list="${dev_list} libbrlapi0.5"
-fi
-if package_exists apache2.2-bin; then
-  dev_list="${dev_list} apache2.2-bin"
-else
-  dev_list="${dev_list} apache2-bin"
-fi
-if package_exists libav-tools; then
-  dev_list="${dev_list} libav-tools"
-fi
-if package_exists php7.2-cgi; then
-  dev_list="${dev_list} php7.2-cgi libapache2-mod-php7.2"
-elif package_exists php7.1-cgi; then
-  dev_list="${dev_list} php7.1-cgi libapache2-mod-php7.1"
-elif package_exists php7.0-cgi; then
-  dev_list="${dev_list} php7.0-cgi libapache2-mod-php7.0"
-else
-  dev_list="${dev_list} php5-cgi libapache2-mod-php5"
-fi
-
-# Some packages are only needed if the distribution actually supports
-# installing them.
-if package_exists appmenu-gtk; then
-  lib_list="$lib_list appmenu-gtk"
-fi
-
-# Cross-toolchain strip is needed for building the sysroots.
-if package_exists binutils-arm-linux-gnueabihf; then
-  dev_list="${dev_list} binutils-arm-linux-gnueabihf"
-fi
-if package_exists binutils-aarch64-linux-gnu; then
-  dev_list="${dev_list} binutils-aarch64-linux-gnu"
-fi
-if package_exists binutils-mipsel-linux-gnu; then
-  dev_list="${dev_list} binutils-mipsel-linux-gnu"
-fi
-if package_exists binutils-mips64el-linux-gnuabi64; then
-  dev_list="${dev_list} binutils-mips64el-linux-gnuabi64"
-fi
-
-# When cross building for arm/Android on 64-bit systems the host binaries
-# that are part of v8 need to be compiled with -m32 which means
-# that basic multilib support is needed.
-if file -L /sbin/init | grep -q 'ELF 64-bit'; then
-  # gcc-multilib conflicts with the arm cross compiler (at least in trusty) but
-  # g++-X.Y-multilib gives us the 32-bit support that we need. Find out the
-  # appropriate value of X and Y by seeing what version the current
-  # distribution's g++-multilib package depends on.
-  multilib_package=$(apt-cache depends g++-multilib --important | \
-      grep -E --color=never --only-matching '\bg\+\+-[0-9.]+-multilib\b')
-  lib32_list="$lib32_list $multilib_package"
-fi
-
-if test "$do_inst_syms" = "" && test 0 -eq ${do_quick_check-0}
-then
-  echo "This script installs all tools and libraries needed to build Chromium."
-  echo ""
-  echo "For most of the libraries, it can also install debugging symbols, which"
-  echo "will allow you to debug code in the system libraries. Most developers"
-  echo "won't need these symbols."
-  echo -n "Do you want me to install them for you (y/N) "
-  if yes_no 1; then
-    do_inst_syms=1
-  fi
-fi
-if test "$do_inst_syms" = "1"; then
-  echo "Including debugging symbols."
-else
-  echo "Skipping debugging symbols."
-  dbg_list=
-fi
-
-if test "$do_inst_lib32" = "1" ; then
-  echo "Including 32-bit libraries."
-else
-  echo "Skipping 32-bit libraries."
-  lib32_list=
-fi
-
-if test "$do_inst_arm" = "1" ; then
-  echo "Including ARM cross toolchain."
-else
-  echo "Skipping ARM cross toolchain."
-  arm_list=
-fi
-
-if test "$do_inst_nacl" = "1"; then
-  echo "Including NaCl, NaCl toolchain, NaCl ports dependencies."
-else
-  echo "Skipping NaCl, NaCl toolchain, NaCl ports dependencies."
-  nacl_list=
-fi
-
-# The `sort -r -s -t: -k2` sorts all the :i386 packages to the front, to avoid
-# confusing dpkg-query (crbug.com/446172).
-packages="$(
-  echo "${dev_list} ${lib_list} ${dbg_list} ${lib32_list} ${arm_list}"\
-       "${nacl_list}" | tr " " "\n" | sort -u | sort -r -s -t: -k2 | tr "\n" " "
-)"
-
-if [ 1 -eq "${do_quick_check-0}" ] ; then
-  if ! missing_packages="$(dpkg-query -W -f ' ' ${packages} 2>&1)"; then
-    # Distinguish between packages that actually aren't available to the
-    # system (i.e. not in any repo) and packages that just aren't known to
-    # dpkg (i.e. managed by apt).
-    missing_packages="$(echo "${missing_packages}" | awk '{print $NF}')"
-    not_installed=""
-    unknown=""
-    for p in ${missing_packages}; do
-      if apt-cache show ${p} > /dev/null 2>&1; then
-        not_installed="${p}\n${not_installed}"
-      else
-        unknown="${p}\n${unknown}"
-      fi
-    done
-    if [ -n "${not_installed}" ]; then
-      echo "WARNING: The following packages are not installed:"
-      echo -e "${not_installed}" | sed -e "s/^/  /"
-    fi
-    if [ -n "${unknown}" ]; then
-      echo "WARNING: The following packages are unknown to your system"
-      echo "(maybe missing a repo or need to 'sudo apt-get update'):"
-      echo -e "${unknown}" | sed -e "s/^/  /"
-    fi
-    exit 1
-  fi
-  exit 0
-fi
-
-if test "$do_inst_lib32" = "1" || test "$do_inst_nacl" = "1"; then
-  sudo dpkg --add-architecture i386
-fi
-sudo apt-get update
-
-# We initially run "apt-get" with the --reinstall option and parse its output.
-# This way, we can find all the packages that need to be newly installed
-# without accidentally promoting any packages from "auto" to "manual".
-# We then re-run "apt-get" with just the list of missing packages.
-echo "Finding missing packages..."
-# Intentionally leaving $packages unquoted so it's more readable.
-echo "Packages required: " $packages
-echo
-new_list_cmd="sudo apt-get install --reinstall $(echo $packages)"
-if new_list="$(yes n | LANGUAGE=en LANG=C $new_list_cmd)"; then
-  # We probably never hit the following line.
-  echo "No missing packages, and the packages are up to date."
-elif [ $? -eq 1 ]; then
-  # We expect apt-get to have exit status of 1.
-  # This indicates that we cancelled the install with "yes n|".
-  new_list=$(echo "$new_list" |
-    sed -e '1,/The following NEW packages will be installed:/d;s/^  //;t;d')
-  new_list=$(echo "$new_list" | sed 's/ *$//')
-  if [ -z "$new_list" ] ; then
-    echo "No missing packages, and the packages are up to date."
-  else
-    echo "Installing missing packages: $new_list."
-    sudo apt-get install ${do_quietly-} ${new_list}
-  fi
-  echo
-else
-  # An apt-get exit status of 100 indicates that a real error has occurred.
-
-  # I am intentionally leaving out the quotes around new_list_cmd,
-  # as this makes it easier to cut and paste the output.
-  echo "The following command failed: " ${new_list_cmd}
-  echo
-  echo "It produces the following output:"
-  yes n | $new_list_cmd || true
-  echo
-  echo "You will have to install the above packages yourself."
-  echo
-  exit 100
-fi
-
-# Install the Chrome OS default fonts. This must go after running
-# apt-get, since install-chromeos-fonts depends on curl.
-if test "$do_inst_chromeos_fonts" != "0"; then
-  echo
-  echo "Installing Chrome OS fonts."
-  dir=`echo $0 | sed -r -e 's/\/[^/]+$//'`
-  if ! sudo $dir/linux/install-chromeos-fonts.py; then
-    echo "ERROR: The installation of the Chrome OS default fonts failed."
-    if [ `stat -f -c %T $dir` == "nfs" ]; then
-      echo "The reason is that your repo is installed on a remote file system."
-    else
-      echo "This is expected if your repo is installed on a remote file system."
-    fi
-    echo "It is recommended to install your repo on a local file system."
-    echo "You can skip the installation of the Chrome OS default founts with"
-    echo "the command line option: --no-chromeos-fonts."
-    exit 1
-  fi
-else
-  echo "Skipping installation of Chrome OS fonts."
-fi
-
-echo "Installing locales."
-CHROMIUM_LOCALES="da_DK.UTF-8 fr_FR.UTF-8 he_IL.UTF-8 zh_TW.UTF-8"
-LOCALE_GEN=/etc/locale.gen
-if [ -e ${LOCALE_GEN} ]; then
-  OLD_LOCALE_GEN="$(cat /etc/locale.gen)"
-  for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do
-    sudo sed -i "s/^# ${CHROMIUM_LOCALE}/${CHROMIUM_LOCALE}/" ${LOCALE_GEN}
-  done
-  # Regenerating locales can take a while, so only do it if we need to.
-  if (echo "${OLD_LOCALE_GEN}" | cmp -s ${LOCALE_GEN}); then
-    echo "Locales already up-to-date."
-  else
-    sudo locale-gen
-  fi
-else
-  for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do
-    sudo locale-gen ${CHROMIUM_LOCALE}
-  done
-fi
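
The comment above credits "sort -r -s -t: -k2" with pushing the ":i386" entries to the front of the combined package list so dpkg-query is not confused (crbug.com/446172). A minimal Python sketch of that "arch-qualified packages first" idea, with illustrative package names; it approximates the effect of the shell pipeline rather than reproducing it:

    # Stable sort: de-duplicate and alphabetize, then move any name that
    # carries an arch qualifier (":") ahead of the plain ones.
    packages = ["zip", "libpci3:i386", "libx11-xcb1:i386", "apache2-bin", "zip"]
    ordered = sorted(sorted(set(packages)), key=lambda p: ":" not in p)
    print(" ".join(ordered))
    # libpci3:i386 libx11-xcb1:i386 apache2-bin zip
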
diff --git a/build/install-chroot.sh b/build/install-chroot.sh
deleted file mode 100755
index d76d535..0000000
--- a/build/install-chroot.sh
+++ /dev/null
@@ -1,888 +0,0 @@
-#!/bin/bash -e
-
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This script installs Debian-derived distributions in a chroot environment.
-# It can, for example, be used to have an accurate 32bit build and test
-# environment when otherwise working on a 64bit machine.
-# N. B. it is unlikely that this script will ever work on anything other than a
-# Debian-derived system.
-
-# Older Debian based systems had both "admin" and "adm" groups, with "admin"
-# apparently being used in more places. Newer distributions have standardized
-# on just the "adm" group. Check /etc/group for the preferred name of the
-# administrator group.
-admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm)
-
-usage() {
-  echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]"
-  echo "-b dir       additional directories that should be bind mounted,"
-  echo '             or "NONE".'
-  echo "             Default: if local filesystems present, ask user for help"
-  echo "-g group,... groups that can use the chroot unauthenticated"
-  echo "             Default: '${admin}' and current user's group ('$(id -gn)')"
-  echo "-l           List all installed chroot environments"
-  echo "-m mirror    an alternate repository mirror for package downloads"
-  echo "-s           configure default deb-srcs"
-  echo "-c           always copy 64bit helper binaries to 32bit chroot"
-  echo "-h           this help message"
-}
-
-process_opts() {
-  local OPTNAME OPTIND OPTERR OPTARG
-  while getopts ":b:g:lm:sch" OPTNAME; do
-    case "$OPTNAME" in
-      b)
-        if [ "${OPTARG}" = "NONE" -a -z "${bind_mounts}" ]; then
-          bind_mounts="${OPTARG}"
-        else
-          if [ "${bind_mounts}" = "NONE" -o "${OPTARG}" = "${OPTARG#/}" -o \
-               ! -d "${OPTARG}" ]; then
-            echo "Invalid -b option(s)"
-            usage
-            exit 1
-          fi
-          bind_mounts="${bind_mounts}
-${OPTARG} ${OPTARG} none rw,bind 0 0"
-        fi
-        ;;
-      g)
-        [ -n "${OPTARG}" ] &&
-          chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}"
-        ;;
-      l)
-        list_all_chroots
-        exit
-        ;;
-      m)
-        if [ -n "${mirror}" ]; then
-          echo "You can only specify exactly one mirror location"
-          usage
-          exit 1
-        fi
-        mirror="$OPTARG"
-        ;;
-      s)
-        add_srcs="y"
-        ;;
-      c)
-        copy_64="y"
-        ;;
-      h)
-        usage
-        exit 0
-        ;;
-      \:)
-        echo "'-$OPTARG' needs an argument."
-        usage
-        exit 1
-        ;;
-      *)
-        echo "invalid command-line option: $OPTARG"
-        usage
-        exit 1
-        ;;
-    esac
-  done
-
-  if [ $# -ge ${OPTIND} ]; then
-    eval echo "Unexpected command line argument: \${${OPTIND}}"
-    usage
-    exit 1
-  fi
-}
-
-list_all_chroots() {
-  for i in /var/lib/chroot/*; do
-    i="${i##*/}"
-    [ "${i}" = "*" ] && continue
-    [ -x "/usr/local/bin/${i%bit}" ] || continue
-    grep -qs "^\[${i%bit}\]\$" /etc/schroot/schroot.conf || continue
-    [ -r "/etc/schroot/script-${i}" -a \
-      -r "/etc/schroot/mount-${i}" ] || continue
-    echo "${i%bit}"
-  done
-}
-
-getkey() {
-  (
-    trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT HUP
-    stty -echo iuclc -icanon 2>/dev/null
-    dd count=1 bs=1 2>/dev/null
-  )
-}
-
-chr() {
-  printf "\\$(printf '%03o' "$1")"
-}
-
-ord() {
-  printf '%d' $(printf '%c' "$1" | od -tu1 -An)
-}
-
-is_network_drive() {
-  stat -c %T -f "$1/" 2>/dev/null |
-    egrep -qs '^nfs|cifs|smbfs'
-}
-
-# Check that we are running as a regular user
-[ "$(id -nu)" = root ] && {
-  echo "Run this script as a regular user and provide your \"sudo\""           \
-       "password if requested" >&2
-  exit 1
-}
-
-process_opts "$@"
-
-echo "This script will help you through the process of installing a"
-echo "Debian or Ubuntu distribution in a chroot environment. You will"
-echo "have to provide your \"sudo\" password when requested."
-echo
-
-# Error handler
-trap 'exit 1' INT TERM QUIT HUP
-trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT
-
-# Install any missing applications that this script relies on. If these packages
-# are already installed, don't force another "apt-get install". That would
-# prevent them from being auto-removed, if they ever become eligible for that.
-# And as this script only needs the packages once, there is no good reason to
-# introduce a hard dependency on things such as dchroot and debootstrap.
-dep=
-for i in dchroot debootstrap libwww-perl; do
-  [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
-done
-[ -n "$dep" ] && sudo apt-get -y install $dep
-sudo apt-get -y install schroot
-
-# Create directory for chroot
-sudo mkdir -p /var/lib/chroot
-
-# Find chroot environments that can be installed with debootstrap
-targets="$(cd /usr/share/debootstrap/scripts
-           ls | grep '^[a-z]*$')"
-
-# Ask user to pick one of the available targets
-echo "The following targets are available to be installed in a chroot:"
-j=1; for i in $targets; do
-  printf '%4d: %s\n' "$j" "$i"
-  j=$(($j+1))
-done
-while :; do
-  printf "Which target would you like to install: "
-  read n
-  [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break
-done
-j=1; for i in $targets; do
-  [ "$j" -eq "$n" ] && { distname="$i"; break; }
-  j=$(($j+1))
-done
-echo
-
-# On x86-64, ask whether the user wants to install x86-32 or x86-64
-archflag=
-arch=
-if [ "$(uname -m)" = x86_64 ]; then
-  while :; do
-    echo "You are running a 64bit kernel. This allows you to install either a"
-    printf "32bit or a 64bit chroot environment. %s"                           \
-           "Which one do you want (32, 64) "
-    read arch
-    [ "${arch}" == 32 -o "${arch}" == 64 ] && break
-  done
-  [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64"
-  arch="${arch}bit"
-  echo
-fi
-target="${distname}${arch}"
-
-# Don't accidentally overwrite an existing installation
-[ -d /var/lib/chroot/"${target}" ] && {
-  while :; do
-    echo "This chroot already exists on your machine."
-    if schroot -l --all-sessions 2>&1 |
-       sed 's/^session://' |
-       grep -qs "^${target%bit}-"; then
-      echo "And it appears to be in active use. Terminate all programs that"
-      echo "are currently using the chroot environment and then re-run this"
-      echo "script."
-      echo "If you still get an error message, you might have stale mounts"
-      echo "that you forgot to delete. You can always clean up mounts by"
-      echo "executing \"${target%bit} -c\"."
-      exit 1
-    fi
-    echo "I can abort installation, I can overwrite the existing chroot,"
-    echo "or I can delete the old one and then exit. What would you like to"
-    printf "do (a/o/d)? "
-    read choice
-    case "${choice}" in
-      a|A) exit 1;;
-      o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;;
-      d|D) sudo rm -rf "/var/lib/chroot/${target}"      \
-                       "/usr/local/bin/${target%bit}"   \
-                       "/etc/schroot/mount-${target}"   \
-                       "/etc/schroot/script-${target}"  \
-                       "/etc/schroot/${target}"
-           sudo sed -ni '/^[[]'"${target%bit}"']$/,${
-                         :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
-                       "/etc/schroot/schroot.conf"
-           trap '' INT TERM QUIT HUP
-           trap '' EXIT
-           echo "Deleted!"
-           exit 0;;
-    esac
-  done
-  echo
-}
-sudo mkdir -p /var/lib/chroot/"${target}"
-
-# Offer to include additional standard repositories for Ubuntu-based chroots.
-alt_repos=
-grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" && {
-  while :; do
-    echo "Would you like to add ${distname}-updates and ${distname}-security "
-    printf "to the chroot's sources.list (y/n)? "
-    read alt_repos
-    case "${alt_repos}" in
-      y|Y)
-        alt_repos="y"
-        break
-      ;;
-      n|N)
-        break
-      ;;
-    esac
-  done
-  echo
-}
-
-# Check for non-standard file system mount points and ask the user whether
-# they should be imported into the chroot environment.
-# We limit to the first 26 mount points that match some basic heuristics,
-# because a) that allows us to enumerate choices with a single character,
-# and b) if we find more than 26 mount points, then these are probably
-# false-positives and something is very unusual about the system's
-# configuration. No need to spam the user with even more information that
-# is likely completely irrelevant.
-if [ -z "${bind_mounts}" ]; then
-  mounts="$(awk '$2 != "/" && $2 !~ "^/boot" && $2 !~ "^/home" &&
-                 $2 !~ "^/media" && $2 !~ "^/run" &&
-                 ($3 ~ "ext[2-4]" || $3 == "reiserfs" || $3 == "btrfs" ||
-                 $3 == "xfs" || $3 == "jfs" || $3 == "u?msdos" ||
-                 $3 == "v?fat" || $3 == "hfs" || $3 == "ntfs" ||
-                 $3 ~ "nfs[4-9]?" || $3 == "smbfs" || $3 == "cifs") {
-                   print $2
-                 }' /proc/mounts |
-            head -n26)"
-  if [ -n "${mounts}" ]; then
-    echo "You appear to have non-standard mount points that you"
-    echo "might want to import into the chroot environment:"
-    echo
-    sel=
-    while :; do
-      # Print a menu, listing all non-default mounts of local or network
-      # file systems.
-      j=1; for m in ${mounts}; do
-        c="$(printf $(printf '\\%03o' $((64+$j))))"
-        echo "$sel" | grep -qs $c &&
-          state="mounted in chroot" || state="$(tput el)"
-        printf "   $c) %-40s${state}\n" "$m"
-        j=$(($j+1))
-      done
-      # Allow user to interactively (de-)select any of the entries
-      echo
-      printf "Select mount points that you want to be included or press %s" \
-             "SPACE to continue"
-      c="$(getkey | tr a-z A-Z)"
-      [ "$c" == " " ] && { echo; echo; break; }
-      if [ -z "$c" ] ||
-         [ "$c" '<' 'A' -o $(ord "$c") -gt $((64 + $(ord "$j"))) ]; then
-          # Invalid input, ring the console bell
-          tput bel
-      else
-        # Toggle the selection for the given entry
-        if echo "$sel" | grep -qs $c; then
-          sel="$(printf "$sel" | sed "s/$c//")"
-        else
-          sel="$sel$c"
-        fi
-      fi
-      # Reposition cursor to the top of the list of entries
-      tput cuu $(($j + 1))
-      echo
-    done
-  fi
-  j=1; for m in ${mounts}; do
-    c="$(chr $(($j + 64)))"
-    if echo "$sel" | grep -qs $c; then
-      bind_mounts="${bind_mounts}$m $m none rw,bind 0 0
-"
-    fi
-    j=$(($j+1))
-  done
-fi
-
-# Remove stale entry from /etc/schroot/schroot.conf. Entries start
-# with the target name in square brackets, followed by an arbitrary
-# number of lines. The entry stops when either the end of file has
-# been reached, or when the beginning of a new target is encountered.
-# This means, we cannot easily match for a range of lines in
-# "sed". Instead, we actually have to iterate over each line and check
-# whether it is the beginning of a new entry.
-sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p'       \
-         /etc/schroot/schroot.conf
-
-# Download base system. This takes some time
-if [ -z "${mirror}" ]; then
- grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
-   mirror="http://archive.ubuntu.com/ubuntu" ||
-   mirror="http://ftp.us.debian.org/debian"
-fi
-
-sudo ${http_proxy:+http_proxy="${http_proxy}"} debootstrap ${archflag} \
-    "${distname}" "/var/lib/chroot/${target}"  "$mirror"
-
-# Add new entry to /etc/schroot/schroot.conf
-grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
-  brand="Ubuntu" || brand="Debian"
-if [ -z "${chroot_groups}" ]; then
-  chroot_groups="${admin},$(id -gn)"
-fi
-
-if [ -d '/etc/schroot/default' ]; then
-  new_version=1
-  fstab="/etc/schroot/${target}/fstab"
-else
-  new_version=0
-  fstab="/etc/schroot/mount-${target}"
-fi
-
-if [ "$new_version" = "1" ]; then
-  sudo cp -ar /etc/schroot/default /etc/schroot/${target}
-
-  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
-[${target%bit}]
-description=${brand} ${distname} ${arch}
-type=directory
-directory=/var/lib/chroot/${target}
-users=root
-groups=${chroot_groups}
-root-groups=${chroot_groups}
-personality=linux$([ "${arch}" != 64bit ] && echo 32)
-profile=${target}
-
-EOF
-  [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
-    printf "${bind_mounts}" |
-      sudo sh -c "cat >>${fstab}"
-else
-  # Older versions of schroot wanted a "priority=" line, whereas recent
-  # versions deprecate "priority=" and warn if they see it. We don't have
-  # a good feature test, but scanning for the string "priority=" in the
-  # existing "schroot.conf" file is a good indication of what to do.
-  priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
-           echo 'priority=3' || :)
-  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
-[${target%bit}]
-description=${brand} ${distname} ${arch}
-type=directory
-directory=/var/lib/chroot/${target}
-users=root
-groups=${chroot_groups}
-root-groups=${chroot_groups}
-personality=linux$([ "${arch}" != 64bit ] && echo 32)
-script-config=script-${target}
-${priority}
-
-EOF
-
-  # Set up a list of mount points that is specific to this
-  # chroot environment.
-  sed '/^FSTAB=/s,"[^"]*","'"${fstab}"'",' \
-           /etc/schroot/script-defaults |
-    sudo sh -c 'cat >/etc/schroot/script-'"${target}"
-  sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
-    /etc/schroot/mount-defaults |
-    sudo sh -c "cat > ${fstab}"
-fi
-
-# Add the extra mount points that the user told us about
-[ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
-  printf "${bind_mounts}" |
-    sudo sh -c 'cat >>'"${fstab}"
-
-# If this system has a "/media" mountpoint, import it into the chroot
-# environment. Most modern distributions use this mount point to
-# automatically mount devices such as CDROMs, USB sticks, etc...
-if [ -d /media ] &&
-   ! grep -qs '^/media' "${fstab}"; then
-  echo '/media /media none rw,rbind 0 0' |
-    sudo sh -c 'cat >>'"${fstab}"
-fi
-
-# Share /dev/shm, /run and /run/shm.
-grep -qs '^/dev/shm' "${fstab}" ||
-  echo '/dev/shm /dev/shm none rw,bind 0 0' |
-    sudo sh -c 'cat >>'"${fstab}"
-if [ ! -d "/var/lib/chroot/${target}/run" ] &&
-   ! grep -qs '^/run' "${fstab}"; then
-  echo '/run /run none rw,bind 0 0' |
-    sudo sh -c 'cat >>'"${fstab}"
-fi
-if ! grep -qs '^/run/shm' "${fstab}"; then
-  { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
-                   echo '/dev/shm /run/shm none rw,bind 0 0'; } |
-    sudo sh -c 'cat >>'"${fstab}"
-fi
-
-# Set up a special directory that changes contents depending on the target
-# that is executing.
-d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
-s="${d}/.${target}"
-echo "${s} ${d} none rw,bind 0 0" |
-  sudo sh -c 'cat >>'"${target}"
-mkdir -p "${s}"
-
-# Install a helper script to launch commands in the chroot
-sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
-#!/bin/bash
-
-chroot="${0##*/}"
-
-wrap() {
-  # Word-wrap the text passed-in on stdin. Optionally, on continuation lines
-  # insert the same number of spaces as the number of characters in the
-  # parameter(s) passed to this function.
-  # If the "fold" program cannot be found, or if the actual width of the
-  # terminal cannot be determined, this function doesn't attempt to do any
-  # wrapping.
-  local f="$(type -P fold)"
-  [ -z "${f}" ] && { cat; return; }
-  local c="$(stty -a </dev/tty 2>/dev/null |
-             sed 's/.*columns[[:space:]]*\([0-9]*\).*/\1/;t;d')"
-  [ -z "${c}" ] && { cat; return; }
-  local i="$(echo "$*"|sed 's/./ /g')"
-  local j="$(printf %s "${i}"|wc -c)"
-  if [ "${c}" -gt "${j}" ]; then
-    dd bs=1 count="${j}" 2>/dev/null
-    "${f}" -sw "$((${c}-${j}))" | sed '2,$s/^/'"${i}"'/'
-  else
-    "${f}" -sw "${c}"
-  fi
-}
-
-help() {
-  echo "Usage ${0##*/} [-h|--help] [-c|--clean] [-C|--clean-all] [-l|--list] [--] args" | wrap "Usage ${0##*/} "
-  echo "  help:      print this message"                                                | wrap "             "
-  echo "  list:      list all known chroot environments"                                | wrap "             "
-  echo "  clean:     remove all old chroot sessions for \"${chroot}\""                  | wrap "             "
-  echo "  clean-all: remove all old chroot sessions for all environments"               | wrap "             "
-  exit 0
-}
-
-clean() {
-  local s t rc
-  rc=0
-  for s in $(schroot -l --all-sessions); do
-    if [ -n "$1" ]; then
-      t="${s#session:}"
-      [ "${t#${chroot}-}" == "${t}" ] && continue
-    fi
-    if ls -l /proc/*/{cwd,fd} 2>/dev/null |
-       fgrep -qs "/var/lib/schroot/mount/${t}"; then
-      echo "Session \"${t}\" still has active users, not cleaning up" | wrap
-      rc=1
-      continue
-    fi
-    sudo schroot -c "${s}" -e || rc=1
-  done
-  exit ${rc}
-}
-
-list() {
-  for e in $(schroot -l); do
-    e="${e#chroot:}"
-    [ -x "/usr/local/bin/${e}" ] || continue
-    if schroot -l --all-sessions 2>/dev/null |
-       sed 's/^session://' |
-       grep -qs "^${e}-"; then
-      echo "${e} is currently active"
-    else
-      echo "${e}"
-    fi
-  done
-  exit 0
-}
-
-while [ "$#" -ne 0 ]; do
-  case "$1" in
-    --)             shift; break;;
-    -h|--help)      shift; help;;
-    -l|--list)      shift; list;;
-    -c|--clean)     shift; clean "${chroot}";;
-    -C|--clean-all) shift; clean;;
-    *)              break;;
-  esac
-done
-
-# Start a new chroot session and keep track of the session id. We inject this
-# id into all processes that run inside the chroot. Unless they go out of their
-# way to clear their environment, we can then later identify our child and
-# grand-child processes by scanning their environment.
-session="$(schroot -c "${chroot}" -b)"
-export CHROOT_SESSION_ID="${session}"
-
-# Set GOMA_TMP_DIR for better handling of goma inside chroot.
-export GOMA_TMP_DIR="/tmp/goma_tmp_$CHROOT_SESSION_ID"
-mkdir -p "$GOMA_TMP_DIR"
-
-if [ $# -eq 0 ]; then
-  # Run an interactive shell session
-  schroot -c "${session}" -r -p
-else
-  # Run a command inside of the chroot environment
-  p="$1"; shift
-  schroot -c "${session}" -r -p "$p" -- "$@"
-fi
-rc=$?
-
-# Compute the inode of the root directory inside of the chroot environment.
-i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. |
-     awk '{ print $1 }') 2>/dev/null
-other_pids=
-while [ -n "$i" ]; do
-  # Identify processes by the inode number of their root directory. Then
-  # remove all processes that we know belong to other sessions. We use
-  # "sort | uniq -u" to do what amounts to a "set subtraction operation".
-  pids=$({ ls -id1 /proc/*/root/. 2>/dev/null |
-         sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
-                 t
-                 d';
-         echo "${other_pids}";
-         echo "${other_pids}"; } | sort | uniq -u) >/dev/null 2>&1
-  # Kill all processes that are still left running in the session. This is
-  # typically an assortment of daemon processes that were started
-  # automatically. They result in us being unable to tear down the session
-  # cleanly.
-  [ -z "${pids}" ] && break
-  for j in $pids; do
-    # Unfortunately, the way that schroot sets up sessions has the
-    # side-effect of being unable to tell one session apart from another.
-    # This can result in us attempting to kill processes in other sessions.
-    # We make a best-effort to avoid doing so.
-    k="$( ( xargs -0 -n1 </proc/$j/environ ) 2>/dev/null |
-         sed 's/^CHROOT_SESSION_ID=/x/;t1;d;:1;q')"
-    if [ -n "${k}" -a "${k#x}" != "${session}" ]; then
-      other_pids="${other_pids}
-${j}"
-      continue
-    fi
-    kill -9 $j
-  done
-done
-# End the chroot session. This should clean up all temporary files. But if we
-# earlier failed to terminate all (daemon) processes inside of the session,
-# deleting the session could fail. When that happens, the user has to manually
-# clean up the stale files by invoking us with "--clean" after having killed
-# all running processes.
-schroot -c "${session}" -e
-# Since no goma processes are running, we can remove goma directory.
-rm -rf "$GOMA_TMP_DIR"
-exit $rc
-EOF
-sudo chown root:root /usr/local/bin/"${target%bit}"
-sudo chmod 755 /usr/local/bin/"${target%bit}"
-
-# Add the standard Ubuntu update repositories if requested.
-[ "${alt_repos}" = "y" -a \
-  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
-sudo sed -i '/^deb .* [^ -]\+ main$/p
-             s/^\(deb .* [^ -]\+\) main/\1-security main/
-             p
-             t1
-             d
-             :1;s/-security main/-updates main/
-             t
-             d' "/var/lib/chroot/${target}/etc/apt/sources.list"
-
-# Add a few more repositories to the chroot
-[ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
-sudo sed -i 's/ main$/ main restricted universe multiverse/' \
-         "/var/lib/chroot/${target}/etc/apt/sources.list"
-
-# Add the Ubuntu "partner" repository, if available
-if [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
-   HEAD "http://archive.canonical.com/ubuntu/dists/${distname}/partner" \
-   >&/dev/null; then
-  sudo sh -c '
-    echo "deb http://archive.canonical.com/ubuntu" \
-         "'"${distname}"' partner" \
-      >>"/var/lib/chroot/'"${target}"'/etc/apt/sources.list"'
-fi
-
-# Add source repositories, if the user requested we do so
-[ "${add_srcs}" = "y" -a \
-  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
-sudo sed -i '/^deb[^-]/p
-             s/^deb\([^-]\)/deb-src\1/' \
-         "/var/lib/chroot/${target}/etc/apt/sources.list"
-
-# Set apt proxy if host has set http_proxy
-if [ -n "${http_proxy}" ]; then
-  sudo sh -c '
-    echo "Acquire::http::proxy \"'"${http_proxy}"'\";" \
-        >>"/var/lib/chroot/'"${target}"'/etc/apt/apt.conf"'
-fi
-
-# Update packages
-sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
-  apt-get update; apt-get -y dist-upgrade' || :
-
-# Install a couple of missing packages
-for i in debian-keyring ubuntu-keyring locales sudo; do
-  [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] ||
-    sudo "/usr/local/bin/${target%bit}" apt-get -y install "$i" || :
-done
-
-# Configure locales
-sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
-  l='"${LANG:-en_US}"'; l="${l%%.*}"
-  [ -r /etc/locale.gen ] &&
-    sed -i "s/^# \($l\)/\1/" /etc/locale.gen
-  locale-gen $LANG en_US en_US.UTF-8' || :
-
-# Enable multi-arch support, if available
-sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null &&
-  [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && {
-  sudo sed -i 's/ / [arch=amd64,i386] /' \
-              "/var/lib/chroot/${target}/etc/apt/sources.list"
-  [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] &&
-  sudo "/usr/local/bin/${target%bit}" dpkg --add-architecture \
-      $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) >&/dev/null ||
-    echo foreign-architecture \
-        $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) |
-      sudo sh -c \
-        "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'"
-}
-
-# Configure "sudo" package
-sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
-  egrep -qs '"'^$(id -nu) '"' /etc/sudoers ||
-  echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers'
-
-# Install a few more commonly used packages
-sudo "/usr/local/bin/${target%bit}" apt-get -y install                         \
-  autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool     \
-  lsof strace
-
-# If running a 32bit environment on a 64bit machine, install a few binaries
-# as 64bit. This is only done automatically if the chroot distro is the same as
-# the host, otherwise there might be incompatibilities in build settings or
-# runtime dependencies. The user can force it with the '-c' flag.
-host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \
-  cut -d "=" -f 2)
-if [ "${copy_64}" = "y" -o \
-    "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
-    file /bin/bash 2>/dev/null | grep -q x86-64; then
-  readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \
-    'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
-  sudo "/usr/local/bin/${target%bit}" apt-get -y install                       \
-    lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1 lib64stdc++6
-  dep=
-  for i in binutils gdb; do
-    [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
-  done
-  [ -n "$dep" ] && sudo apt-get -y install $dep
-  sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64"
-  for i in libbfd libpython; do
-    lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
-           grep -s "$i" | awk '{ print $3 }')"
-    if [ -n "$lib" -a -r "$lib" ]; then
-      sudo cp "$lib" "/var/lib/chroot/${target}/usr/local/lib/amd64"
-    fi
-  done
-  for lib in libssl libcrypt; do
-    for path in /usr/lib /usr/lib/x86_64-linux-gnu; do
-      sudo cp $path/$lib* \
-              "/var/lib/chroot/${target}/usr/local/lib/amd64/" >&/dev/null || :
-    done
-  done
-  for i in gdb ld; do
-    sudo cp /usr/bin/$i "/var/lib/chroot/${target}/usr/local/lib/amd64/"
-    sudo sh -c "cat >'/var/lib/chroot/${target}/usr/local/bin/$i'" <<EOF
-#!/bin/sh
-exec /lib64/ld-linux-x86-64.so.2 --library-path /usr/local/lib/amd64 \
-  /usr/local/lib/amd64/$i "\$@"
-EOF
-    sudo chmod 755 "/var/lib/chroot/${target}/usr/local/bin/$i"
-  done
-fi
-
-
-# If the install-build-deps.sh script can be found, offer to run it now
-script="$(dirname $(readlink -f "$0"))/install-build-deps.sh"
-if [ -x "${script}" ]; then
-  while :; do
-    echo
-    echo "If you plan on building Chrome inside of the new chroot environment,"
-    echo "you now have to install the build dependencies. Do you want me to"
-    printf "start the script that does this for you (y/n)? "
-    read install_deps
-    case "${install_deps}" in
-      y|Y)
-        echo
-        # We prefer running the script in-place, but this might not be
-        # possible, if it lives on a network filesystem that denies
-        # access to root.
-        tmp_script=
-        if ! sudo /usr/local/bin/"${target%bit}" \
-            sh -c "[ -x '${script}' ]" >&/dev/null; then
-          tmp_script="/tmp/${script##*/}"
-          cp "${script}" "${tmp_script}"
-        fi
-        # Some distributions automatically start an instance of the system-
-        # wide dbus daemon, cron daemon, or logging daemon when installing
-        # the Chrome build dependencies. This prevents the chroot session
-        # from being closed. So, we always try to shut down any running
-        # instance of dbus and rsyslog.
-        sudo /usr/local/bin/"${target%bit}" sh -c "${script};
-              rc=$?;
-              /etc/init.d/cron stop >/dev/null 2>&1 || :;
-              /etc/init.d/rsyslog stop >/dev/null 2>&1 || :;
-              /etc/init.d/dbus stop >/dev/null 2>&1 || :;
-              exit $rc"
-        rc=$?
-        [ -n "${tmp_script}" ] && rm -f "${tmp_script}"
-        [ $rc -ne 0 ] && exit $rc
-        break
-      ;;
-      n|N)
-        break
-      ;;
-    esac
-  done
-  echo
-fi
-
-# Check whether ~/chroot is on a (slow) network file system and offer to
-# relocate it. Also offer relocation, if the user appears to have multiple
-# spindles (as indicated by "${bind_mount}" being non-empty).
-# We only offer this option, if it doesn't look as if a chroot environment
-# is currently active. Otherwise, relocation is unlikely to work and it
-# can be difficult for the user to recover from the failed attempt to relocate
-# the ~/chroot directory.
-# We don't aim to solve this problem for every configuration,
-# but try to help with the common cases. For more advanced configuration
-# options, the user can always manually adjust things.
-mkdir -p "${HOME}/chroot/"
-if [ ! -h "${HOME}/chroot" ] &&
-   ! egrep -qs '^[^[:space:]]*/chroot' /etc/fstab &&
-   { [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] ||
-     is_network_drive "${HOME}/chroot"; } &&
-   ! egrep -qs '/var/lib/[^/]*chroot/.*/chroot' /proc/mounts; then
-  echo "${HOME}/chroot is currently located on the same device as your"
-  echo "home directory."
-  echo "This might not be what you want. Do you want me to move it somewhere"
-  echo "else?"
-  # If the computer has multiple spindles, many users configure all or part of
-  # the secondary hard disk to be writable by the primary user of this machine.
-  # Make some reasonable effort to detect this type of configuration and
-  # then offer a good location for where to put the ~/chroot directory.
-  suggest=
-  for i in $(echo "${bind_mounts}"|cut -d ' ' -f 1); do
-    if [ -d "$i" -a -w "$i" -a \( ! -a "$i/chroot" -o -w "$i/chroot/." \) ] &&
-       ! is_network_drive "$i"; then
-      suggest="$i"
-    else
-      for j in "$i/"*; do
-        if [ -d "$j" -a -w "$j" -a \
-             \( ! -a "$j/chroot" -o -w "$j/chroot/." \) ] &&
-           ! is_network_drive "$j"; then
-          suggest="$j"
-        else
-          for k in "$j/"*; do
-            if [ -d "$k" -a -w "$k" -a \
-                 \( ! -a "$k/chroot" -o -w "$k/chroot/." \) ] &&
-               ! is_network_drive "$k"; then
-              suggest="$k"
-              break
-            fi
-          done
-        fi
-        [ -n "${suggest}" ] && break
-      done
-    fi
-    [ -n "${suggest}" ] && break
-  done
-  def_suggest="${HOME}"
-  if [ -n "${suggest}" ]; then
-    # For home directories that reside on network drives, make our suggestion
-    # the default option. For home directories that reside on a local drive,
-    # require that the user manually enters the new location.
-    if is_network_drive "${HOME}"; then
-      def_suggest="${suggest}"
-    else
-      echo "A good location would probably be in \"${suggest}\""
-    fi
-  fi
-  while :; do
-    printf "Physical location [${def_suggest}]: "
-    read dir
-    [ -z "${dir}" ] && dir="${def_suggest}"
-    [ "${dir%%/}" == "${HOME%%/}" ] && break
-    if ! [ -d "${dir}" -a -w "${dir}" ] ||
-       [ -a "${dir}/chroot" -a ! -w "${dir}/chroot/." ]; then
-      echo "Cannot write to ${dir}/chroot. Please try again"
-    else
-      mv "${HOME}/chroot" "${dir}/chroot"
-      ln -s "${dir}/chroot" "${HOME}/chroot"
-      for i in $(list_all_chroots); do
-        sudo "$i" mkdir -p "${dir}/chroot"
-      done
-      sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-*
-      break
-    fi
-  done
-fi
-
-# Clean up package files
-sudo schroot -c "${target%bit}" -p -- apt-get clean
-sudo apt-get clean
-
-trap '' INT TERM QUIT HUP
-trap '' EXIT
-
-# Let the user know what we did
-cat <<EOF
-
-
-Successfully installed ${distname} ${arch}
-
-You can run programs inside of the chroot by invoking the
-"/usr/local/bin/${target%bit}" command.
-
-This command can be used with arguments, in order to just run a single
-program inside of the chroot environment (e.g. "${target%bit} make chrome")
-or without arguments, in order to run an interactive shell session inside
-of the chroot environment.
-
-If you need to run things as "root", you can use "sudo" (e.g. try
-"sudo ${target%bit} apt-get update").
-
-Your home directory is shared between the host and the chroot. But I
-configured "${HOME}/chroot" to be private to the chroot environment.
-You can use it for files that need to differ between environments. This
-would be a good place to store binaries that you have built from your
-source files.
-
-For Chrome, this probably means you want to make your "out" directory a
-symbolic link that points somewhere inside of "${HOME}/chroot".
-
-You still need to run "gclient runhooks" whenever you switch from building
-outside of the chroot to inside of the chroot. But you will find that you
-don't have to repeatedly erase and then completely rebuild all your object
-and binary files.
-
-EOF
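
The cleanup pass in the wrapper script above tracks down leftover processes by the CHROOT_SESSION_ID value it exports into the session, in addition to the root-inode heuristic. A minimal Python sketch of that environment scan; the session id shown is made up:

    import os

    def pids_in_session(session_id):
        """Return PIDs whose environment carries the given CHROOT_SESSION_ID."""
        marker = ("CHROOT_SESSION_ID=%s" % session_id).encode()
        pids = []
        for entry in os.listdir("/proc"):
            if not entry.isdigit():
                continue
            try:
                with open("/proc/%s/environ" % entry, "rb") as f:
                    env = f.read().split(b"\0")
            except OSError:
                continue  # process exited, or we are not allowed to read it
            if marker in env:
                pids.append(int(entry))
        return pids

    print(pids_in_session("jessie64bit-0123abcd"))
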
diff --git a/build/internal/README.chromium b/build/internal/README.chromium
deleted file mode 100644
index 4624830..0000000
--- a/build/internal/README.chromium
+++ /dev/null
@@ -1,24 +0,0 @@
-Internal property sheets:
-  essential.vsprops
-    Contains the common settings used throughout the projects. Is included by either ..\debug.vsprops or ..\release.vsprops, so in general, it is not included directly.
-
-  release_defaults.vsprops
-    Included by ..\release.vsprops. Its settings are overridden by release_impl$(CHROME_BUILD_TYPE).vsprops. Uses the default VS setting, which is "Maximize Speed". Results in a relatively fast build with a reasonable optimization level but without whole program optimization, to reduce build time.
-
-  release_impl.vsprops
-    Included by ..\release.vsprops by default when CHROME_BUILD_TYPE is undefined. Includes release_defaults.vsprops.
-
-  release_impl_checksenabled.vsprops
-    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_checksenabled. Matches what release_defaults.vsprops does, but doesn't actually inherit from it as we couldn't quite get that working. The only difference is that _DEBUG is set instead of NDEBUG. Used for keeping debug checks enabled with a build that is fast enough to dogfood with.
-
-  release_impl_official.vsprops
-    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_official. Includes release_defaults.vsprops. Enables Whole Program Optimizations (WPO), which doubles the build time. Results in much more optimized build. Uses "Full Optimization" and "Flavor small code".
-
-  release_impl_pgo_instrument.vsprops
-    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_instrument. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) instrumentation (first pass). Uses "Full Optimization" and "Flavor small code".
-
-  release_impl_pgo_optimize.vsprops
-    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_optimize. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) optimization (second pass). Uses "Full Optimization" and "Flavor small code".
-
-  release_impl_purify.vsprops
-    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_purify. Includes release_defaults.vsprops. Disables optimizations. Used with Purify to test without debug tools and without optimization; i.e. NDEBUG is defined but the compiler doesn't optimize the binary.
diff --git a/build/ios/OWNERS b/build/ios/OWNERS
deleted file mode 100644
index 40a68c7..0000000
--- a/build/ios/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-rohitrao@chromium.org
diff --git a/build/ios/chrome_ios.croc b/build/ios/chrome_ios.croc
deleted file mode 100644
index 938a2e9..0000000
--- a/build/ios/chrome_ios.croc
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- python -*-
-# Crocodile config file for Chromium iOS.
-#
-# Note that Chromium iOS also uses the config file at src/build/common.croc.
-#
-# See src/tools/code_coverage/example.croc for more info on config files.
-
-{
-  # List of rules, applied in order
-  'rules' : [
-    # Specify inclusions before exclusions, since rules are in order.
-
-    # Exclude everything to negate whatever is in src/build/common.croc
-    {
-      'regexp' : '.*',
-      'include' : 0,
-    },
-
-    # Include all directories (but not the files in the directories).
-    # This is a workaround for how croc.py walks the directory tree. See the
-    # TODO in the AddFiles method of src/tools/code_coverage/croc.py
-    {
-      'regexp' : '.*/$',
-      'include' : 1,
-    },
-
-    # Include any file with an 'ios' directory in the path.
-    {
-      'regexp' : '.*/ios/.*',
-      'include' : 1,
-      'add_if_missing' : 1,
-    },
-
-    # Include any file that ends with _ios.
-    {
-      'regexp' : '.*_ios\\.(c|cc|m|mm)$',
-      'include' : 1,
-      'add_if_missing' : 1,
-    },
-
-    # Include any file that ends with _ios_unittest (and label it a test).
-    {
-      'regexp' : '.*_ios_unittest\\.(c|cc|m|mm)$',
-      'include' : 1,
-      'add_if_missing' : 1,
-      'group' : 'test',
-    },
-
-    # Don't scan for executable lines in uninstrumented header files
-    {
-      'regexp' : '.*\\.(h|hpp)$',
-      'add_if_missing' : 0,
-    },
-
-    # Don't measure coverage of perftests.
-    {
-      'regexp' : '.*perftest\\.(c|cc|m|mm)$',
-      'include' : 0,
-    },
-
-    # Languages
-    {
-      'regexp' : '.*\\.m$',
-      'language' : 'ObjC',
-    },
-    {
-      'regexp' : '.*\\.mm$',
-      'language' : 'ObjC++',
-    },
-  ],
-}
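
The config above is an ordered rule list in which a later matching rule overrides an earlier one (exclude everything, re-include iOS paths, then carve out perftests). A small Python sketch of that evaluation order, with illustrative rules and paths rather than croc.py's actual code:

    import re

    RULES = [
        {'regexp': r'.*', 'include': 0},
        {'regexp': r'.*/ios/.*', 'include': 1},
        {'regexp': r'.*perftest\.(c|cc|m|mm)$', 'include': 0},
    ]

    def included(path):
        decision = False
        for rule in RULES:  # rules are applied in order; the last match wins
            if re.match(rule['regexp'], path):
                decision = bool(rule['include'])
        return decision

    print(included('base/ios/foo.mm'))            # True
    print(included('base/ios/foo_perftest.mm'))   # False
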
diff --git a/build/ios/clean_env.py b/build/ios/clean_env.py
deleted file mode 100755
index bf56b2f..0000000
--- a/build/ios/clean_env.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-def Main(argv):
-  """This is like 'env -i', but it uses a whitelist of env variables to allow
-  through to the command being run.  It attempts to strip off Xcode-added
-  values from PATH.
-  """
-  # Note: An attempt was made to do something like: env -i bash -lc '[command]'
-  # but that fails to set the things set by login (USER, etc.), so instead
-  # the only approach that seems to work is to have a whitelist.
-  env_key_whitelist = (
-    'HOME',
-    'LOGNAME',
-    # 'PATH' added below (but filtered).
-    'PWD',
-    'SHELL',
-    'TEMP',
-    'TMPDIR',
-    'USER'
-  )
-
-  # Need something to run.
-  # TODO(lliabraa): Make this output a usage string and exit (here and below).
-  assert(len(argv) > 0)
-
-  add_to_path = []
-  first_entry = argv[0]
-  if first_entry.startswith('ADD_TO_PATH='):
-    argv = argv[1:]
-    add_to_path = first_entry.replace('ADD_TO_PATH=', '', 1).split(':')
-
-  # Still need something to run.
-  assert(len(argv) > 0)
-
-  clean_env = {}
-
-  # Pull over the whitelisted keys.
-  for key in env_key_whitelist:
-    val = os.environ.get(key, None)
-    if val is not None:
-      clean_env[key] = val
-
-  # Collect the developer dir as set via Xcode, defaulting it.
-  dev_prefix = os.environ.get('DEVELOPER_DIR', '/Developer/')
-  if dev_prefix[-1:] != '/':
-    dev_prefix += '/'
-
-  # Now pull in PATH, but remove anything Xcode might have added.
-  initial_path = os.environ.get('PATH', '')
-  filtered_chunks = \
-      [x for x in initial_path.split(':') if not x.startswith(dev_prefix)]
-  if filtered_chunks:
-    clean_env['PATH'] = ':'.join(add_to_path + filtered_chunks)
-
-  # Add any KEY=VALUE args before the command to the cleaned environment.
-  args = argv[:]
-  while '=' in args[0]:
-    (key, val) = args[0].split('=', 1)
-    clean_env[key] = val
-    args = args[1:]
-
-  # Still need something to run.
-  assert(len(args) > 0)
-
-  # Off it goes...
-  os.execvpe(args[0], args, clean_env)
-  # Should never get here, so return a distinctive, non-zero status code.
-  return 66
-
-if __name__ == '__main__':
-  sys.exit(Main(sys.argv[1:]))
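
A worked example of what the whitelisting above amounts to, with hypothetical values: only whitelisted keys survive, PATH entries under the developer dir are dropped, and anything passed via ADD_TO_PATH= ends up at the front:

    # Hypothetical Xcode-provided environment.
    xcode_env = {
        'HOME': '/Users/dev',
        'USER': 'dev',
        'SDKROOT': '/Applications/Xcode.app/SDKs/iPhoneOS.sdk',  # not whitelisted
        'PATH': '/Applications/Xcode.app/Contents/Developer/usr/bin:/usr/bin:/bin',
    }
    dev_prefix = '/Applications/Xcode.app/Contents/Developer/'
    whitelist = ('HOME', 'LOGNAME', 'PWD', 'SHELL', 'TEMP', 'TMPDIR', 'USER')

    clean_env = dict((k, v) for k, v in xcode_env.items() if k in whitelist)
    kept = [p for p in xcode_env['PATH'].split(':')
            if not p.startswith(dev_prefix)]
    clean_env['PATH'] = ':'.join(['/usr/local/bin'] + kept)  # ADD_TO_PATH=/usr/local/bin

    print(clean_env['PATH'])  # /usr/local/bin:/usr/bin:/bin
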
diff --git a/build/landmine_utils.py b/build/landmine_utils.py
deleted file mode 100644
index a3f21ff..0000000
--- a/build/landmine_utils.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import sys
-
-
-def IsWindows():
-  return sys.platform in ['win32', 'cygwin']
-
-
-def IsLinux():
-  return sys.platform.startswith(('linux', 'freebsd', 'netbsd', 'openbsd'))
-
-
-def IsMac():
-  return sys.platform == 'darwin'
-
-
-def host_os():
-  """
-  Returns a string representing the host_os of the current system.
-  Possible values: 'win', 'mac', 'linux', 'unknown'.
-  """
-  if IsWindows():
-    return 'win'
-  elif IsLinux():
-    return 'linux'
-  elif IsMac():
-    return 'mac'
-  else:
-    return 'unknown'
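
A tiny usage sketch for these helpers, assuming the old build/ directory is on sys.path (as it was when landmines.py imported them):

    import landmine_utils

    # host_os() collapses sys.platform into one of 'win', 'mac', 'linux',
    # or 'unknown'; on a typical Linux host this prints 'linux'.
    print(landmine_utils.host_os())
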
diff --git a/build/landmines.py b/build/landmines.py
deleted file mode 100755
index d0f4298..0000000
--- a/build/landmines.py
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This script runs every build as the first hook (See DEPS). If it detects that
-the build should be clobbered, it will delete the contents of the build
-directory.
-
-A landmine is tripped when a builder checks out a different revision, and the
-diff between the new landmines and the old ones is non-null. At this point, the
-build is clobbered.
-
-Before adding or changing a landmine consider the consequences of doing so.
-Doing so will wipe out every output directory on every Chrome developer's
-machine. This can be particularly problematic on Windows where the directory
-deletion may well fail (locked files, command prompt in the directory, etc.),
-and generated .sln and .vcxproj files will be deleted.
-
-This output directory deletion will be repeated when going back and forth across
-the change that added the landmine, adding to the cost. There are usually less
-troublesome alternatives.
-"""
-
-import difflib
-import errno
-import logging
-import optparse
-import os
-import sys
-import subprocess
-import time
-
-import clobber
-import landmine_utils
-
-
-def get_build_dir(src_dir):
-  """
-  Returns output directory absolute path dependent on build and targets.
-  Examples:
-    r'c:\b\build\slave\win\build\src\out'
-    '/mnt/data/b/build/slave/linux/build/src/out'
-    '/b/build/slave/ios_rel_device/build/src/out'
-
-  Keep this function in sync with tools/build/scripts/slave/compile.py
-  """
-  if 'CHROMIUM_OUT_DIR' in os.environ:
-    output_dir = os.environ.get('CHROMIUM_OUT_DIR').strip()
-    if not output_dir:
-      raise Exception('CHROMIUM_OUT_DIR environment variable is set but blank!')
-  else:
-    output_dir = 'out'
-  return os.path.abspath(os.path.join(src_dir, output_dir))
-
-
-def clobber_if_necessary(new_landmines, src_dir):
-  """Does the work of setting, planting, and triggering landmines."""
-  out_dir = get_build_dir(src_dir)
-  landmines_path = os.path.normpath(os.path.join(src_dir, '.landmines'))
-  try:
-    os.makedirs(out_dir)
-  except OSError as e:
-    if e.errno == errno.EEXIST:
-      pass
-
-  if os.path.exists(landmines_path):
-    with open(landmines_path, 'r') as f:
-      old_landmines = f.readlines()
-    if old_landmines != new_landmines:
-      old_date = time.ctime(os.stat(landmines_path).st_ctime)
-      diff = difflib.unified_diff(old_landmines, new_landmines,
-          fromfile='old_landmines', tofile='new_landmines',
-          fromfiledate=old_date, tofiledate=time.ctime(), n=0)
-      sys.stdout.write('Clobbering due to:\n')
-      sys.stdout.writelines(diff)
-      sys.stdout.flush()
-
-      clobber.clobber(out_dir)
-
-  # Save current set of landmines for next time.
-  with open(landmines_path, 'w') as f:
-    f.writelines(new_landmines)
-
-
-def process_options():
-  """Returns an options object containing the configuration for this script."""
-  parser = optparse.OptionParser()
-  parser.add_option(
-      '-s', '--landmine-scripts', action='append',
-      help='Path to the script which emits landmines to stdout. The target '
-           'is passed to this script via option -t. Note that an extra '
-           'script can be specified via an env var EXTRA_LANDMINES_SCRIPT.')
-  parser.add_option('-d', '--src-dir',
-      help='Path of the source root dir. Overrides the default location of the '
-           'source root dir when calculating the build directory.')
-  parser.add_option('-v', '--verbose', action='store_true',
-      default=('LANDMINES_VERBOSE' in os.environ),
-      help=('Emit some extra debugging information (default off). This option '
-          'is also enabled by the presence of a LANDMINES_VERBOSE environment '
-          'variable.'))
-
-  options, args = parser.parse_args()
-
-  if args:
-    parser.error('Unknown arguments %s' % args)
-
-  logging.basicConfig(
-      level=logging.DEBUG if options.verbose else logging.ERROR)
-
-  if options.src_dir:
-    if not os.path.isdir(options.src_dir):
-      parser.error('Cannot find source root dir at %s' % options.src_dir)
-    logging.debug('Overriding source root dir. Using: %s', options.src_dir)
-  else:
-    options.src_dir = \
-        os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-
-  if not options.landmine_scripts:
-    options.landmine_scripts = [os.path.join(options.src_dir, 'build',
-                                             'get_landmines.py')]
-
-  extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
-  if extra_script:
-    options.landmine_scripts += [extra_script]
-
-  return options
-
-
-def main():
-  options = process_options()
-
-  landmines = []
-  for s in options.landmine_scripts:
-    proc = subprocess.Popen([sys.executable, s], stdout=subprocess.PIPE)
-    output, _ = proc.communicate()
-    landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
-  clobber_if_necessary(landmines, options.src_dir)
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
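For illustration, a minimal landmine script of the kind passed via --landmine-scripts (or via the EXTRA_LANDMINES_SCRIPT environment variable) might look like the hedged sketch below; the message is hypothetical, and the real entries lived in build/get_landmines.py, which is not reproduced here. Each line written to stdout becomes one landmine entry, and any change to the emitted lines triggers a clobber on the next run.

  #!/usr/bin/env python
  # Hypothetical landmine script (not the real build/get_landmines.py).
  # Every line printed here is recorded in .landmines; changing any line
  # causes landmines.py above to clobber the output directory.
  import sys

  def main():
    print 'Example landmine: default toolchain changed.'
    return 0

  if __name__ == '__main__':
    sys.exit(main())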
diff --git a/build/linux/BUILD.gn b/build/linux/BUILD.gn
deleted file mode 100644
index 54314c7..0000000
--- a/build/linux/BUILD.gn
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/features.gni")
-import("//build/config/freetype/freetype.gni")
-import("//build/config/linux/pkg_config.gni")
-
-if (use_gio) {
-  pkg_config("gio_config") {
-    packages = [ "gio-2.0" ]
-
-    defines = [ "USE_GIO" ]
-  }
-}
-
-# Looking for libspeechd? Use //third_party/speech-dispatcher
-
-if (use_system_freetype) {
-  assert(!is_chromecast)
-
-  # Only provided for distributions which prefer to keep linking to FreeType on
-  # the system, use with caution, for details see build/config/freetype/BUILD.gn.
-  pkg_config("freetype_from_pkgconfig") {
-    visibility = [
-      "//third_party:freetype_harfbuzz",
-      "//third_party/harfbuzz-ng:harfbuzz_source",
-    ]
-    packages = [ "freetype2" ]
-  }
-}
diff --git a/build/linux/OWNERS b/build/linux/OWNERS
deleted file mode 100644
index 8e1cb55..0000000
--- a/build/linux/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-mmoss@chromium.org
-thestig@chromium.org
-thomasanderson@chromium.org
diff --git a/build/linux/chrome.map b/build/linux/chrome.map
deleted file mode 100644
index 914f37b..0000000
--- a/build/linux/chrome.map
+++ /dev/null
@@ -1,87 +0,0 @@
-{
-global:
-  __bss_start;
-  __data_start;
-  data_start;
-  _edata;
-  _end;
-  _IO_stdin_used;
-
-  # Initialization and finalization functions for static global
-  # variables.
-  _fini;
-  _init;
-  __libc_csu_fini;
-  __libc_csu_init;
-
-  # Chrome's main function.  Exported for historical purposes.
-  ChromeMain;
-
-  # Program entry point.
-  _start;
-
-  # Memory allocation symbols.  We want chrome and any libraries to
-  # share the same heap, so it is correct to export these symbols.
-  calloc;
-  cfree;
-  free;
-  __free_hook;
-  __libc_calloc;
-  __libc_cfree;
-  __libc_free;
-  __libc_malloc;
-  __libc_memalign;
-  __libc_pvalloc;
-  __libc_realloc;
-  __libc_valloc;
-  mallinfo;
-  malloc;
-  __malloc_hook;
-  malloc_size;
-  malloc_stats;
-  malloc_usable_size;
-  mallopt;
-  memalign;
-  __memalign_hook;
-  __posix_memalign;
-  posix_memalign;
-  pvalloc;
-  realloc;
-  __realloc_hook;
-  valloc;
-
-  # Various flavors of operator new and operator delete.
-  _ZdaPv;
-  _ZdaPvm;
-  _ZdaPvmSt11align_val_t;
-  _ZdaPvRKSt9nothrow_t;
-  _ZdaPvSt11align_val_t;
-  _ZdaPvSt11align_val_tRKSt9nothrow_t;
-  _ZdlPv;
-  _ZdlPvm;
-  _ZdlPvmSt11align_val_t;
-  _ZdlPvRKSt9nothrow_t;
-  _ZdlPvSt11align_val_t;
-  _ZdlPvSt11align_val_tRKSt9nothrow_t;
-  _Znam;
-  _ZnamRKSt9nothrow_t;
-  _ZnamSt11align_val_t;
-  _ZnamSt11align_val_tRKSt9nothrow_t;
-  _Znwm;
-  _ZnwmRKSt9nothrow_t;
-  _ZnwmSt11align_val_t;
-  _ZnwmSt11align_val_tRKSt9nothrow_t;
-
-  # Various flavors of localtime().  These are exported by the chrome
-  # sandbox to intercept calls to localtime(), which would otherwise
-  # fail in untrusted processes that don't have permission to read
-  # /etc/localtime.  These overrides forward the request to the browser
-  # process, which uses dlsym(localtime) to make the real calls.
-  localtime;
-  localtime64;
-  localtime64_r;
-  localtime_r;
-
-local:
-  *;
-};
diff --git a/build/linux/chrome_linux.croc b/build/linux/chrome_linux.croc
deleted file mode 100644
index f400306..0000000
--- a/build/linux/chrome_linux.croc
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- python -*-
-# Crocodile config file for Chromium linux
-
-# TODO(jhawkins): We'll need to add a chromeos.croc once we get a coverage bot
-# for that platform.
-
-{
-  # List of rules, applied in order
-  'rules' : [
-    # Specify inclusions before exclusions, since rules are in order.
-
-    # Don't include non-Linux platform dirs
-    {
-      'regexp' : '.*/(chromeos|views)/',
-      'include' : 0,
-    },
-    # Don't include chromeos, windows, or mac specific files
-    {
-      'regexp' : '.*(_|/)(chromeos|mac|win|views)(\\.|_)',
-      'include' : 0,
-    },
-
-    # Groups
-    {
-      'regexp' : '.*_test_linux\\.',
-      'group' : 'test',
-    },
-  ],
-}
diff --git a/build/linux/dump_app_syms.py b/build/linux/dump_app_syms.py
deleted file mode 100644
index 12e693e..0000000
--- a/build/linux/dump_app_syms.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Helper script to run dump_syms on Chrome Linux executables and strip
-# them if needed.
-
-import os
-import subprocess
-import sys
-
-if len(sys.argv) != 5:
-  print "dump_app_syms.py <dump_syms_exe> <strip_binary>"
-  print "                 <binary_with_symbols> <symbols_output>"
-  sys.exit(1)
-
-dumpsyms = sys.argv[1]
-strip_binary = sys.argv[2]
-infile = sys.argv[3]
-outfile = sys.argv[4]
-
-# Dump only when the output file is out-of-date.
-if not os.path.isfile(outfile) or \
-   os.stat(outfile).st_mtime < os.stat(infile).st_mtime:
-  with open(outfile, 'w') as outfileobj:
-    subprocess.check_call([dumpsyms, infile], stdout=outfileobj)
-
-if strip_binary != '0':
-  subprocess.check_call(['strip', infile])
diff --git a/build/linux/extract_symbols.gni b/build/linux/extract_symbols.gni
deleted file mode 100644
index 50b1aa3..0000000
--- a/build/linux/extract_symbols.gni
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/toolchain/toolchain.gni")
-
-# Extracts symbols from a binary into a symbol file using dump_app_syms.py.
-#
-# Args:
-#   binary: Path to the binary containing symbols to extract, e.g.:
-#       "$root_out_dir/chrome"
-#   symbol_file: Desired output file for symbols, e.g.:
-#       "$root_out_dir/chrome.breakpad.$current_cpu"
-template("extract_symbols") {
-  forward_variables_from(invoker,
-                         [
-                           "deps",
-                           "testonly",
-                         ])
-  symbol_target_name = "${target_name}_symbols"
-
-  action("${symbol_target_name}") {
-    dump_syms_label = "//third_party/breakpad:dump_syms($host_toolchain)"
-    dump_syms_binary =
-        get_label_info(dump_syms_label, "root_out_dir") + "/" + "dump_syms"
-
-    script = "//build/linux/dump_app_syms.py"
-    inputs = [
-      invoker.binary,
-      dump_syms_binary,
-    ]
-    outputs = [
-      invoker.symbol_file,
-    ]
-    args = [
-      "./" + rebase_path(dump_syms_binary, root_build_dir),
-      "0",  # strip_binary = false
-      rebase_path(invoker.binary, root_build_dir),
-      rebase_path(invoker.symbol_file, root_build_dir),
-    ]
-
-    deps += [ dump_syms_label ]
-  }
-}
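To connect the template above with dump_app_syms.py (shown earlier in this diff), here is a hedged sketch of the command line the action ends up running for a hypothetical chrome binary; the concrete paths are illustrative and not taken from a real build.

  # Hypothetical expansion of an extract_symbols() action, assuming
  # binary = "$root_out_dir/chrome" and
  # symbol_file = "$root_out_dir/chrome.breakpad.x64".
  import sys

  cmd = [
      sys.executable,
      'build/linux/dump_app_syms.py',  # the action's script
      './clang_x64/dump_syms',         # dump_syms built for the host toolchain (path illustrative)
      '0',                             # strip_binary: '0' means do not strip
      'chrome',                        # binary, relative to root_build_dir
      'chrome.breakpad.x64',           # symbol_file, relative to root_build_dir
  ]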
diff --git a/build/linux/install-chromeos-fonts.py b/build/linux/install-chromeos-fonts.py
deleted file mode 100755
index 54d0ede..0000000
--- a/build/linux/install-chromeos-fonts.py
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Script to install the Chrome OS fonts on Linux.
-# This script can be run manually (as root), but is also run as part of
-# install-build-deps.sh.
-
-import os
-import shutil
-import subprocess
-import sys
-
-URL_TEMPLATE = ('https://commondatastorage.googleapis.com/chromeos-localmirror/'
-                'distfiles/%(name)s-%(version)s.tar.bz2')
-
-# Taken from the media-fonts/<name> ebuilds in chromiumos-overlay.
-# noto-cjk used to be here, but is removed because fc-cache takes too long
-# regenerating the fontconfig cache (See crbug.com/697954.)
-# TODO(jshin): Add it back when the above issue can be avoided.
-SOURCES = [
-  {
-    'name': 'notofonts',
-    'version': '20161129'
-  }, {
-    'name': 'robotofonts',
-    'version': '2.132'
-  }
-]
-
-URLS = sorted([URL_TEMPLATE % d for d in SOURCES])
-FONTS_DIR = '/usr/local/share/fonts'
-
-def main(args):
-  if not sys.platform.startswith('linux'):
-    print "Error: %s must be run on Linux." % __file__
-    return 1
-
-  if os.getuid() != 0:
-    print "Error: %s must be run as root." % __file__
-    return 1
-
-  if not os.path.isdir(FONTS_DIR):
-    print "Error: Destination directory does not exist: %s" % FONTS_DIR
-    return 1
-
-  dest_dir = os.path.join(FONTS_DIR, 'chromeos')
-
-  stamp = os.path.join(dest_dir, ".stamp02")
-  if os.path.exists(stamp):
-    with open(stamp) as s:
-      if s.read() == '\n'.join(URLS):
-        print "Chrome OS fonts already up to date in %s." % dest_dir
-        return 0
-
-  if os.path.isdir(dest_dir):
-    shutil.rmtree(dest_dir)
-  os.mkdir(dest_dir)
-  os.chmod(dest_dir, 0755)
-
-  print "Installing Chrome OS fonts to %s." % dest_dir
-  for url in URLS:
-    tarball = os.path.join(dest_dir, os.path.basename(url))
-    subprocess.check_call(['curl', '-L', url, '-o', tarball])
-    subprocess.check_call(['tar', '--no-same-owner', '--no-same-permissions',
-                           '-xf', tarball, '-C', dest_dir])
-    os.remove(tarball)
-
-  readme = os.path.join(dest_dir, "README")
-  with open(readme, 'w') as s:
-    s.write("This directory and its contents are auto-generated.\n")
-    s.write("It may be deleted and recreated. Do not modify.\n")
-    s.write("Script: %s\n" % __file__)
-
-  with open(stamp, 'w') as s:
-    s.write('\n'.join(URLS))
-
-  for base, dirs, files in os.walk(dest_dir):
-    for dir in dirs:
-      os.chmod(os.path.join(base, dir), 0755)
-    for file in files:
-      os.chmod(os.path.join(base, file), 0644)
-
-  print """\
-
-Chrome OS font rendering settings are specified using Fontconfig. If your
-system's configuration doesn't match Chrome OS's (which vary for different
-devices), fonts may be rendered with different subpixel rendering, subpixel
-positioning, or hinting settings. This may affect font metrics.
-
-Chrome OS's settings are stored in the media-libs/fontconfig package, which is
-at src/third_party/chromiumos-overlay/media-libs/fontconfig in a Chrome OS
-checkout. You can configure your system to match Chrome OS's defaults by
-creating or editing a ~/.fonts.conf file:
-
-<?xml version="1.0"?>
-<!DOCTYPE fontconfig SYSTEM "fonts.dtd">
-<fontconfig>
-  <match target="font">
-    <edit name="antialias" mode="assign"><bool>true</bool></edit>
-    <edit name="autohint" mode="assign"><bool>true</bool></edit>
-    <edit name="hinting" mode="assign"><bool>true</bool></edit>
-    <edit name="hintstyle" mode="assign"><const>hintslight</const></edit>
-    <edit name="rgba" mode="assign"><const>rgb</const></edit>
-  </match>
-</fontconfig>
-
-To load additional per-font configs (and assuming you have Chrome OS checked
-out), add the following immediately before the "</fontconfig>" line:
-
-  <include ignore_missing="yes">/path/to/src/third_party/chromiumos-overlay/media-libs/fontconfig/files/local.conf</include>
-"""
-
-  return 0
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
diff --git a/build/linux/libbrlapi/BUILD.gn b/build/linux/libbrlapi/BUILD.gn
deleted file mode 100644
index 4ee3950..0000000
--- a/build/linux/libbrlapi/BUILD.gn
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//tools/generate_library_loader/generate_library_loader.gni")
-
-generate_library_loader("libbrlapi") {
-  name = "LibBrlapiLoader"
-  output_h = "libbrlapi.h"
-  output_cc = "libbrlapi_loader.cc"
-  header = "<brlapi.h>"
-
-  functions = [
-    "brlapi_getHandleSize",
-    "brlapi_error_location",
-    "brlapi_strerror",
-    "brlapi__acceptKeys",
-    "brlapi__openConnection",
-    "brlapi__closeConnection",
-    "brlapi__getDisplaySize",
-    "brlapi__enterTtyModeWithPath",
-    "brlapi__leaveTtyMode",
-    "brlapi__writeDots",
-    "brlapi__readKey",
-  ]
-}
diff --git a/build/linux/libpci/BUILD.gn b/build/linux/libpci/BUILD.gn
deleted file mode 100644
index 2d1e267..0000000
--- a/build/linux/libpci/BUILD.gn
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//tools/generate_library_loader/generate_library_loader.gni")
-
-# This generates a target named "libpci".
-generate_library_loader("libpci") {
-  name = "LibPciLoader"
-  output_h = "libpci.h"
-  output_cc = "libpci_loader.cc"
-  header = "<pci/pci.h>"
-
-  functions = [
-    "pci_alloc",
-    "pci_init",
-    "pci_cleanup",
-    "pci_scan_bus",
-    "pci_fill_info",
-    "pci_lookup_name",
-  ]
-}
diff --git a/build/linux/libudev/BUILD.gn b/build/linux/libudev/BUILD.gn
deleted file mode 100644
index 9486a03..0000000
--- a/build/linux/libudev/BUILD.gn
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//tools/generate_library_loader/generate_library_loader.gni")
-
-libudev_functions = [
-  "udev_device_get_action",
-  "udev_device_get_devnode",
-  "udev_device_get_parent",
-  "udev_device_get_parent_with_subsystem_devtype",
-  "udev_device_get_property_value",
-  "udev_device_get_subsystem",
-  "udev_device_get_sysattr_value",
-  "udev_device_get_sysname",
-  "udev_device_get_syspath",
-  "udev_device_new_from_devnum",
-  "udev_device_new_from_subsystem_sysname",
-  "udev_device_new_from_syspath",
-  "udev_device_unref",
-  "udev_enumerate_add_match_subsystem",
-  "udev_enumerate_get_list_entry",
-  "udev_enumerate_new",
-  "udev_enumerate_scan_devices",
-  "udev_enumerate_unref",
-  "udev_list_entry_get_next",
-  "udev_list_entry_get_name",
-  "udev_monitor_enable_receiving",
-  "udev_monitor_filter_add_match_subsystem_devtype",
-  "udev_monitor_get_fd",
-  "udev_monitor_new_from_netlink",
-  "udev_monitor_receive_device",
-  "udev_monitor_unref",
-  "udev_new",
-  "udev_set_log_fn",
-  "udev_set_log_priority",
-  "udev_unref",
-]
-
-# This generates a target named "udev0_loader".
-generate_library_loader("udev0_loader") {
-  name = "LibUdev0Loader"
-  output_h = "libudev0.h"
-  output_cc = "libudev0_loader.cc"
-  header = "\"third_party/libudev/libudev0.h\""
-
-  functions = libudev_functions
-}
-
-# This generates a target named "udev1_loader".
-generate_library_loader("udev1_loader") {
-  name = "LibUdev1Loader"
-  output_h = "libudev1.h"
-  output_cc = "libudev1_loader.cc"
-  header = "\"third_party/libudev/libudev1.h\""
-
-  functions = libudev_functions
-}
-
-group("libudev") {
-  public_deps = [
-    ":udev0_loader",
-    ":udev1_loader",
-  ]
-}
diff --git a/build/linux/pkg-config-wrapper b/build/linux/pkg-config-wrapper
deleted file mode 100755
index c4935d7..0000000
--- a/build/linux/pkg-config-wrapper
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This program wraps around pkg-config to generate the correct include and
-# library paths when cross-compiling using a sysroot.
-# The assumption is that the sysroot contains the .pc files in usr/lib/pkgconfig
-# and usr/share/pkgconfig (relative to the sysroot) and that they output paths
-# relative to some parent path of the sysroot.
-# This assumption is valid for a range of sysroots, in particular: an
-# LSB-compliant root filesystem mounted at the sysroot, and a board build
-# directory of a Chromium OS chroot.
-
-set -o nounset
-set -o errexit
-
-root="$1"
-shift
-target_arch="$1"
-shift
-libpath="$1"
-shift
-
-if [ -z "$root" -o -z "$target_arch" ]
-then
-  echo "usage: $0 /path/to/sysroot target_arch libdir [pkg-config-arguments] package" >&2
-  exit 1
-fi
-
-rewrite=`dirname $0`/rewrite_dirs.py
-package=${!#}
-
-libdir=$root/usr/$libpath/pkgconfig:$root/usr/share/pkgconfig
-
-set -e
-# Some sysroots, like the Chromium OS ones, may generate paths that are not
-# relative to the sysroot. For example,
-# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all paths
-# relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) instead of
-# relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
-# To support this correctly, it's necessary to extract the prefix to strip from
-# pkg-config's |prefix| variable.
-prefix=`PKG_CONFIG_LIBDIR=$libdir pkg-config --variable=prefix "$package" | sed -e 's|/usr$||'`
-result=`PKG_CONFIG_LIBDIR=$libdir pkg-config "$@"`
-echo "$result"| $rewrite --sysroot "$root" --strip-prefix "$prefix"
diff --git a/build/linux/rewrite_dirs.py b/build/linux/rewrite_dirs.py
deleted file mode 100755
index 30f22f0..0000000
--- a/build/linux/rewrite_dirs.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Rewrites paths in -I, -L and other option to be relative to a sysroot."""
-
-import sys
-import os
-import optparse
-
-REWRITE_PREFIX = ['-I',
-                  '-idirafter',
-                  '-imacros',
-                  '-imultilib',
-                  '-include',
-                  '-iprefix',
-                  '-iquote',
-                  '-isystem',
-                  '-L']
-
-def RewritePath(path, opts):
-  """Rewrites a path by stripping the prefix and prepending the sysroot."""
-  sysroot = opts.sysroot
-  prefix = opts.strip_prefix
-  if os.path.isabs(path) and not path.startswith(sysroot):
-    if path.startswith(prefix):
-      path = path[len(prefix):]
-    path = path.lstrip('/')
-    return os.path.join(sysroot, path)
-  else:
-    return path
-
-
-def RewriteLine(line, opts):
-  """Rewrites all the paths in recognized options."""
-  args = line.split()
-  count = len(args)
-  i = 0
-  while i < count:
-    for prefix in REWRITE_PREFIX:
-      # The option can be either in the form "-I /path/to/dir" or
-      # "-I/path/to/dir" so handle both.
-      if args[i] == prefix:
-        i += 1
-        try:
-          args[i] = RewritePath(args[i], opts)
-        except IndexError:
-          sys.stderr.write('Missing argument following %s\n' % prefix)
-          break
-      elif args[i].startswith(prefix):
-        args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
-    i += 1
-
-  return ' '.join(args)
-
-
-def main(argv):
-  parser = optparse.OptionParser()
-  parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
-  parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
-  opts, args = parser.parse_args(argv[1:])
-
-  for line in sys.stdin.readlines():
-    line = RewriteLine(line.strip(), opts)
-    print line
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv))
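As a hedged usage sketch of the script above (assuming rewrite_dirs.py is importable as a module and the default empty strip prefix), the rewriting maps absolute include and library paths under the given sysroot while leaving other flags alone:

  # Hypothetical example; paths and sysroot location are illustrative only.
  import rewrite_dirs

  class Opts(object):
    sysroot = '/sysroot'
    strip_prefix = ''

  flags = '-I/usr/include/glib-2.0 -L/usr/lib -lglib-2.0'
  # Prints: -I/sysroot/usr/include/glib-2.0 -L/sysroot/usr/lib -lglib-2.0
  print rewrite_dirs.RewriteLine(flags, Opts())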
diff --git a/build/linux/sysroot_ld_path.sh b/build/linux/sysroot_ld_path.sh
deleted file mode 100755
index 623d47b..0000000
--- a/build/linux/sysroot_ld_path.sh
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Reads etc/ld.so.conf and/or etc/ld.so.conf.d/*.conf and returns the
-# appropriate linker flags.
-#
-#  sysroot_ld_path.sh /abspath/to/sysroot
-#
-
-log_error_and_exit() {
-  echo $0: $@
-  exit 1
-}
-
-process_entry() {
-  if [ -z "$1" ] || [ -z "$2" ]; then
-    log_error_and_exit "bad arguments to process_entry()"
-  fi
-  local root="$1"
-  local localpath="$2"
-
-  echo $localpath | grep -qs '^/'
-  if [ $? -ne 0 ]; then
-    log_error_and_exit $localpath does not start with /
-  fi
-  local entry="$root$localpath"
-  echo $entry
-}
-
-process_ld_so_conf() {
-  if [ -z "$1" ] || [ -z "$2" ]; then
-    log_error_and_exit "bad arguments to process_ld_so_conf()"
-  fi
-  local root="$1"
-  local ld_so_conf="$2"
-
-  # ld.so.conf may include relative include paths. pushd is a bashism.
-  local saved_pwd=$(pwd)
-  cd $(dirname "$ld_so_conf")
-
-  cat "$ld_so_conf" | \
-    while read ENTRY; do
-      echo "$ENTRY" | grep -qs ^include
-      if [ $? -eq 0 ]; then
-        local included_files=$(echo "$ENTRY" | sed 's/^include //')
-        echo "$included_files" | grep -qs ^/
-        if [ $? -eq 0 ]; then
-          if ls $root$included_files >/dev/null 2>&1 ; then
-            for inc_file in $root$included_files; do
-              process_ld_so_conf "$root" "$inc_file"
-            done
-          fi
-        else
-          if ls $(pwd)/$included_files >/dev/null 2>&1 ; then
-            for inc_file in $(pwd)/$included_files; do
-              process_ld_so_conf "$root" "$inc_file"
-            done
-          fi
-        fi
-        continue
-      fi
-
-      echo "$ENTRY" | grep -qs ^/
-      if [ $? -eq 0 ]; then
-        process_entry "$root" "$ENTRY"
-      fi
-    done
-
-  # popd is a bashism
-  cd "$saved_pwd"
-}
-
-# Main
-
-if [ $# -ne 1 ]; then
-  echo Usage $0 /abspath/to/sysroot
-  exit 1
-fi
-
-echo $1 | grep -qs ' '
-if [ $? -eq 0 ]; then
-  log_error_and_exit $1 contains whitespace.
-fi
-
-LD_SO_CONF="$1/etc/ld.so.conf"
-LD_SO_CONF_D="$1/etc/ld.so.conf.d"
-
-if [ -e "$LD_SO_CONF" ]; then
-  process_ld_so_conf "$1" "$LD_SO_CONF" | xargs echo
-elif [ -e "$LD_SO_CONF_D" ]; then
-  find "$LD_SO_CONF_D" -maxdepth 1 -name '*.conf' -print -quit > /dev/null
-  if [ $? -eq 0 ]; then
-    for entry in $LD_SO_CONF_D/*.conf; do
-      process_ld_so_conf "$1" "$entry"
-    done | xargs echo
-  fi
-fi
diff --git a/build/linux/sysroot_scripts/build_and_upload.py b/build/linux/sysroot_scripts/build_and_upload.py
deleted file mode 100755
index 78eaa63..0000000
--- a/build/linux/sysroot_scripts/build_and_upload.py
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Automates running BuildPackageLists, BuildSysroot, and
-UploadSysroot for each supported arch of each sysroot creator.
-"""
-
-import glob
-import hashlib
-import json
-import multiprocessing
-import os
-import re
-import string
-import subprocess
-import sys
-
-def run_script(args):
-  fnull = open(os.devnull, 'w')
-  subprocess.check_call(args, stdout=fnull, stderr=fnull)
-
-def sha1sumfile(filename):
-  sha1 = hashlib.sha1()
-  with open(filename, 'rb') as f:
-    while True:
-      data = f.read(65536)
-      if not data:
-        break
-      sha1.update(data)
-  return sha1.hexdigest()
-
-def get_proc_output(args):
-  return subprocess.check_output(args).strip()
-
-def build_and_upload(script_path, distro, release, arch, lock):
-  # TODO(thomasanderson):  Find out which revision 'git-cl upload' uses to
-  # calculate the diff against and use that instead of HEAD.
-  script_dir = os.path.dirname(os.path.realpath(__file__))
-  revision = get_proc_output(['git', '-C', script_dir, 'rev-parse', 'HEAD'])
-
-  run_script([script_path, 'UpdatePackageLists%s' % arch])
-  run_script([script_path, 'BuildSysroot%s' % arch])
-  run_script([script_path, 'UploadSysroot%s' % arch, revision])
-
-  tarball = '%s_%s_%s_sysroot.tar.xz' % (distro, release, arch.lower())
-  tarxz_path = os.path.join(script_dir, "..", "..", "..", "out",
-                            "sysroot-build", release, tarball)
-  sha1sum = sha1sumfile(tarxz_path)
-  sysroot_dir = '%s_%s_%s-sysroot' % (distro, release, arch.lower())
-
-  sysroot_metadata = {
-      'Revision': revision,
-      'Tarball': tarball,
-      'Sha1Sum': sha1sum,
-      'SysrootDir': sysroot_dir
-  }
-  with lock:
-    with open(os.path.join(script_dir, 'sysroots.json'), 'rw+') as f:
-      sysroots = json.load(f)
-      sysroots["%s_%s" % (release, arch.lower())] = sysroot_metadata
-      f.seek(0)
-      f.truncate()
-      f.write(json.dumps(sysroots, sort_keys=True, indent=4,
-                         separators=(',', ': ')))
-      f.write('\n')
-
-def main():
-  script_dir = os.path.dirname(os.path.realpath(__file__))
-  procs = []
-  lock = multiprocessing.Lock()
-  for filename in glob.glob(os.path.join(script_dir, 'sysroot-creator-*.sh')):
-    script_path = os.path.join(script_dir, filename)
-    distro = get_proc_output([script_path, 'PrintDistro'])
-    release = get_proc_output([script_path, 'PrintRelease'])
-    architectures = get_proc_output([script_path, 'PrintArchitectures'])
-    for arch in architectures.split('\n'):
-      proc = multiprocessing.Process(target=build_and_upload,
-                                     args=(script_path, distro, release, arch,
-                                           lock))
-      procs.append(("%s %s (%s)" % (distro, release, arch), proc))
-      proc.start()
-  for _, proc in procs:
-    proc.join()
-
-  print "SYSROOT CREATION SUMMARY"
-  failures = 0
-  for name, proc in procs:
-    if proc.exitcode:
-      failures += 1
-    status = "FAILURE" if proc.exitcode else "SUCCESS"
-    print "%s sysroot creation\t%s" % (name, status)
-  return failures
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/linux/sysroot_scripts/debian-archive-sid-stable.gpg b/build/linux/sysroot_scripts/debian-archive-sid-stable.gpg
deleted file mode 100644
index a282485..0000000
--- a/build/linux/sysroot_scripts/debian-archive-sid-stable.gpg
+++ /dev/null
Binary files differ
diff --git a/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py b/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py
deleted file mode 100755
index 426e17f..0000000
--- a/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Find incompatible symbols in glibc and output a list of replacements.
-"""
-
-import re
-import sys
-
-# This constant comes from https://crbug.com/580892
-MAX_ALLOWED_GLIBC_VERSION = [2, 17]
-
-
-def get_replacements(nm_file, max_allowed_glibc_version):
-  symbol_format = re.compile('\S+ \S+ ([^@]+)@@?(\S+)\n')
-  version_format = re.compile('GLIBC_[0-9\.]+')
-  symbols = {}
-  for line in nm_file:
-    m = re.match(symbol_format, line)
-    symbol = m.group(1)
-    version = m.group(2)
-    if not re.match(version_format, version):
-      continue
-    if symbol in symbols:
-      symbols[symbol].add(version)
-    else:
-      symbols[symbol] = set([version])
-
-  replacements = []
-  for symbol, versions in symbols.iteritems():
-    if len(versions) <= 1:
-      continue
-    versions_parsed = [[
-        int(part) for part in version.lstrip('GLIBC_').split('.')
-    ] for version in versions]
-    if (max(versions_parsed) > max_allowed_glibc_version and
-        min(versions_parsed) <= max_allowed_glibc_version):
-      # Use the newest allowed version of the symbol.
-      replacement_version_parsed = max([
-          version for version in versions_parsed
-          if version <= max_allowed_glibc_version
-      ])
-      replacement_version = 'GLIBC_' + '.'.join(
-          [str(part) for part in replacement_version_parsed])
-      replacements.append('__asm__(".symver %s, %s@%s");' %
-                          (symbol, symbol, replacement_version))
-  return sorted(replacements)
-
-
-if __name__ == '__main__':
-  replacements = get_replacements(sys.stdin, MAX_ALLOWED_GLIBC_VERSION)
-  if replacements:
-    print('// Chromium-specific hack.')
-    print('// See explanation in sysroot-creator.sh.')
-    for replacement in replacements:
-      print replacement
diff --git a/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py b/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py
deleted file mode 100755
index 5af3eb2..0000000
--- a/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import cStringIO
-import find_incompatible_glibc_symbols
-
-NM_DATA = """\
-0000000000000001 W expf@GLIBC_2.2.5
-0000000000000002 W expf@@GLIBC_2.27
-0000000000000003 W foo@@GLIBC_2.2.5
-0000000000000004 W bar@GLIBC_2.2.5
-0000000000000005 W baz@GLIBC_2.2.5
-0000000000000006 T foo2@GLIBC_2.2
-0000000000000007 T foo2@GLIBC_2.3
-0000000000000008 T foo2@GLIBC_2.30
-0000000000000009 T foo2@@GLIBC_2.31
-000000000000000a T bar2@GLIBC_2.30
-000000000000000b T bar2@@GLIBC_2.31
-000000000000000c T baz2@GLIBC_2.2
-000000000000000d T baz2@@GLIBC_2.3
-"""
-
-EXPECTED_REPLACEMENTS = [
-    '__asm__(".symver expf, expf@GLIBC_2.2.5");',
-    '__asm__(".symver foo2, foo2@GLIBC_2.3");',
-]
-
-nm_file = cStringIO.StringIO()
-nm_file.write(NM_DATA)
-nm_file.seek(0)
-
-assert (
-    EXPECTED_REPLACEMENTS == find_incompatible_glibc_symbols.get_replacements(
-        nm_file, [2, 17]))
diff --git a/build/linux/sysroot_scripts/install-sysroot.py b/build/linux/sysroot_scripts/install-sysroot.py
deleted file mode 100755
index 58f0995..0000000
--- a/build/linux/sysroot_scripts/install-sysroot.py
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Install Debian sysroots for building chromium.
-"""
-
-# The sysroot is needed to ensure that binaries that get built will run on
-# the oldest stable version of Debian that we currently support.
-# This script can be run manually but is more often run as part of gclient
-# hooks. When run from hooks this script is a no-op on non-linux platforms.
-
-# The sysroot image could be constructed from scratch based on the current state
-# of the Debian archive but for consistency we use a pre-built root image (we
-# don't want upstream changes to Debian to affect the chromium build until we
-# choose to pull them in). The images will normally need to be rebuilt every
-# time chrome's build dependencies are changed but should also be updated
-# periodically to include upstream security fixes from Debian.
-
-import hashlib
-import json
-import platform
-import optparse
-import os
-import re
-import shutil
-import subprocess
-import sys
-import urllib2
-
-SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
-
-URL_PREFIX = 'https://commondatastorage.googleapis.com'
-URL_PATH = 'chrome-linux-sysroot/toolchain'
-
-VALID_ARCHS = ('arm', 'arm64', 'i386', 'amd64', 'mips', 'mips64el')
-
-ARCH_TRANSLATIONS = {
-    'x64': 'amd64',
-    'x86': 'i386',
-    'mipsel': 'mips',
-    'mips64': 'mips64el',
-}
-
-DEFAULT_TARGET_PLATFORM = 'sid'
-
-class Error(Exception):
-  pass
-
-
-def GetSha1(filename):
-  sha1 = hashlib.sha1()
-  with open(filename, 'rb') as f:
-    while True:
-      # Read in 1mb chunks, so it doesn't all have to be loaded into memory.
-      chunk = f.read(1024*1024)
-      if not chunk:
-        break
-      sha1.update(chunk)
-  return sha1.hexdigest()
-
-
-def main(args):
-  parser = optparse.OptionParser('usage: %prog [OPTIONS]', description=__doc__)
-  parser.add_option('--arch',
-                    help='Sysroot architecture: %s' % ', '.join(VALID_ARCHS))
-  parser.add_option('--all', action='store_true',
-                    help='Install all sysroot images (useful when updating the'
-                         ' images)')
-  parser.add_option('--print-hash',
-                    help='Print the hash of the sysroot for the given arch.')
-  options, _ = parser.parse_args(args)
-  if not sys.platform.startswith('linux'):
-    return 0
-
-  if options.print_hash:
-    arch = options.print_hash
-    print GetSysrootDict(DEFAULT_TARGET_PLATFORM,
-                         ARCH_TRANSLATIONS.get(arch, arch))['Sha1Sum']
-    return 0
-  if options.arch:
-    InstallSysroot(DEFAULT_TARGET_PLATFORM,
-                   ARCH_TRANSLATIONS.get(options.arch, options.arch))
-  elif options.all:
-    for arch in VALID_ARCHS:
-      InstallSysroot(DEFAULT_TARGET_PLATFORM, arch)
-  else:
-    print 'You must specify one of the options.'
-    return 1
-
-  return 0
-
-
-def GetSysrootDict(target_platform, target_arch):
-  if target_arch not in VALID_ARCHS:
-    raise Error('Unknown architecture: %s' % target_arch)
-
-  sysroots_file = os.path.join(SCRIPT_DIR, 'sysroots.json')
-  sysroots = json.load(open(sysroots_file))
-  sysroot_key = '%s_%s' % (target_platform, target_arch)
-  if sysroot_key not in sysroots:
-    raise Error('No sysroot for: %s %s' % (target_platform, target_arch))
-  return sysroots[sysroot_key]
-
-
-def InstallSysroot(target_platform, target_arch):
-  sysroot_dict = GetSysrootDict(target_platform, target_arch)
-  revision = sysroot_dict['Revision']
-  tarball_filename = sysroot_dict['Tarball']
-  tarball_sha1sum = sysroot_dict['Sha1Sum']
-  # TODO(thestig) Consider putting this elsewhere to avoid having to recreate
-  # it on every build.
-  linux_dir = os.path.dirname(SCRIPT_DIR)
-  sysroot = os.path.join(linux_dir, sysroot_dict['SysrootDir'])
-
-  url = '%s/%s/%s/%s' % (URL_PREFIX, URL_PATH, revision, tarball_filename)
-
-  stamp = os.path.join(sysroot, '.stamp')
-  if os.path.exists(stamp):
-    with open(stamp) as s:
-      if s.read() == url:
-        return
-
-  print 'Installing Debian %s %s root image: %s' % \
-      (target_platform, target_arch, sysroot)
-  if os.path.isdir(sysroot):
-    shutil.rmtree(sysroot)
-  os.mkdir(sysroot)
-  tarball = os.path.join(sysroot, tarball_filename)
-  print 'Downloading %s' % url
-  sys.stdout.flush()
-  sys.stderr.flush()
-  for _ in range(3):
-    try:
-      response = urllib2.urlopen(url)
-      with open(tarball, "wb") as f:
-        f.write(response.read())
-      break
-    except:
-      pass
-  else:
-    raise Error('Failed to download %s' % url)
-  sha1sum = GetSha1(tarball)
-  if sha1sum != tarball_sha1sum:
-    raise Error('Tarball sha1sum is wrong. '
-                'Expected %s, actual: %s' % (tarball_sha1sum, sha1sum))
-  subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
-  os.remove(tarball)
-
-  with open(stamp, 'w') as s:
-    s.write(url)
-
-
-if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except Error as e:
-    sys.stderr.write(str(e) + '\n')
-    sys.exit(1)
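For reference, GetSysrootDict() above expects sysroots.json (maintained by build_and_upload.py earlier in this diff) to map '<release>_<arch>' keys to metadata dicts. A purely illustrative entry for the amd64 sid sysroot, with placeholder revision and checksum, would look like:

  # Hypothetical sysroots.json entry; field names follow the code above,
  # but the values here are placeholders, not real data.
  {
      "sid_amd64": {
          "Revision": "<git revision of the sysroot scripts>",
          "Tarball": "debian_sid_amd64_sysroot.tar.xz",
          "Sha1Sum": "<sha1 checksum of the tarball>",
          "SysrootDir": "debian_sid_amd64-sysroot"
      }
  }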
diff --git a/build/linux/sysroot_scripts/libdbus-1-3-symbols b/build/linux/sysroot_scripts/libdbus-1-3-symbols
deleted file mode 100644
index 28050aa..0000000
--- a/build/linux/sysroot_scripts/libdbus-1-3-symbols
+++ /dev/null
@@ -1,235 +0,0 @@
-libdbus-1.so.3 libdbus-1-3 #MINVER#
- dbus_address_entries_free@Base 1.0.2
- dbus_address_entry_get_method@Base 1.0.2
- dbus_address_entry_get_value@Base 1.0.2
- dbus_address_escape_value@Base 1.0.2
- dbus_address_unescape_value@Base 1.0.2
- dbus_bus_add_match@Base 1.0.2
- dbus_bus_get@Base 1.0.2
- dbus_bus_get_id@Base 1.1.1
- dbus_bus_get_private@Base 1.0.2
- dbus_bus_get_unique_name@Base 1.0.2
- dbus_bus_get_unix_user@Base 1.0.2
- dbus_bus_name_has_owner@Base 1.0.2
- dbus_bus_register@Base 1.0.2
- dbus_bus_release_name@Base 1.0.2
- dbus_bus_remove_match@Base 1.0.2
- dbus_bus_request_name@Base 1.0.2
- dbus_bus_set_unique_name@Base 1.0.2
- dbus_bus_start_service_by_name@Base 1.0.2
- dbus_connection_add_filter@Base 1.0.2
- dbus_connection_allocate_data_slot@Base 1.0.2
- dbus_connection_borrow_message@Base 1.0.2
- dbus_connection_can_send_type@Base 1.3.1
- dbus_connection_close@Base 1.0.2
- dbus_connection_dispatch@Base 1.0.2
- dbus_connection_flush@Base 1.0.2
- dbus_connection_free_data_slot@Base 1.0.2
- dbus_connection_free_preallocated_send@Base 1.0.2
- dbus_connection_get_adt_audit_session_data@Base 1.2.4
- dbus_connection_get_data@Base 1.0.2
- dbus_connection_get_dispatch_status@Base 1.0.2
- dbus_connection_get_is_anonymous@Base 1.1.1
- dbus_connection_get_is_authenticated@Base 1.0.2
- dbus_connection_get_is_connected@Base 1.0.2
- dbus_connection_get_max_message_size@Base 1.0.2
- dbus_connection_get_max_message_unix_fds@Base 1.3.1
- dbus_connection_get_max_received_size@Base 1.0.2
- dbus_connection_get_max_received_unix_fds@Base 1.3.1
- dbus_connection_get_object_path_data@Base 1.0.2
- dbus_connection_get_outgoing_size@Base 1.0.2
- dbus_connection_get_outgoing_unix_fds@Base 1.3.1
- dbus_connection_get_server_id@Base 1.1.1
- dbus_connection_get_socket@Base 1.0.2
- dbus_connection_get_unix_fd@Base 1.0.2
- dbus_connection_get_unix_process_id@Base 1.0.2
- dbus_connection_get_unix_user@Base 1.0.2
- dbus_connection_get_windows_user@Base 1.1.1
- dbus_connection_has_messages_to_send@Base 1.0.2
- dbus_connection_list_registered@Base 1.0.2
- dbus_connection_open@Base 1.0.2
- dbus_connection_open_private@Base 1.0.2
- dbus_connection_pop_message@Base 1.0.2
- dbus_connection_preallocate_send@Base 1.0.2
- dbus_connection_read_write@Base 1.0.2
- dbus_connection_read_write_dispatch@Base 1.0.2
- dbus_connection_ref@Base 1.0.2
- dbus_connection_register_fallback@Base 1.0.2
- dbus_connection_register_object_path@Base 1.0.2
- dbus_connection_remove_filter@Base 1.0.2
- dbus_connection_return_message@Base 1.0.2
- dbus_connection_send@Base 1.0.2
- dbus_connection_send_preallocated@Base 1.0.2
- dbus_connection_send_with_reply@Base 1.0.2
- dbus_connection_send_with_reply_and_block@Base 1.0.2
- dbus_connection_set_allow_anonymous@Base 1.1.1
- dbus_connection_set_change_sigpipe@Base 1.0.2
- dbus_connection_set_data@Base 1.0.2
- dbus_connection_set_dispatch_status_function@Base 1.0.2
- dbus_connection_set_exit_on_disconnect@Base 1.0.2
- dbus_connection_set_max_message_size@Base 1.0.2
- dbus_connection_set_max_message_unix_fds@Base 1.3.1
- dbus_connection_set_max_received_size@Base 1.0.2
- dbus_connection_set_max_received_unix_fds@Base 1.3.1
- dbus_connection_set_route_peer_messages@Base 1.0.2
- dbus_connection_set_timeout_functions@Base 1.0.2
- dbus_connection_set_unix_user_function@Base 1.0.2
- dbus_connection_set_wakeup_main_function@Base 1.0.2
- dbus_connection_set_watch_functions@Base 1.0.2
- dbus_connection_set_windows_user_function@Base 1.1.1
- dbus_connection_steal_borrowed_message@Base 1.0.2
- dbus_connection_try_register_fallback@Base 1.1.4
- dbus_connection_try_register_object_path@Base 1.1.4
- dbus_connection_unref@Base 1.0.2
- dbus_connection_unregister_object_path@Base 1.0.2
- dbus_error_free@Base 1.0.2
- dbus_error_has_name@Base 1.0.2
- dbus_error_init@Base 1.0.2
- dbus_error_is_set@Base 1.0.2
- dbus_free@Base 1.0.2
- dbus_free_string_array@Base 1.0.2
- dbus_get_local_machine_id@Base 1.0.2
- dbus_get_version@Base 1.1.4
- dbus_internal_do_not_use_create_uuid@Base 1.0.2
- dbus_internal_do_not_use_get_uuid@Base 1.0.2
- dbus_malloc0@Base 1.0.2
- dbus_malloc@Base 1.0.2
- dbus_message_allocate_data_slot@Base 1.0.2
- dbus_message_append_args@Base 1.0.2
- dbus_message_append_args_valist@Base 1.0.2
- dbus_message_contains_unix_fds@Base 1.3.1
- dbus_message_copy@Base 1.0.2
- dbus_message_demarshal@Base 1.1.1
- dbus_message_demarshal_bytes_needed@Base 1.2.14
- dbus_message_free_data_slot@Base 1.0.2
- dbus_message_get_args@Base 1.0.2
- dbus_message_get_args_valist@Base 1.0.2
- dbus_message_get_auto_start@Base 1.0.2
- dbus_message_get_data@Base 1.0.2
- dbus_message_get_destination@Base 1.0.2
- dbus_message_get_error_name@Base 1.0.2
- dbus_message_get_interface@Base 1.0.2
- dbus_message_get_member@Base 1.0.2
- dbus_message_get_no_reply@Base 1.0.2
- dbus_message_get_path@Base 1.0.2
- dbus_message_get_path_decomposed@Base 1.0.2
- dbus_message_get_reply_serial@Base 1.0.2
- dbus_message_get_sender@Base 1.0.2
- dbus_message_get_serial@Base 1.0.2
- dbus_message_get_signature@Base 1.0.2
- dbus_message_get_type@Base 1.0.2
- dbus_message_has_destination@Base 1.0.2
- dbus_message_has_interface@Base 1.0.2
- dbus_message_has_member@Base 1.0.2
- dbus_message_has_path@Base 1.0.2
- dbus_message_has_sender@Base 1.0.2
- dbus_message_has_signature@Base 1.0.2
- dbus_message_is_error@Base 1.0.2
- dbus_message_is_method_call@Base 1.0.2
- dbus_message_is_signal@Base 1.0.2
- dbus_message_iter_abandon_container@Base 1.2.16
- dbus_message_iter_append_basic@Base 1.0.2
- dbus_message_iter_append_fixed_array@Base 1.0.2
- dbus_message_iter_close_container@Base 1.0.2
- dbus_message_iter_get_arg_type@Base 1.0.2
- dbus_message_iter_get_array_len@Base 1.0.2
- dbus_message_iter_get_basic@Base 1.0.2
- dbus_message_iter_get_element_type@Base 1.0.2
- dbus_message_iter_get_fixed_array@Base 1.0.2
- dbus_message_iter_get_signature@Base 1.0.2
- dbus_message_iter_has_next@Base 1.0.2
- dbus_message_iter_init@Base 1.0.2
- dbus_message_iter_init_append@Base 1.0.2
- dbus_message_iter_next@Base 1.0.2
- dbus_message_iter_open_container@Base 1.0.2
- dbus_message_iter_recurse@Base 1.0.2
- dbus_message_lock@Base 1.2.14
- dbus_message_marshal@Base 1.1.1
- dbus_message_new@Base 1.0.2
- dbus_message_new_error@Base 1.0.2
- dbus_message_new_error_printf@Base 1.0.2
- dbus_message_new_method_call@Base 1.0.2
- dbus_message_new_method_return@Base 1.0.2
- dbus_message_new_signal@Base 1.0.2
- dbus_message_ref@Base 1.0.2
- dbus_message_set_auto_start@Base 1.0.2
- dbus_message_set_data@Base 1.0.2
- dbus_message_set_destination@Base 1.0.2
- dbus_message_set_error_name@Base 1.0.2
- dbus_message_set_interface@Base 1.0.2
- dbus_message_set_member@Base 1.0.2
- dbus_message_set_no_reply@Base 1.0.2
- dbus_message_set_path@Base 1.0.2
- dbus_message_set_reply_serial@Base 1.0.2
- dbus_message_set_sender@Base 1.0.2
- dbus_message_set_serial@Base 1.2.14
- dbus_message_type_from_string@Base 1.0.2
- dbus_message_type_to_string@Base 1.0.2
- dbus_message_unref@Base 1.0.2
- dbus_move_error@Base 1.0.2
- dbus_parse_address@Base 1.0.2
- dbus_pending_call_allocate_data_slot@Base 1.0.2
- dbus_pending_call_block@Base 1.0.2
- dbus_pending_call_cancel@Base 1.0.2
- dbus_pending_call_free_data_slot@Base 1.0.2
- dbus_pending_call_get_completed@Base 1.0.2
- dbus_pending_call_get_data@Base 1.0.2
- dbus_pending_call_ref@Base 1.0.2
- dbus_pending_call_set_data@Base 1.0.2
- dbus_pending_call_set_notify@Base 1.0.2
- dbus_pending_call_steal_reply@Base 1.0.2
- dbus_pending_call_unref@Base 1.0.2
- dbus_realloc@Base 1.0.2
- dbus_server_allocate_data_slot@Base 1.0.2
- dbus_server_disconnect@Base 1.0.2
- dbus_server_free_data_slot@Base 1.0.2
- dbus_server_get_address@Base 1.0.2
- dbus_server_get_data@Base 1.0.2
- dbus_server_get_id@Base 1.1.1
- dbus_server_get_is_connected@Base 1.0.2
- dbus_server_listen@Base 1.0.2
- dbus_server_ref@Base 1.0.2
- dbus_server_set_auth_mechanisms@Base 1.0.2
- dbus_server_set_data@Base 1.0.2
- dbus_server_set_new_connection_function@Base 1.0.2
- dbus_server_set_timeout_functions@Base 1.0.2
- dbus_server_set_watch_functions@Base 1.0.2
- dbus_server_unref@Base 1.0.2
- dbus_set_error@Base 1.0.2
- dbus_set_error_const@Base 1.0.2
- dbus_set_error_from_message@Base 1.0.2
- dbus_setenv@Base 1.7.6
- dbus_shutdown@Base 1.0.2
- dbus_signature_iter_get_current_type@Base 1.0.2
- dbus_signature_iter_get_element_type@Base 1.0.2
- dbus_signature_iter_get_signature@Base 1.0.2
- dbus_signature_iter_init@Base 1.0.2
- dbus_signature_iter_next@Base 1.0.2
- dbus_signature_iter_recurse@Base 1.0.2
- dbus_signature_validate@Base 1.0.2
- dbus_signature_validate_single@Base 1.0.2
- dbus_threads_init@Base 1.0.2
- dbus_threads_init_default@Base 1.0.2
- dbus_timeout_get_data@Base 1.0.2
- dbus_timeout_get_enabled@Base 1.0.2
- dbus_timeout_get_interval@Base 1.0.2
- dbus_timeout_handle@Base 1.0.2
- dbus_timeout_set_data@Base 1.0.2
- dbus_type_is_basic@Base 1.0.2
- dbus_type_is_container@Base 1.0.2
- dbus_type_is_fixed@Base 1.0.2
- dbus_type_is_valid@Base 1.5.0
- dbus_validate_bus_name@Base 1.5.12
- dbus_validate_error_name@Base 1.5.12
- dbus_validate_interface@Base 1.5.12
- dbus_validate_member@Base 1.5.12
- dbus_validate_path@Base 1.5.12
- dbus_validate_utf8@Base 1.5.12
- dbus_watch_get_data@Base 1.0.2
- dbus_watch_get_enabled@Base 1.0.2
- dbus_watch_get_fd@Base 1.0.2
- dbus_watch_get_flags@Base 1.0.2
- dbus_watch_get_socket@Base 1.1.1
- dbus_watch_get_unix_fd@Base 1.1.1
- dbus_watch_handle@Base 1.0.2
- dbus_watch_set_data@Base 1.0.2
diff --git a/build/linux/sysroot_scripts/merge-package-lists.py b/build/linux/sysroot_scripts/merge-package-lists.py
deleted file mode 100755
index 58bd163..0000000
--- a/build/linux/sysroot_scripts/merge-package-lists.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Merge package entries from different package lists.
-"""
-
-# This is used for replacing packages in e.g. sid with those in experimental.
-# The updated packages are ABI compatible, but include security patches, so we
-# should use those instead in our sysroots.
-
-import sys
-
-if len(sys.argv) != 2:
-  exit(1)
-
-packages = {}
-
-def AddPackagesFromFile(file):
-  global packages
-  lines = file.readlines()
-  if len(lines) % 3 != 0:
-    exit(1)
-  for i in xrange(0, len(lines), 3):
-    packages[lines[i]] = (lines[i + 1], lines[i + 2])
-
-AddPackagesFromFile(open(sys.argv[1], 'r'))
-AddPackagesFromFile(sys.stdin)
-
-output_file = open(sys.argv[1], 'w')
-
-for (package, (filename, sha256)) in packages.iteritems():
-  output_file.write(package + filename + sha256)
diff --git a/build/linux/sysroot_scripts/packagelist.sid.amd64 b/build/linux/sysroot_scripts/packagelist.sid.amd64
deleted file mode 100644
index 68fba9f..0000000
--- a/build/linux/sysroot_scripts/packagelist.sid.amd64
+++ /dev/null
@@ -1,311 +0,0 @@
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.3-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.3-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.28.1-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.28.1-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.26.2-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.26.2-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.28.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.28.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.2-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-3.1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-3.1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.47-1+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.47-1+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.10-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.10-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.10-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.10-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.3.3-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.7-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.7-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.7-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.7-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28-13.1+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.6-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.6-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.1-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.1-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.5-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.5-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.12.6-0.1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.12.6-0.1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_0.19.7-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libasan3_6.4.0-15_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-15_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-15_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libcilkrts5_7.3.0-14_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libubsan0_7.3.0-14_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8-20180402-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8-20180402-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8-20180402-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libitm1_8-20180402-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/liblsan0_8-20180402-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libmpx2_8-20180402-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libquadmath0_8-20180402-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8-20180402-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libtsan0_8-20180402-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.36.11-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.36.11-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.56.0-6_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.56.0-6_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.18-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.18-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.18-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.18-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.18-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.11-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.11-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-dev_2.24.32-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.22.29-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.22.29-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.7.2-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.7.2-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.7.2-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.7.2-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.8.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.7-3.1+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.10-7_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_16.04.1+17.04.20170109.1-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_16.04.1+17.04.20170109.1-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_16.04.1+17.04.20170109.1-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_16.04.1+17.04.20170109.1-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.91-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.91-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.91-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-intel1_2.4.91-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.91-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.91-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.4.3-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.4.3-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.2-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.2-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.0.0+git20180308-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.0.0+git20180308-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.0.0+git20180308-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.0.0+git20180308-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.0.0+git20180308-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.0.0+git20180308-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.0.0+git20180308-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.28-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.28-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.4-1.1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-3+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-3+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.7-2+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.62.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.62.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.27-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.8-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.1.0-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.1.0-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.1.0-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.1.0-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.1.0-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.1.0-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.5-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.5-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.5-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.5-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.3-1+b3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.3-1+b3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-6.1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-6.1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.29-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.29-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.2-1+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.2-1+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.15.11-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_0.0~r131-2+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.08-1.2+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_17.3.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_17.3.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_17.3.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_17.3.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_17.3.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_17.3.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_17.3.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_17.3.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_17.3.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.18-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.18-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.35-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.35-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.0h-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.0h-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.2.1-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.2.1-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.10-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.7_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.7_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-9_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-9_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-9_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-9_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-9_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_11.1-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_11.1-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_11.1-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180301+dfsg-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180301+dfsg-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.23.0-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_238-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_238-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_238-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-4_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound2_1.6.7-1_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.31.1-0.5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.31.1-0.5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.31.1-0.5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.31.1-0.5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.14.0-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.14.0-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.14.0-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.14.0-2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.13-1_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-1+b2_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.8.dfsg-5_amd64.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.8.dfsg-5_amd64.deb
diff --git a/build/linux/sysroot_scripts/packagelist.sid.arm b/build/linux/sysroot_scripts/packagelist.sid.arm
deleted file mode 100644
index 4b7d022..0000000
--- a/build/linux/sysroot_scripts/packagelist.sid.arm
+++ /dev/null
@@ -1,309 +0,0 @@
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.3-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.3-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.28.1-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.28.1-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.26.2-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.26.2-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.28.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.28.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.2-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-3.1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-3.1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.47-1+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.47-1+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.10-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.10-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.10-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.10-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.3.3-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.7-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.7-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.7-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.7-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28-13.1+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.6-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.6-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.1-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.1-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.5-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.5-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.12.6-0.1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.12.6-0.1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_0.19.7-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libasan3_6.4.0-15_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-15_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-15_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libubsan0_7.3.0-14_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8-20180402-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8-20180402-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8-20180402-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8-20180402-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.36.11-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.36.11-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.56.0-6_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.56.0-6_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.18-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.18-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.18-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.18-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.18-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.11-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.11-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-dev_2.24.32-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.22.29-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.22.29-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.7.2-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.7.2-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.7.2-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.7.2-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.8.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.7-3.1+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.10-7_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_16.04.1+17.04.20170109.1-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_16.04.1+17.04.20170109.1-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_16.04.1+17.04.20170109.1-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_16.04.1+17.04.20170109.1-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.91-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.91-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.91-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-etnaviv1_2.4.91-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-exynos1_2.4.91-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-freedreno1_2.4.91-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.91-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-omap1_2.4.91-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.91-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-tegra0_2.4.91-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.4.3-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.4.3-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.2-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.2-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.0.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.0.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.0.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.0.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.0.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.0.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.0.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.28-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.28-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.4-1.1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-3+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-3+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.7-2+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.62.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.62.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.27-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.8-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.1.0-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.1.0-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.1.0-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.1.0-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.1.0-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.1.0-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.5-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.5-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.5-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.5-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.3-1+b3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.3-1+b3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-6.1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-6.1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.29-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.29-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.2-1+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.2-1+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.15.11-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_0.0~r131-2+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.08-1.2+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_17.3.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_17.3.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_17.3.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_17.3.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_17.3.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_17.3.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_17.3.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_17.3.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_17.3.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.18-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.18-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.35-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.35-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.0h-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.0h-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.2.1-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.2.1-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.10-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.7_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.7_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-9_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-9_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-9_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-9_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-9_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_11.1-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_11.1-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_11.1-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180301+dfsg-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180301+dfsg-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.23.0-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_238-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_238-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_238-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-4_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound2_1.6.7-1_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.31.1-0.5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.31.1-0.5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.31.1-0.5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.31.1-0.5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.14.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.14.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.14.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.14.0-2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.13-1_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-1+b2_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.8.dfsg-5_armhf.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.8.dfsg-5_armhf.deb
diff --git a/build/linux/sysroot_scripts/packagelist.sid.arm64 b/build/linux/sysroot_scripts/packagelist.sid.arm64
deleted file mode 100644
index 753a41a..0000000
--- a/build/linux/sysroot_scripts/packagelist.sid.arm64
+++ /dev/null
@@ -1,309 +0,0 @@
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.3-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.3-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.28.1-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.28.1-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.26.2-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.26.2-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.28.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.28.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.2-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-3.1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-3.1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.47-1+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.47-1+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.10-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.10-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.10-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.10-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.3.3-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.7-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.7-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.7-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.7-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28-13.1+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.6-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.6-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.1-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.1-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.5-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.5-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.12.6-0.1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.12.6-0.1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_0.19.7-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libasan3_6.4.0-15_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-15_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-15_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libubsan0_7.3.0-14_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8-20180402-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8-20180402-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8-20180402-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libitm1_8-20180402-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8-20180402-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.36.11-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.36.11-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.56.0-6_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.56.0-6_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.18-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.18-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.18-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.18-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.18-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.11-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.11-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-dev_2.24.32-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.22.29-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.22.29-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.7.2-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.7.2-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.7.2-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.7.2-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.8.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.7-3.1+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.10-7_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_16.04.1+17.04.20170109.1-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_16.04.1+17.04.20170109.1-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_16.04.1+17.04.20170109.1-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_16.04.1+17.04.20170109.1-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.91-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.91-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.91-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-freedreno1_2.4.91-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.91-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.91-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-tegra0_2.4.91-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.4.3-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.4.3-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.2-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.2-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.0.0+git20180308-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.0.0+git20180308-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.0.0+git20180308-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.0.0+git20180308-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.0.0+git20180308-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.0.0+git20180308-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.0.0+git20180308-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.28-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.28-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.4-1.1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-3+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-3+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.7-2+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.62.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.62.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.27-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.8-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.1.0-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.1.0-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.1.0-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.1.0-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.1.0-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.1.0-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.5-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.5-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.5-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.5-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.3-1+b3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.3-1+b3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-6.1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-6.1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.29-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.29-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.2-1+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.2-1+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.15.11-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_0.0~r131-2+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.08-1.2+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_17.3.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_17.3.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_17.3.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_17.3.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_17.3.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_17.3.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_17.3.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_17.3.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_17.3.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.18-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.18-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.35-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.35-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.0h-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.0h-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.2.1-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.2.1-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.10-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.7_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.7_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-9_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-9_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-9_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-9_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-9_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_11.1-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_11.1-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_11.1-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180301+dfsg-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180301+dfsg-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.23.0-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_238-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_238-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_238-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-4_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound2_1.6.7-1_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.31.1-0.5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.31.1-0.5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.31.1-0.5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.31.1-0.5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.14.0-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.14.0-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.14.0-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.14.0-2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.13-1_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-1+b2_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.8.dfsg-5_arm64.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.8.dfsg-5_arm64.deb
diff --git a/build/linux/sysroot_scripts/packagelist.sid.i386 b/build/linux/sysroot_scripts/packagelist.sid.i386
deleted file mode 100644
index d2d9131..0000000
--- a/build/linux/sysroot_scripts/packagelist.sid.i386
+++ /dev/null
@@ -1,309 +0,0 @@
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.3-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.3-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.28.1-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.28.1-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.26.2-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.26.2-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.28.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.28.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.2-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-3.1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-3.1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.47-1+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.47-1+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.10-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.10-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.10-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.10-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.3.3-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.7-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.7-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.7-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.7-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28-13.1+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.6-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.6-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.1-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.1-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.5-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.5-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.12.6-0.1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.12.6-0.1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_0.19.7-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libasan3_6.4.0-15_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-15_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-15_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libcilkrts5_7.3.0-14_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libubsan0_7.3.0-14_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8-20180402-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8-20180402-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8-20180402-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libitm1_8-20180402-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libmpx2_8-20180402-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libquadmath0_8-20180402-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8-20180402-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.36.11-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.36.11-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.56.0-6_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.56.0-6_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.18-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.18-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.18-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.18-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.18-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.11-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.11-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-dev_2.24.32-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.22.29-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.22.29-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.7.2-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.7.2-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.7.2-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.7.2-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_i386.deb
-http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.8.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.7-3.1+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.10-7_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_16.04.1+17.04.20170109.1-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_16.04.1+17.04.20170109.1-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_16.04.1+17.04.20170109.1-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_16.04.1+17.04.20170109.1-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.91-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.91-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.91-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-intel1_2.4.91-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.91-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.91-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.4.3-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.4.3-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.2-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.2-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.0.0+git20180308-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.0.0+git20180308-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.0.0+git20180308-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.0.0+git20180308-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.0.0+git20180308-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.0.0+git20180308-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.0.0+git20180308-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.28-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.28-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.4-1.1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-3+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-3+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.7-2+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.62.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.62.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.27-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.8-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.1.0-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.1.0-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.1.0-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.1.0-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.1.0-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.1.0-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.5-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.5-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.5-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.5-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.3-1+b3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.3-1+b3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-6.1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-6.1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.29-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.29-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.2-1+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.2-1+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.15.11-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_0.0~r131-2+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.08-1.2+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_17.3.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_17.3.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_17.3.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_17.3.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_17.3.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_17.3.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_17.3.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_17.3.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_17.3.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.18-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.18-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.35-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.35-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.0h-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.0h-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.2.1-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.2.1-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.10-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.7_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.7_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-9_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-9_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-9_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-9_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-9_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_11.1-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_11.1-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_11.1-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180301+dfsg-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180301+dfsg-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.23.0-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_238-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_238-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_238-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_i386.deb
-http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-4_i386.deb
-http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound2_1.6.7-1_i386.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.31.1-0.5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.31.1-0.5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.31.1-0.5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.31.1-0.5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.14.0-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.14.0-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.14.0-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.14.0-2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.13-1_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-1+b2_i386.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_i386.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.8.dfsg-5_i386.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.8.dfsg-5_i386.deb
diff --git a/build/linux/sysroot_scripts/packagelist.sid.mips64el b/build/linux/sysroot_scripts/packagelist.sid.mips64el
deleted file mode 100644
index e913cb9..0000000
--- a/build/linux/sysroot_scripts/packagelist.sid.mips64el
+++ /dev/null
@@ -1,302 +0,0 @@
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.3-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.3-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.28.1-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.28.1-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.26.2-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.26.2-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.28.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.28.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.2-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-3.1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-3.1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.47-1+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.47-1+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.10-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.10-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.10-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.10-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.3.3-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.7-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.7-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.7-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.7-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28-13.1+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.6-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.6-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.1-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.1-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.5-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.5-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.12.6-0.1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.12.6-0.1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_0.19.7-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-15_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-15_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8-20180402-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8-20180402-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8-20180402-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8-20180402-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.36.11-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.36.11-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.56.0-6_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.56.0-6_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.18-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.18-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.18-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.18-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.18-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.11-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.11-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-dev_2.24.32-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.22.29-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.22.29-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.7.2-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.7.2-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.7.2-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.7.2-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.8.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.7-3.1+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.10-7_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_16.04.1+17.04.20170109.1-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_16.04.1+17.04.20170109.1-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_16.04.1+17.04.20170109.1-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_16.04.1+17.04.20170109.1-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.91-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.91-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.91-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.91-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.91-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.4.3-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.4.3-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.2-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.2-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.0.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.0.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.0.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.0.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.0.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.0.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.0.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.28-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.28-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.4-1.1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-3+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-3+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.7-2+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.62.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.62.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.27-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.8-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.1.0-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.1.0-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.1.0-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.1.0-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.1.0-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.1.0-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.5-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.5-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.5-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.5-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.3-1+b3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.3-1+b3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-6.1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-6.1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.29-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.29-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.2-1+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.2-1+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.15.11-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_0.0~r131-2+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.08-1.2+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_17.3.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_17.3.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_17.3.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_17.3.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_17.3.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_17.3.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_17.3.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_17.3.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_17.3.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.18-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.18-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.35-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.35-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.0h-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.0h-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.2.1-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.2.1-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.10-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.7_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.7_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-9_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-9_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-9_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-9_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-9_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_11.1-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_11.1-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_11.1-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180301+dfsg-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180301+dfsg-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.23.0-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_238-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_238-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_238-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-4_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound2_1.6.7-1_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.31.1-0.5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.31.1-0.5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.31.1-0.5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.31.1-0.5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.14.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.14.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.14.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.14.0-2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.13-1_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-1+b2_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.8.dfsg-5_mips64el.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.8.dfsg-5_mips64el.deb
diff --git a/build/linux/sysroot_scripts/packagelist.sid.mipsel b/build/linux/sysroot_scripts/packagelist.sid.mipsel
deleted file mode 100644
index 2405752..0000000
--- a/build/linux/sysroot_scripts/packagelist.sid.mipsel
+++ /dev/null
@@ -1,302 +0,0 @@
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.3-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.3-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.28.1-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.28.1-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.26.2-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.26.2-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.28.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.28.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.2-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-3.1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-3.1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.47-1+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.47-1+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.10-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.10-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.10-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.10-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.3.3-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.7-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.7-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.7-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.7-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28-13.1+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.6-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.6-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.1-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.1-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.5-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.5-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.12.6-0.1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.12.6-0.1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_0.19.7-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-15_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-15_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8-20180402-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8-20180402-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8-20180402-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8-20180402-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.36.11-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.36.11-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.56.0-6_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.56.0-6_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.18-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.18-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.18-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.18-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.18-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.11-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.11-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-dev_2.24.32-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.22.29-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.22.29-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.7.2-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.7.2-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.7.2-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.7.2-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.8.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.7-3.1+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.10-7_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_16.04.1+17.04.20170109.1-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_16.04.1+17.04.20170109.1-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_16.04.1+17.04.20170109.1-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_16.04.1+17.04.20170109.1-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.91-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.91-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.91-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.91-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.91-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.4.3-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.4.3-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.2-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.2-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.0.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.0.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.0.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.0.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.0.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.0.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.0.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.28-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.28-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.4-1.1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-3+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-3+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.7-2+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.62.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.62.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.27-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.8-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.1.0-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.1.0-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.1.0-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.1.0-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.1.0-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.1.0-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.5-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.5-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.5-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.5-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.3-1+b3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.3-1+b3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-6.1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-6.1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.29-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.29-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.2-1+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.2-1+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.15.11-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_0.0~r131-2+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.08-1.2+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_17.3.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_17.3.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_17.3.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_17.3.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_17.3.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_17.3.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_17.3.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_17.3.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_17.3.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.18-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.18-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.35-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.35-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.0h-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.0h-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.2.1-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.2.1-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.10-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.7_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.7_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-9_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-9_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-9_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-9_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-9_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_11.1-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_11.1-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_11.1-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180301+dfsg-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180301+dfsg-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.23.0-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_238-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_238-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_238-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-4_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound2_1.6.7-1_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.31.1-0.5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.31.1-0.5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.31.1-0.5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.31.1-0.5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.14.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.14.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.14.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.14.0-2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.13-1_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-1+b2_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
-http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.8.dfsg-5_mipsel.deb
-http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.8.dfsg-5_mipsel.deb
diff --git a/build/linux/sysroot_scripts/sysroot-creator-sid.sh b/build/linux/sysroot_scripts/sysroot-creator-sid.sh
deleted file mode 100755
index 18aa037..0000000
--- a/build/linux/sysroot_scripts/sysroot-creator-sid.sh
+++ /dev/null
@@ -1,369 +0,0 @@
-#!/bin/bash
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-
-DISTRO=debian
-DIST=sid
-
-APT_SOURCES_LIST="http://ftp.us.debian.org/debian/ sid main"
-
-# gpg keyring file generated using:
-#   export KEYS="518E17E1 46925553 2B90D010 C857C906 F66AEC98 8AE22BA9 1A7B6500"
-#   gpg --recv-keys $KEYS
-#   gpg --output ./debian-archive-sid-stable.gpg --export $KEYS
-KEYRING_FILE="${SCRIPT_DIR}/debian-archive-sid-stable.gpg"
-
-HAS_ARCH_AMD64=1
-HAS_ARCH_I386=1
-HAS_ARCH_ARM=1
-HAS_ARCH_ARM64=1
-HAS_ARCH_MIPS=1
-HAS_ARCH_MIPS64EL=1
-
-# Sysroot packages: these are the packages needed to build chrome.
-# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated
-# by running this script in GeneratePackageList mode.
-DEBIAN_PACKAGES="\
-  comerr-dev
-  krb5-multidev
-  libappindicator-dev
-  libappindicator1
-  libappindicator3-1
-  libappindicator3-dev
-  libasound2
-  libasound2-dev
-  libasyncns0
-  libatk-bridge2.0-0
-  libatk-bridge2.0-dev
-  libatk1.0-0
-  libatk1.0-dev
-  libatomic1
-  libatspi2.0-0
-  libatspi2.0-dev
-  libattr1
-  libaudit1
-  libavahi-client3
-  libavahi-common3
-  libblkid1
-  libbluetooth-dev
-  libbluetooth3
-  libbrlapi-dev
-  libbrlapi0.6
-  libbsd0
-  libc6
-  libc6-dev
-  libcairo-gobject2
-  libcairo-script-interpreter2
-  libcairo2
-  libcairo2-dev
-  libcap-dev
-  libcap-ng0
-  libcap2
-  libcolord2
-  libcom-err2
-  libcups2
-  libcups2-dev
-  libcupsimage2
-  libcupsimage2-dev
-  libdatrie1
-  libdb5.3
-  libdbus-1-3
-  libdbus-1-dev
-  libdbus-glib-1-2
-  libdbusmenu-glib-dev
-  libdbusmenu-glib4
-  libdbusmenu-gtk3-4
-  libdbusmenu-gtk4
-  libdrm-amdgpu1
-  libdrm-dev
-  libdrm-nouveau2
-  libdrm-radeon1
-  libdrm2
-  libegl1
-  libegl1-mesa
-  libegl1-mesa-dev
-  libelf-dev
-  libelf1
-  libepoxy-dev
-  libepoxy0
-  libevent-2.1-6
-  libexpat1
-  libexpat1-dev
-  libffi-dev
-  libffi6
-  libflac-dev
-  libflac8
-  libfontconfig1
-  libfontconfig1-dev
-  libfreetype6
-  libfreetype6-dev
-  libfribidi0
-  libgbm-dev
-  libgbm1
-  libgcc-6-dev
-  libgcc1
-  libgcrypt20
-  libgcrypt20-dev
-  libgdk-pixbuf2.0-0
-  libgdk-pixbuf2.0-dev
-  libgl1
-  libgl1-mesa-dev
-  libgl1-mesa-glx
-  libglapi-mesa
-  libgles2
-  libglib2.0-0
-  libglib2.0-dev
-  libglvnd-dev
-  libglvnd0
-  libglx0
-  libgmp10
-  libgnome-keyring-dev
-  libgnome-keyring0
-  libgnutls-dane0
-  libgnutls-openssl27
-  libgnutls28-dev
-  libgnutls30
-  libgnutlsxx28
-  libgomp1
-  libgpg-error-dev
-  libgpg-error0
-  libgraphite2-3
-  libgraphite2-dev
-  libgssapi-krb5-2
-  libgssrpc4
-  libgtk-3-0
-  libgtk-3-dev
-  libgtk2.0-0
-  libgtk2.0-dev
-  libharfbuzz-dev
-  libharfbuzz-gobject0
-  libharfbuzz-icu0
-  libharfbuzz0b
-  libhogweed4
-  libice6
-  libicu57
-  libidl-2-0
-  libidn11
-  libidn2-0
-  libindicator3-7
-  libindicator7
-  libjbig0
-  libjpeg62-turbo
-  libjpeg62-turbo-dev
-  libjson-glib-1.0-0
-  libjsoncpp-dev
-  libjsoncpp1
-  libk5crypto3
-  libkadm5clnt-mit11
-  libkadm5srv-mit11
-  libkdb5-9
-  libkeyutils1
-  libkrb5-3
-  libkrb5-dev
-  libkrb5support0
-  liblcms2-2
-  libltdl7
-  liblz4-1
-  liblzma5
-  liblzo2-2
-  libminizip-dev
-  libminizip1
-  libmount1
-  libnettle6
-  libnspr4
-  libnspr4-dev
-  libnss-db
-  libnss3
-  libnss3-dev
-  libogg-dev
-  libogg0
-  libopengl0
-  libopus-dev
-  libopus0
-  liborbit-2-0
-  liborbit2
-  libp11-kit0
-  libpam0g
-  libpam0g-dev
-  libpango-1.0-0
-  libpango1.0-dev
-  libpangocairo-1.0-0
-  libpangoft2-1.0-0
-  libpangox-1.0-0
-  libpangoxft-1.0-0
-  libpci-dev
-  libpci3
-  libpciaccess0
-  libpcre16-3
-  libpcre3
-  libpcre3-dev
-  libpcre32-3
-  libpcrecpp0v5
-  libpixman-1-0
-  libpixman-1-dev
-  libpng-dev
-  libpng16-16
-  libpthread-stubs0-dev
-  libpulse-dev
-  libpulse-mainloop-glib0
-  libpulse0
-  libre2-4
-  libre2-dev
-  librest-0.7-0
-  libselinux1
-  libsm6
-  libsnappy-dev
-  libsnappy1v5
-  libsndfile1
-  libsoup-gnome2.4-1
-  libsoup2.4-1
-  libspeechd-dev
-  libspeechd2
-  libsqlite3-0
-  libssl-dev
-  libssl1.0.2
-  libssl1.1
-  libstdc++-6-dev
-  libstdc++6
-  libsystemd0
-  libtasn1-6
-  libthai0
-  libtiff5
-  libudev-dev
-  libudev1
-  libunbound2
-  libunistring2
-  libuuid1
-  libva-dev
-  libva-drm2
-  libva-glx2
-  libva-wayland2
-  libva-x11-2
-  libva2
-  libvorbis0a
-  libvorbisenc2
-  libvpx-dev
-  libvpx5
-  libwayland-client0
-  libwayland-cursor0
-  libwayland-dev
-  libwayland-egl1-mesa
-  libwayland-server0
-  libwebp-dev
-  libwebp6
-  libwebpdemux2
-  libwebpmux3
-  libwrap0
-  libx11-6
-  libx11-dev
-  libx11-xcb-dev
-  libx11-xcb1
-  libxau-dev
-  libxau6
-  libxcb-dri2-0
-  libxcb-dri3-0
-  libxcb-glx0
-  libxcb-present0
-  libxcb-render0
-  libxcb-render0-dev
-  libxcb-shm0
-  libxcb-shm0-dev
-  libxcb-sync1
-  libxcb-xfixes0
-  libxcb1
-  libxcb1-dev
-  libxcomposite-dev
-  libxcomposite1
-  libxcursor-dev
-  libxcursor1
-  libxdamage-dev
-  libxdamage1
-  libxdmcp-dev
-  libxdmcp6
-  libxext-dev
-  libxext6
-  libxfixes-dev
-  libxfixes3
-  libxft2
-  libxi-dev
-  libxi6
-  libxinerama-dev
-  libxinerama1
-  libxkbcommon-dev
-  libxkbcommon0
-  libxml2
-  libxml2-dev
-  libxrandr-dev
-  libxrandr2
-  libxrender-dev
-  libxrender1
-  libxshmfence1
-  libxslt1-dev
-  libxslt1.1
-  libxss-dev
-  libxss1
-  libxt-dev
-  libxt6
-  libxtst-dev
-  libxtst6
-  libxxf86vm1
-  linux-libc-dev
-  mesa-common-dev
-  speech-dispatcher
-  uuid-dev
-  wayland-protocols
-  x11proto-composite-dev
-  x11proto-damage-dev
-  x11proto-dev
-  x11proto-fixes-dev
-  x11proto-input-dev
-  x11proto-kb-dev
-  x11proto-randr-dev
-  x11proto-record-dev
-  x11proto-render-dev
-  x11proto-scrnsaver-dev
-  x11proto-xext-dev
-  x11proto-xinerama-dev
-  zlib1g
-  zlib1g-dev
-"
-
-DEBIAN_PACKAGES_AMD64="
-  liblsan0
-  libtsan0
-"
-
-DEBIAN_PACKAGES_X86="
-  libasan3
-  libcilkrts5
-  libdrm-intel1
-  libitm1
-  libmpx2
-  libquadmath0
-  libubsan0
-"
-
-DEBIAN_PACKAGES_ARM="
-  libasan3
-  libdrm-etnaviv1
-  libdrm-exynos1
-  libdrm-freedreno1
-  libdrm-omap1
-  libdrm-tegra0
-  libubsan0
-"
-
-DEBIAN_PACKAGES_ARM64="
-  libasan3
-  libdrm-freedreno1
-  libdrm-tegra0
-  libgmp10
-  libitm1
-  libthai0
-  libubsan0
-"
-
-. "${SCRIPT_DIR}/sysroot-creator.sh"
diff --git a/build/linux/sysroot_scripts/sysroot-creator.sh b/build/linux/sysroot_scripts/sysroot-creator.sh
deleted file mode 100644
index 98b9063..0000000
--- a/build/linux/sysroot_scripts/sysroot-creator.sh
+++ /dev/null
@@ -1,961 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# This script should not be run directly but sourced by the other
-# scripts (e.g. sysroot-creator-sid.sh).  Its up to the parent scripts
-# to define certain environment variables: e.g.
-#  DISTRO=debian
-#  DIST=sid
-#  # Similar in syntax to /etc/apt/sources.list
-#  APT_SOURCES_LIST="http://ftp.us.debian.org/debian/ sid main"
-#  KEYRING_FILE=debian-archive-sid-stable.gpg
-#  DEBIAN_PACKAGES="gcc libz libssl"
-
-#@ This script builds Debian/Ubuntu sysroot images for building Google Chrome.
-#@
-#@  Generally this script is invoked as:
-#@  sysroot-creator-<flavour>.sh <mode> <args>*
-#@  Available modes are shown below.
-#@
-#@ List of modes:
-
-######################################################################
-# Config
-######################################################################
-
-set -o nounset
-set -o errexit
-
-SCRIPT_DIR=$(cd $(dirname $0) && pwd)
-
-if [ -z "${DIST:-}" ]; then
-  echo "error: DIST not defined"
-  exit 1
-fi
-
-if [ -z "${KEYRING_FILE:-}" ]; then
-  echo "error: KEYRING_FILE not defined"
-  exit 1
-fi
-
-if [ -z "${DEBIAN_PACKAGES:-}" ]; then
-  echo "error: DEBIAN_PACKAGES not defined"
-  exit 1
-fi
-
-readonly HAS_ARCH_AMD64=${HAS_ARCH_AMD64:=0}
-readonly HAS_ARCH_I386=${HAS_ARCH_I386:=0}
-readonly HAS_ARCH_ARM=${HAS_ARCH_ARM:=0}
-readonly HAS_ARCH_ARM64=${HAS_ARCH_ARM64:=0}
-readonly HAS_ARCH_MIPS=${HAS_ARCH_MIPS:=0}
-readonly HAS_ARCH_MIPS64EL=${HAS_ARCH_MIPS64EL:=0}
-
-readonly REQUIRED_TOOLS="curl xzcat"
-
-######################################################################
-# Package Config
-######################################################################
-
-readonly PACKAGES_EXT=xz
-readonly RELEASE_FILE="Release"
-readonly RELEASE_FILE_GPG="Release.gpg"
-
-readonly DEBIAN_DEP_LIST_AMD64="packagelist.${DIST}.amd64"
-readonly DEBIAN_DEP_LIST_I386="packagelist.${DIST}.i386"
-readonly DEBIAN_DEP_LIST_ARM="packagelist.${DIST}.arm"
-readonly DEBIAN_DEP_LIST_ARM64="packagelist.${DIST}.arm64"
-readonly DEBIAN_DEP_LIST_MIPS="packagelist.${DIST}.mipsel"
-readonly DEBIAN_DEP_LIST_MIPS64EL="packagelist.${DIST}.mips64el"
-
-
-######################################################################
-# Helper
-######################################################################
-
-Banner() {
-  echo "######################################################################"
-  echo $*
-  echo "######################################################################"
-}
-
-
-SubBanner() {
-  echo "----------------------------------------------------------------------"
-  echo $*
-  echo "----------------------------------------------------------------------"
-}
-
-
-Usage() {
-  egrep "^#@" "${BASH_SOURCE[0]}" | cut --bytes=3-
-}
-
-
-DownloadOrCopy() {
-  if [ -f "$2" ] ; then
-    echo "$2 already in place"
-    return
-  fi
-
-  HTTP=0
-  echo "$1" | grep -qs ^http:// && HTTP=1
-  if [ "$HTTP" = "1" ]; then
-    SubBanner "downloading from $1 -> $2"
-    # Appending the "$$" shell pid is necessary here to prevent concurrent
-    # instances of sysroot-creator.sh from trying to write to the same file.
-    # --create-dirs is added in case there are slashes in the filename, as can
-    # happen with the "debian/security" release class.
-    curl -L "$1" --create-dirs -o "${2}.partial.$$"
-    mv "${2}.partial.$$" $2
-  else
-    SubBanner "copying from $1"
-    cp "$1" "$2"
-  fi
-}
-
-
-SetEnvironmentVariables() {
-  ARCH=""
-  echo $1 | grep -qs Amd64$ && ARCH=AMD64
-  if [ -z "$ARCH" ]; then
-    echo $1 | grep -qs I386$ && ARCH=I386
-  fi
-  if [ -z "$ARCH" ]; then
-    echo $1 | grep -qs Mips64el$ && ARCH=MIPS64EL
-  fi
-  if [ -z "$ARCH" ]; then
-    echo $1 | grep -qs Mips$ && ARCH=MIPS
-  fi
-  if [ -z "$ARCH" ]; then
-    echo $1 | grep -qs ARM$ && ARCH=ARM
-  fi
-  if [ -z "$ARCH" ]; then
-    echo $1 | grep -qs ARM64$ && ARCH=ARM64
-  fi
-  if [ -z "${ARCH}" ]; then
-    echo "ERROR: Unable to determine architecture based on: $1"
-    exit 1
-  fi
-  ARCH_LOWER=$(echo $ARCH | tr '[:upper:]' '[:lower:]')
-}
-
-
-# some sanity checks to make sure this script is run from the right place
-# with the right tools
-SanityCheck() {
-  Banner "Sanity Checks"
-
-  local chrome_dir=$(cd "${SCRIPT_DIR}/../../.." && pwd)
-  BUILD_DIR="${chrome_dir}/out/sysroot-build/${DIST}"
-  mkdir -p ${BUILD_DIR}
-  echo "Using build directory: ${BUILD_DIR}"
-
-  for tool in ${REQUIRED_TOOLS} ; do
-    if ! which ${tool} > /dev/null ; then
-      echo "Required binary $tool not found."
-      echo "Exiting."
-      exit 1
-    fi
-  done
-
-  # This is where the staging sysroot is.
-  INSTALL_ROOT="${BUILD_DIR}/${DIST}_${ARCH_LOWER}_staging"
-  TARBALL="${BUILD_DIR}/${DISTRO}_${DIST}_${ARCH_LOWER}_sysroot.tar.xz"
-
-  if ! mkdir -p "${INSTALL_ROOT}" ; then
-    echo "ERROR: ${INSTALL_ROOT} can't be created."
-    exit 1
-  fi
-}
-
-
-ChangeDirectory() {
-  # Change directory to where this script is.
-  cd ${SCRIPT_DIR}
-}
-
-
-ClearInstallDir() {
-  Banner "Clearing dirs in ${INSTALL_ROOT}"
-  rm -rf ${INSTALL_ROOT}/*
-}
-
-
-CreateTarBall() {
-  Banner "Creating tarball ${TARBALL}"
-  tar -I "xz -9 -T0" -cf ${TARBALL} -C ${INSTALL_ROOT} .
-}
-
-ExtractPackageXz() {
-  local src_file="$1"
-  local dst_file="$2"
-  local repo="$3"
-  xzcat "${src_file}" | egrep '^(Package:|Filename:|SHA256:) ' |
-    sed "s|Filename: |Filename: ${repo}|" > "${dst_file}"
-}
-
-GeneratePackageListDist() {
-  local arch="$1"
-  set -- $2
-  local repo="$1"
-  local dist="$2"
-  local repo_name="$3"
-
-  TMP_PACKAGE_LIST="${BUILD_DIR}/Packages.${dist}_${repo_name}_${arch}"
-  local repo_basedir="${repo}/dists/${dist}"
-  local package_list="${BUILD_DIR}/Packages.${dist}_${repo_name}_${arch}.${PACKAGES_EXT}"
-  local package_file_arch="${repo_name}/binary-${arch}/Packages.${PACKAGES_EXT}"
-  local package_list_arch="${repo_basedir}/${package_file_arch}"
-
-  DownloadOrCopy "${package_list_arch}" "${package_list}"
-  VerifyPackageListing "${package_file_arch}" "${package_list}" ${repo} ${dist}
-  ExtractPackageXz "${package_list}" "${TMP_PACKAGE_LIST}" ${repo}
-}
-
-GeneratePackageListCommon() {
-  local output_file="$1"
-  local arch="$2"
-  local packages="$3"
-
-  local dists="${DIST} ${DIST_UPDATES:-}"
-  local repos="main ${REPO_EXTRA:-}"
-
-  local list_base="${BUILD_DIR}/Packages.${DIST}_${arch}"
-  > "${list_base}"  # Create (or truncate) a zero-length file.
-  echo "${APT_SOURCES_LIST}" | while read source; do
-    GeneratePackageListDist "${arch}" "${source}"
-    cat "${TMP_PACKAGE_LIST}" | ./merge-package-lists.py "${list_base}"
-  done
-
-  GeneratePackageList "${list_base}" "${output_file}" "${packages}"
-}
-
-GeneratePackageListAmd64() {
-  GeneratePackageListCommon "$1" amd64 "${DEBIAN_PACKAGES}
-    ${DEBIAN_PACKAGES_X86:=} ${DEBIAN_PACKAGES_AMD64:=}"
-}
-
-GeneratePackageListI386() {
-  GeneratePackageListCommon "$1" i386 "${DEBIAN_PACKAGES}
-    ${DEBIAN_PACKAGES_X86:=}"
-}
-
-GeneratePackageListARM() {
-  GeneratePackageListCommon "$1" armhf "${DEBIAN_PACKAGES}
-    ${DEBIAN_PACKAGES_ARM:=}"
-}
-
-GeneratePackageListARM64() {
-  GeneratePackageListCommon "$1" arm64 "${DEBIAN_PACKAGES}
-    ${DEBIAN_PACKAGES_ARM64:=}"
-}
-
-GeneratePackageListMips() {
-  GeneratePackageListCommon "$1" mipsel "${DEBIAN_PACKAGES}"
-}
-
-GeneratePackageListMips64el() {
-  GeneratePackageListCommon "$1" mips64el "${DEBIAN_PACKAGES}
-  ${DEBIAN_PACKAGES_MIPS64EL:=}"
-}
-
-StripChecksumsFromPackageList() {
-  local package_file="$1"
-  sed -i 's/ [a-f0-9]\{64\}$//' "$package_file"
-}
-
-VerifyPackageFilesMatch() {
-  local downloaded_package_file="$1"
-  local stored_package_file="$2"
-  diff -u "$downloaded_package_file" "$stored_package_file"
-  if [ "$?" -ne "0" ]; then
-    echo "ERROR: downloaded package files does not match $2."
-    echo "You may need to run UpdatePackageLists."
-    exit 1
-  fi
-}
-
-######################################################################
-#
-######################################################################
-
-HacksAndPatchesCommon() {
-  local arch=$1
-  local os=$2
-  local strip=$3
-  Banner "Misc Hacks & Patches"
-  # these are linker scripts with absolute pathnames in them
-  # which we rewrite here
-  lscripts="${INSTALL_ROOT}/usr/lib/${arch}-${os}/libpthread.so \
-            ${INSTALL_ROOT}/usr/lib/${arch}-${os}/libc.so"
-
-  # Rewrite linker scripts
-  sed -i -e 's|/usr/lib/${arch}-${os}/||g'  ${lscripts}
-  sed -i -e 's|/lib/${arch}-${os}/||g' ${lscripts}
-
-  # Unversion libdbus symbols.  This is required because libdbus-1-3
-  # switched from unversioned symbols to versioned ones, and we must
-  # still support distros using the unversioned library.  This hack
-  # can be removed once support for Ubuntu Trusty and Debian Jessie
-  # are dropped.
-  ${strip} -R .gnu.version_d -R .gnu.version \
-    "${INSTALL_ROOT}/lib/${arch}-${os}/libdbus-1.so.3"
-  cp "${SCRIPT_DIR}/libdbus-1-3-symbols" \
-    "${INSTALL_ROOT}/debian/libdbus-1-3/DEBIAN/symbols"
-
-  # Glibc 2.27 introduced some new optimizations to several math functions, but
-  # it will be a while before it makes it into all supported distros.  Luckily,
-  # glibc maintains ABI compatibility with previous versions, so the old symbols
-  # are still there.
-  # TODO(thomasanderson): Remove this once glibc 2.27 is available on all
-  # supported distros.
-  local math_h="${INSTALL_ROOT}/usr/include/math.h"
-  local libm_so="${INSTALL_ROOT}/lib/${arch}-${os}/libm.so.6"
-  nm -D --defined-only --with-symbol-versions "${libm_so}" | \
-    "${SCRIPT_DIR}/find_incompatible_glibc_symbols.py" >> "${math_h}"
-
-  # glob64() was also optimized in glibc 2.27.  Make sure to choose the older
-  # version.
-  local glob_h="${INSTALL_ROOT}/usr/include/glob.h"
-  local libc_so="${INSTALL_ROOT}/lib/${arch}-${os}/libc.so.6"
-  nm -D --defined-only --with-symbol-versions "${libc_so}" | \
-    "${SCRIPT_DIR}/find_incompatible_glibc_symbols.py" >> "${glob_h}"
-
-  # This is for chrome's ./build/linux/pkg-config-wrapper
-  # which overwrites PKG_CONFIG_LIBDIR internally
-  SubBanner "Move pkgconfig scripts"
-  mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig
-  mv ${INSTALL_ROOT}/usr/lib/${arch}-${os}/pkgconfig/* \
-      ${INSTALL_ROOT}/usr/lib/pkgconfig
-}
-
-
-HacksAndPatchesAmd64() {
-  HacksAndPatchesCommon x86_64 linux-gnu strip
-}
-
-
-HacksAndPatchesI386() {
-  HacksAndPatchesCommon i386 linux-gnu strip
-}
-
-
-HacksAndPatchesARM() {
-  HacksAndPatchesCommon arm linux-gnueabihf arm-linux-gnueabihf-strip
-}
-
-
-HacksAndPatchesARM64() {
-  # Use the unstripped libdbus for arm64 to prevent linker errors.
-  # https://bugs.chromium.org/p/webrtc/issues/detail?id=8535
-  HacksAndPatchesCommon aarch64 linux-gnu true
-}
-
-
-HacksAndPatchesMips() {
-  HacksAndPatchesCommon mipsel linux-gnu mipsel-linux-gnu-strip
-}
-
-
-HacksAndPatchesMips64el() {
-  HacksAndPatchesCommon mips64el linux-gnuabi64 mips64el-linux-gnuabi64-strip
-}
-
-
-InstallIntoSysroot() {
-  Banner "Install Libs And Headers Into Jail"
-
-  mkdir -p ${BUILD_DIR}/debian-packages
-  # The /debian directory is an implementation detail that's used to cd into
-  # when running dpkg-shlibdeps.
-  mkdir -p ${INSTALL_ROOT}/debian
-  # An empty control file is necessary to run dpkg-shlibdeps.
-  touch ${INSTALL_ROOT}/debian/control
-  while (( "$#" )); do
-    local file="$1"
-    local package="${BUILD_DIR}/debian-packages/${file##*/}"
-    shift
-    local sha256sum="$1"
-    shift
-    if [ "${#sha256sum}" -ne "64" ]; then
-      echo "Bad sha256sum from package list"
-      exit 1
-    fi
-
-    Banner "Installing $(basename ${file})"
-    DownloadOrCopy ${file} ${package}
-    if [ ! -s "${package}" ] ; then
-      echo
-      echo "ERROR: bad package ${package}"
-      exit 1
-    fi
-    echo "${sha256sum}  ${package}" | sha256sum --quiet -c
-
-    SubBanner "Extracting to ${INSTALL_ROOT}"
-    dpkg-deb -x ${package} ${INSTALL_ROOT}
-
-    base_package=$(dpkg-deb --field ${package} Package)
-    mkdir -p ${INSTALL_ROOT}/debian/${base_package}/DEBIAN
-    dpkg-deb -e ${package} ${INSTALL_ROOT}/debian/${base_package}/DEBIAN
-  done
-
-  # Prune /usr/share, leaving only pkgconfig
-  for name in ${INSTALL_ROOT}/usr/share/*; do
-    if [ "${name}" != "${INSTALL_ROOT}/usr/share/pkgconfig" ]; then
-      rm -r ${name}
-    fi
-  done
-}
-
-
-CleanupJailSymlinks() {
-  Banner "Jail symlink cleanup"
-
-  SAVEDPWD=$(pwd)
-  cd ${INSTALL_ROOT}
-  local libdirs="lib usr/lib"
-  if [ "${ARCH}" != "MIPS" ]; then
-    libdirs="${libdirs} lib64"
-  fi
-  find $libdirs -type l -printf '%p %l\n' | while read link target; do
-    # skip links with non-absolute paths
-    echo "${target}" | grep -qs ^/ || continue
-    echo "${link}: ${target}"
-    # Relativize the symlink.
-    prefix=$(echo "${link}" | sed -e 's/[^/]//g' | sed -e 's|/|../|g')
-    ln -snfv "${prefix}${target}" "${link}"
-  done
-
-  find $libdirs -type l -printf '%p %l\n' | while read link target; do
-    # Make sure we catch new bad links.
-    if [ ! -r "${link}" ]; then
-      echo "ERROR: FOUND BAD LINK ${link}"
-      ls -l ${link}
-      exit 1
-    fi
-  done
-  cd "$SAVEDPWD"
-}
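To make the relativization above concrete, a worked example with a hypothetical link inside the jail:

  link="usr/lib/x86_64-linux-gnu/libz.so"    # hypothetical symlink path
  target="/lib/x86_64-linux-gnu/libz.so.1"   # its absolute target
  prefix=$(echo "${link}" | sed -e 's/[^/]//g' | sed -e 's|/|../|g')
  echo "${prefix}"                           # "../../../" -- one "../" per directory level
  ln -snfv "${prefix}${target}" "${link}"    # target now resolves inside INSTALL_ROOT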
-
-
-VerifyLibraryDepsCommon() {
-  local arch=$1
-  local os=$2
-  local find_dirs=(
-    "${INSTALL_ROOT}/lib/${arch}-${os}/"
-    "${INSTALL_ROOT}/usr/lib/${arch}-${os}/"
-  )
-  local needed_libs="$(
-    find ${find_dirs[*]} -name "*\.so*" -type f -exec file {} \; | \
-      grep ': ELF' | sed 's/^\(.*\): .*$/\1/' | xargs readelf -d | \
-      grep NEEDED | sort | uniq | sed 's/^.*Shared library: \[\(.*\)\]$/\1/g')"
-  local all_libs="$(find ${find_dirs[*]} -printf '%f\n')"
-  local missing_libs="$(grep -vFxf <(echo "${all_libs}") \
-    <(echo "${needed_libs}"))"
-  if [ ! -z "${missing_libs}" ]; then
-    echo "Missing libraries:"
-    echo "${missing_libs}"
-    exit 1
-  fi
-}
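The check above boils down to comparing the union of DT_NEEDED entries against the file names actually installed in the sysroot. For a single library the raw data looks like this (library and dependencies are hypothetical):

  readelf -d "${INSTALL_ROOT}/usr/lib/x86_64-linux-gnu/libcairo.so.2" | grep NEEDED
  #  0x0000000000000001 (NEEDED)  Shared library: [libpixman-1.so.0]
  #  0x0000000000000001 (NEEDED)  Shared library: [libfontconfig.so.1]
  # Every bracketed name must also appear in all_libs; anything left over ends
  # up in missing_libs and fails the sysroot build.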
-
-
-VerifyLibraryDepsAmd64() {
-  VerifyLibraryDepsCommon x86_64 linux-gnu
-}
-
-
-VerifyLibraryDepsI386() {
-  VerifyLibraryDepsCommon i386 linux-gnu
-}
-
-
-VerifyLibraryDepsARM() {
-  VerifyLibraryDepsCommon arm linux-gnueabihf
-}
-
-
-VerifyLibraryDepsARM64() {
-  VerifyLibraryDepsCommon aarch64 linux-gnu
-}
-
-
-VerifyLibraryDepsMips() {
-  VerifyLibraryDepsCommon mipsel linux-gnu
-}
-
-
-VerifyLibraryDepsMips64el() {
-  VerifyLibraryDepsCommon mips64el linux-gnuabi64
-}
-
-
-#@
-#@ BuildSysrootAmd64
-#@
-#@    Build everything and package it
-BuildSysrootAmd64() {
-  if [ "$HAS_ARCH_AMD64" = "0" ]; then
-    return
-  fi
-  ClearInstallDir
-  local package_file="$BUILD_DIR/package_with_sha256sum_amd64"
-  GeneratePackageListAmd64 "$package_file"
-  local files_and_sha256sums="$(cat ${package_file})"
-  StripChecksumsFromPackageList "$package_file"
-  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_AMD64"
-  InstallIntoSysroot ${files_and_sha256sums}
-  CleanupJailSymlinks
-  HacksAndPatchesAmd64
-  VerifyLibraryDepsAmd64
-  CreateTarBall
-}
-
-#@
-#@ BuildSysrootI386
-#@
-#@    Build everything and package it
-BuildSysrootI386() {
-  if [ "$HAS_ARCH_I386" = "0" ]; then
-    return
-  fi
-  ClearInstallDir
-  local package_file="$BUILD_DIR/package_with_sha256sum_i386"
-  GeneratePackageListI386 "$package_file"
-  local files_and_sha256sums="$(cat ${package_file})"
-  StripChecksumsFromPackageList "$package_file"
-  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_I386"
-  InstallIntoSysroot ${files_and_sha256sums}
-  CleanupJailSymlinks
-  HacksAndPatchesI386
-  VerifyLibraryDepsI386
-  CreateTarBall
-}
-
-#@
-#@ BuildSysrootARM
-#@
-#@    Build everything and package it
-BuildSysrootARM() {
-  if [ "$HAS_ARCH_ARM" = "0" ]; then
-    return
-  fi
-  ClearInstallDir
-  local package_file="$BUILD_DIR/package_with_sha256sum_arm"
-  GeneratePackageListARM "$package_file"
-  local files_and_sha256sums="$(cat ${package_file})"
-  StripChecksumsFromPackageList "$package_file"
-  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_ARM"
-  InstallIntoSysroot ${files_and_sha256sums}
-  CleanupJailSymlinks
-  HacksAndPatchesARM
-  VerifyLibraryDepsARM
-  CreateTarBall
-}
-
-#@
-#@ BuildSysrootARM64
-#@
-#@    Build everything and package it
-BuildSysrootARM64() {
-  if [ "$HAS_ARCH_ARM64" = "0" ]; then
-    return
-  fi
-  ClearInstallDir
-  local package_file="$BUILD_DIR/package_with_sha256sum_arm64"
-  GeneratePackageListARM64 "$package_file"
-  local files_and_sha256sums="$(cat ${package_file})"
-  StripChecksumsFromPackageList "$package_file"
-  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_ARM64"
-  InstallIntoSysroot ${files_and_sha256sums}
-  CleanupJailSymlinks
-  HacksAndPatchesARM64
-  VerifyLibraryDepsARM64
-  CreateTarBall
-}
-
-
-#@
-#@ BuildSysrootMips
-#@
-#@    Build everything and package it
-BuildSysrootMips() {
-  if [ "$HAS_ARCH_MIPS" = "0" ]; then
-    return
-  fi
-  ClearInstallDir
-  local package_file="$BUILD_DIR/package_with_sha256sum_mips"
-  GeneratePackageListMips "$package_file"
-  local files_and_sha256sums="$(cat ${package_file})"
-  StripChecksumsFromPackageList "$package_file"
-  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_MIPS"
-  InstallIntoSysroot ${files_and_sha256sums}
-  CleanupJailSymlinks
-  HacksAndPatchesMips
-  VerifyLibraryDepsMips
-  CreateTarBall
-}
-
-
-#@
-#@ BuildSysrootMips64el
-#@
-#@    Build everything and package it
-BuildSysrootMips64el() {
-  if [ "$HAS_ARCH_MIPS64EL" = "0" ]; then
-    return
-  fi
-  ClearInstallDir
-  local package_file="$BUILD_DIR/package_with_sha256sum_mips64el"
-  GeneratePackageListMips64el "$package_file"
-  local files_and_sha256sums="$(cat ${package_file})"
-  StripChecksumsFromPackageList "$package_file"
-  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_MIPS64EL"
-  InstallIntoSysroot ${files_and_sha256sums}
-  CleanupJailSymlinks
-  HacksAndPatchesMips64el
-  VerifyLibraryDepsMips64el
-  CreateTarBall
-}
-
-#@
-#@ BuildSysrootAll
-#@
-#@    Build sysroot images for all architectures
-BuildSysrootAll() {
-  RunCommand BuildSysrootAmd64
-  RunCommand BuildSysrootI386
-  RunCommand BuildSysrootARM
-  RunCommand BuildSysrootARM64
-  RunCommand BuildSysrootMips
-  RunCommand BuildSysrootMips64el
-}
-
-UploadSysroot() {
-  local rev=$1
-  if [ -z "${rev}" ]; then
-    echo "Please specify a revision to upload at."
-    exit 1
-  fi
-  set -x
-  gsutil cp -a public-read "${TARBALL}" \
-      "gs://chrome-linux-sysroot/toolchain/$rev/"
-  set +x
-}
-
-#@
-#@ UploadSysrootAmd64 <revision>
-#@
-UploadSysrootAmd64() {
-  if [ "$HAS_ARCH_AMD64" = "0" ]; then
-    return
-  fi
-  UploadSysroot "$@"
-}
-
-#@
-#@ UploadSysrootI386 <revision>
-#@
-UploadSysrootI386() {
-  if [ "$HAS_ARCH_I386" = "0" ]; then
-    return
-  fi
-  UploadSysroot "$@"
-}
-
-#@
-#@ UploadSysrootARM <revision>
-#@
-UploadSysrootARM() {
-  if [ "$HAS_ARCH_ARM" = "0" ]; then
-    return
-  fi
-  UploadSysroot "$@"
-}
-
-#@
-#@ UploadSysrootARM64 <revision>
-#@
-UploadSysrootARM64() {
-  if [ "$HAS_ARCH_ARM64" = "0" ]; then
-    return
-  fi
-  UploadSysroot "$@"
-}
-
-#@
-#@ UploadSysrootMips <revision>
-#@
-UploadSysrootMips() {
-  if [ "$HAS_ARCH_MIPS" = "0" ]; then
-    return
-  fi
-  UploadSysroot "$@"
-}
-
-#@
-#@ UploadSysrootMips64el <revision>
-#@
-UploadSysrootMips64el() {
-  if [ "$HAS_ARCH_MIPS64EL" = "0" ]; then
-    return
-  fi
-  UploadSysroot "$@"
-}
-
-#@
-#@ UploadSysrootAll <revision>
-#@
-#@    Upload sysroot image for all architectures
-UploadSysrootAll() {
-  RunCommand UploadSysrootAmd64 "$@"
-  RunCommand UploadSysrootI386 "$@"
-  RunCommand UploadSysrootARM "$@"
-  RunCommand UploadSysrootARM64 "$@"
-  RunCommand UploadSysrootMips "$@"
-  RunCommand UploadSysrootMips64el "$@"
-}
-
-#
-# CheckForDebianGPGKeyring
-#
-#     Make sure the Debian GPG keys exist. Otherwise print a helpful message.
-#
-CheckForDebianGPGKeyring() {
-  if [ ! -e "$KEYRING_FILE" ]; then
-    echo "KEYRING_FILE not found: ${KEYRING_FILE}"
-    echo "Debian GPG keys missing. Install the debian-archive-keyring package."
-    exit 1
-  fi
-}
-
-#
-# VerifyPackageListing
-#
-#     Verifies the downloaded Packages.xz file has the right checksums.
-#
-VerifyPackageListing() {
-  local file_path="$1"
-  local output_file="$2"
-  local repo="$3"
-  local dist="$4"
-
-  local repo_basedir="${repo}/dists/${dist}"
-  local release_list="${repo_basedir}/${RELEASE_FILE}"
-  local release_list_gpg="${repo_basedir}/${RELEASE_FILE_GPG}"
-
-  local release_file="${BUILD_DIR}/${dist}-${RELEASE_FILE}"
-  local release_file_gpg="${BUILD_DIR}/${dist}-${RELEASE_FILE_GPG}"
-
-  CheckForDebianGPGKeyring
-
-  DownloadOrCopy ${release_list} ${release_file}
-  DownloadOrCopy ${release_list_gpg} ${release_file_gpg}
-  echo "Verifying: ${release_file} with ${release_file_gpg}"
-  set -x
-  gpgv --keyring "${KEYRING_FILE}" "${release_file_gpg}" "${release_file}"
-  set +x
-
-  echo "Verifying: ${output_file}"
-  local sha256sum=$(grep -E "${file_path}\$|:\$" "${release_file}" | \
-    grep "SHA256:" -A 1 | xargs echo | awk '{print $2;}')
-
-  if [ "${#sha256sum}" -ne "64" ]; then
-    echo "Bad sha256sum from ${release_list}"
-    exit 1
-  fi
-
-  echo "${sha256sum}  ${output_file}" | sha256sum --quiet -c
-}
-
-#
-# GeneratePackageList
-#
-#     Looks up package names in ${BUILD_DIR}/Packages and writes a list of
-#     URLs to the output file.
-#
-GeneratePackageList() {
-  local input_file="$1"
-  local output_file="$2"
-  echo "Updating: ${output_file} from ${input_file}"
-  /bin/rm -f "${output_file}"
-  shift
-  shift
-  for pkg in $@ ; do
-    local pkg_full=$(grep -A 1 " ${pkg}\$" "$input_file" | \
-      egrep "pool/.*" | sed 's/.*Filename: //')
-    if [ -z "${pkg_full}" ]; then
-        echo "ERROR: missing package: $pkg"
-        exit 1
-    fi
-    local sha256sum=$(grep -A 4 " ${pkg}\$" "$input_file" | \
-      grep ^SHA256: | sed 's/^SHA256: //')
-    if [ "${#sha256sum}" -ne "64" ]; then
-      echo "Bad sha256sum from Packages"
-      exit 1
-    fi
-    echo $pkg_full $sha256sum >> "$output_file"
-  done
-  # sort -o does an in-place sort of this file
-  sort "$output_file" -o "$output_file"
-}
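Each emitted line pairs a pool-relative Filename with its SHA256 checksum, for example (values hypothetical):

  # pool/main/z/zlib/zlib1g_1.2.11.dfsg-1_amd64.deb  <64-hex-digit sha256>
  # StripChecksumsFromPackageList later drops the checksum column, so the
  # checked-in dependency lists keep only the package paths.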
-
-#@
-#@ UpdatePackageListsAmd64
-#@
-#@     Regenerate the package lists such that they contain an up-to-date
-#@     list of URLs within the Debian archive. (For amd64)
-UpdatePackageListsAmd64() {
-  if [ "$HAS_ARCH_AMD64" = "0" ]; then
-    return
-  fi
-  GeneratePackageListAmd64 "$DEBIAN_DEP_LIST_AMD64"
-  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_AMD64"
-}
-
-#@
-#@ UpdatePackageListsI386
-#@
-#@     Regenerate the package lists such that they contain an up-to-date
-#@     list of URLs within the Debian archive. (For i386)
-UpdatePackageListsI386() {
-  if [ "$HAS_ARCH_I386" = "0" ]; then
-    return
-  fi
-  GeneratePackageListI386 "$DEBIAN_DEP_LIST_I386"
-  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_I386"
-}
-
-#@
-#@ UpdatePackageListsARM
-#@
-#@     Regenerate the package lists such that they contain an up-to-date
-#@     list of URLs within the Debian archive. (For arm)
-UpdatePackageListsARM() {
-  if [ "$HAS_ARCH_ARM" = "0" ]; then
-    return
-  fi
-  GeneratePackageListARM "$DEBIAN_DEP_LIST_ARM"
-  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_ARM"
-}
-
-#@
-#@ UpdatePackageListsARM64
-#@
-#@     Regenerate the package lists such that they contain an up-to-date
-#@     list of URLs within the Debian archive. (For arm64)
-UpdatePackageListsARM64() {
-  if [ "$HAS_ARCH_ARM64" = "0" ]; then
-    return
-  fi
-  GeneratePackageListARM64 "$DEBIAN_DEP_LIST_ARM64"
-  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_ARM64"
-}
-
-#@
-#@ UpdatePackageListsMips
-#@
-#@     Regenerate the package lists such that they contain an up-to-date
-#@     list of URLs within the Debian archive. (For mips)
-UpdatePackageListsMips() {
-  if [ "$HAS_ARCH_MIPS" = "0" ]; then
-    return
-  fi
-  GeneratePackageListMips "$DEBIAN_DEP_LIST_MIPS"
-  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_MIPS"
-}
-
-
-#@
-#@ UpdatePackageListsMips64el
-#@
-#@     Regenerate the package lists such that they contain an up-to-date
-#@     list of URLs within the Debian archive. (For mips64el)
-UpdatePackageListsMips64el() {
-  if [ "$HAS_ARCH_MIPS64EL" = "0" ]; then
-    return
-  fi
-  GeneratePackageListMips64el "$DEBIAN_DEP_LIST_MIPS64EL"
-  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_MIPS64EL"
-}
-
-#@
-#@ UpdatePackageListsAll
-#@
-#@    Regenerate the package lists for all architectures.
-UpdatePackageListsAll() {
-  RunCommand UpdatePackageListsAmd64
-  RunCommand UpdatePackageListsI386
-  RunCommand UpdatePackageListsARM
-  RunCommand UpdatePackageListsARM64
-  RunCommand UpdatePackageListsMips
-  RunCommand UpdatePackageListsMips64el
-}
-
-#@
-#@ PrintArchitectures
-#@
-#@    Prints supported architectures.
-PrintArchitectures() {
-  if [ "$HAS_ARCH_AMD64" = "1" ]; then
-    echo Amd64
-  fi
-  if [ "$HAS_ARCH_I386" = "1" ]; then
-    echo I386
-  fi
-  if [ "$HAS_ARCH_ARM" = "1" ]; then
-    echo ARM
-  fi
-  if [ "$HAS_ARCH_ARM64" = "1" ]; then
-    echo ARM64
-  fi
-  if [ "$HAS_ARCH_MIPS" = "1" ]; then
-    echo Mips
-  fi
-  if [ "$HAS_ARCH_MIPS64EL" = "1" ]; then
-    echo Mips64el
-  fi
-}
-
-#@
-#@ PrintDistro
-#@
-#@    Prints the distro.  e.g.: ubuntu
-PrintDistro() {
-  echo ${DISTRO}
-}
-
-#@
-#@ PrintRelease
-#@
-#@    Prints the distro release.  e.g.: jessie
-PrintRelease() {
-  echo ${DIST}
-}
-
-RunCommand() {
-  SetEnvironmentVariables "$1"
-  SanityCheck
-  "$@"
-}
-
-if [ $# -eq 0 ] ; then
-  echo "ERROR: you must specify a mode on the commandline"
-  echo
-  Usage
-  exit 1
-elif [ "$(type -t $1)" != "function" ]; then
-  echo "ERROR: unknown function '$1'." >&2
-  echo "For help, try:"
-  echo "    $0 help"
-  exit 1
-else
-  ChangeDirectory
-  if echo $1 | grep -qs --regexp='\(^Print\)\|\(All$\)'; then
-    "$@"
-  else
-    RunCommand "$@"
-  fi
-fi
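Typical invocations of the dispatch above, assuming a wrapper script named sysroot-creator-sid.sh (the actual wrapper is not shown in this hunk):

  ./sysroot-creator-sid.sh UpdatePackageListsAmd64        # refresh checked-in package lists
  ./sysroot-creator-sid.sh BuildSysrootAmd64              # build and tar the amd64 sysroot
  ./sysroot-creator-sid.sh UploadSysrootAmd64 <revision>  # copy the tarball to gs://
  ./sysroot-creator-sid.sh PrintArchitectures             # Print*/...All modes bypass RunCommand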
diff --git a/build/linux/sysroot_scripts/sysroots.json b/build/linux/sysroot_scripts/sysroots.json
deleted file mode 100644
index e79b69b..0000000
--- a/build/linux/sysroot_scripts/sysroots.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
-    "sid_amd64": {
-        "Revision": "1015a998c2adf188813cca60b558b0ea1a0b6ced",
-        "Sha1Sum": "4e7db513b0faeea8fb410f70c9909e8736f5c0ab",
-        "SysrootDir": "debian_sid_amd64-sysroot",
-        "Tarball": "debian_sid_amd64_sysroot.tar.xz"
-    },
-    "sid_arm": {
-        "Revision": "1015a998c2adf188813cca60b558b0ea1a0b6ced",
-        "Sha1Sum": "b2165ab47ab9c4cc780db53ace1b08607d68ae31",
-        "SysrootDir": "debian_sid_arm-sysroot",
-        "Tarball": "debian_sid_arm_sysroot.tar.xz"
-    },
-    "sid_arm64": {
-        "Revision": "1015a998c2adf188813cca60b558b0ea1a0b6ced",
-        "Sha1Sum": "37dfa5f22e2c0e847cee34f9848eb31230c33d35",
-        "SysrootDir": "debian_sid_arm64-sysroot",
-        "Tarball": "debian_sid_arm64_sysroot.tar.xz"
-    },
-    "sid_i386": {
-        "Revision": "1015a998c2adf188813cca60b558b0ea1a0b6ced",
-        "Sha1Sum": "b8639749e2b561a7462d7e7978ca74ee4044bdab",
-        "SysrootDir": "debian_sid_i386-sysroot",
-        "Tarball": "debian_sid_i386_sysroot.tar.xz"
-    },
-    "sid_mips": {
-        "Revision": "1015a998c2adf188813cca60b558b0ea1a0b6ced",
-        "Sha1Sum": "baaf37cdcbcf251fb9a4ccaf8b479722ae61fe49",
-        "SysrootDir": "debian_sid_mips-sysroot",
-        "Tarball": "debian_sid_mips_sysroot.tar.xz"
-    },
-    "sid_mips64el": {
-        "Revision": "1015a998c2adf188813cca60b558b0ea1a0b6ced",
-        "Sha1Sum": "bbfe1a513b849921cfcf78865faec8fc03f93b3d",
-        "SysrootDir": "debian_sid_mips64el-sysroot",
-        "Tarball": "debian_sid_mips64el_sysroot.tar.xz"
-    }
-}
diff --git a/build/linux/unbundle/README b/build/linux/unbundle/README
deleted file mode 100644
index 6e4f0a9..0000000
--- a/build/linux/unbundle/README
+++ /dev/null
@@ -1,57 +0,0 @@
-This directory contains files that make it possible for Linux
-distributions to build Chromium using system libraries and exclude the
-source code for Chromium's bundled copies of system libraries in a
-consistent manner. Nothing here is used in normal developer builds.
-
-
-For more info on the Linux distros' philosophy on bundling system
-libraries and why this exists, please read the following:
-
- - https://fedoraproject.org/wiki/Packaging:No_Bundled_Libraries
- - https://wiki.gentoo.org/wiki/Why_not_bundle_dependencies
- - http://www.debian.org/doc/debian-policy/ch-source.html#s-embeddedfiles
-
-For more Chromium-specific context please read
-http://spot.livejournal.com/312320.html .
-
-Additional resources which might provide even more context:
-
-  - http://events.linuxfoundation.org/sites/events/files/slides/LinuxCon%202014%20Slides_0.pdf
-  - https://lwn.net/Articles/619158/
-
-
-This directory is provided in the source tree so one can follow the
-above guidelines without having to download additional tools or worry
-about having the right version of the tool. It is a compromise that
-takes into account both Chromium developers, who want to avoid the
-perceived burden of more conditional code in build files, and the
-expectations of the open-source community, where using system-provided
-libraries is the norm.
-
-Usage:
-
-1. remove_bundled_libraries.py <preserved-directories>
-
-   For example: remove_bundled_libraries.py third_party/mesa
-
-   The script scans sources looking for third_party directories.
-   Everything that is not explicitly preserved is removed (except for
-   GYP/GN build files), and the script fails if any directory passed on
-   the command line does not exist (to ensure the list is kept up to date).
-
-   This is intended to be used on source code extracted from a tarball,
-   not on a git repository.
-
-   NOTE: by default this will not remove anything (for safety). Pass
-   the --do-remove flag to actually remove files.
-
-2. replace_gn_files.py --system-libraries lib...
-
-   This swaps out a library's normal GN build file, which is intended
-   for use with the bundled copy, for a build file that is set up to use
-   the system library. While some build files have use_system_libfoo
-   build flags, using unbundled build files has the advantage that Linux
-   distros can build Chromium without having to specify many additional
-   build flags.
-
-   For example: replace_gn_files.py --system-libraries libxml
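Putting the two steps together, a distro packager might run something like the following from the source root (library names are illustrative):

  # 1. Delete bundled third_party sources, preserving only the listed
  #    directories; GN/GYP build files are kept automatically.
  build/linux/unbundle/remove_bundled_libraries.py --do-remove third_party/mesa
  # 2. Point the build at system copies of selected libraries.
  build/linux/unbundle/replace_gn_files.py --system-libraries libxml zlib
  # The script keeps *.orig backups; rerun with --undo and the same
  # --system-libraries list to restore the original build files.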
diff --git a/build/linux/unbundle/ffmpeg.gn b/build/linux/unbundle/ffmpeg.gn
deleted file mode 100644
index ee3da15..0000000
--- a/build/linux/unbundle/ffmpeg.gn
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/buildflag_header.gni")
-import("//build/config/linux/pkg_config.gni")
-import("//build/shim_headers.gni")
-
-pkg_config("system_ffmpeg") {
-  packages = [
-    "libavcodec",
-    "libavformat",
-    "libavutil",
-  ]
-}
-
-buildflag_header("ffmpeg_buildflags") {
-  header = "ffmpeg_buildflags.h"
-  flags = [ "USE_SYSTEM_FFMPEG=true" ]
-}
-
-shim_headers("ffmpeg_shim") {
-  root_path = "."
-  headers = [
-    "libavcodec/avcodec.h",
-    "libavformat/avformat.h",
-    "libavutil/imgutils.h",
-  ]
-}
-
-source_set("ffmpeg") {
-  deps = [
-    ":ffmpeg_buildflags",
-    ":ffmpeg_shim",
-  ]
-  public_configs = [ ":system_ffmpeg" ]
-}
diff --git a/build/linux/unbundle/flac.gn b/build/linux/unbundle/flac.gn
deleted file mode 100644
index 5f2fdeb..0000000
--- a/build/linux/unbundle/flac.gn
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-import("//build/shim_headers.gni")
-
-pkg_config("system_flac") {
-  packages = [ "flac" ]
-}
-
-shim_headers("flac_shim") {
-  root_path = "include"
-  headers = [
-    "FLAC/all.h",
-    "FLAC/assert.h",
-    "FLAC/callback.h",
-    "FLAC/export.h",
-    "FLAC/format.h",
-    "FLAC/metadata.h",
-    "FLAC/ordinals.h",
-    "FLAC/stream_decoder.h",
-    "FLAC/stream_encoder.h",
-  ]
-}
-
-source_set("flac") {
-  deps = [
-    ":flac_shim",
-  ]
-  public_configs = [ ":system_flac" ]
-}
diff --git a/build/linux/unbundle/fontconfig.gn b/build/linux/unbundle/fontconfig.gn
deleted file mode 100644
index c1e2298..0000000
--- a/build/linux/unbundle/fontconfig.gn
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-assert(is_linux)
-
-config("fontconfig_config") {
-  libs = [ "fontconfig" ]
-}
-
-group("fontconfig") {
-  public_configs = [ ":fontconfig_config" ]
-}
diff --git a/build/linux/unbundle/freetype.gn b/build/linux/unbundle/freetype.gn
deleted file mode 100644
index cafa9db..0000000
--- a/build/linux/unbundle/freetype.gn
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-declare_args() {
-  # Blink needs a recent and properly build-configured FreeType version to
-  # support OpenType variations, color emoji and avoid security bugs. By default
-  # we ship and link such a version as part of Chrome. For distributions that
-  # prefer to keep linking to the version in the system, FreeType must be newer
-  # than version 2.7.1 and have color bitmap support compiled in. WARNING:
-  # System FreeType configurations other than as described WILL INTRODUCE TEXT
-  # RENDERING AND SECURITY REGRESSIONS.
-  use_system_freetype = true
-}
diff --git a/build/linux/unbundle/harfbuzz-ng.gn b/build/linux/unbundle/harfbuzz-ng.gn
deleted file mode 100644
index b4ba17a..0000000
--- a/build/linux/unbundle/harfbuzz-ng.gn
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-declare_args() {
-  # Blink uses a cutting-edge version of Harfbuzz (version listed in
-  # third_party/harfbuzz-ng/README.chromium); most Linux distros do not contain
-  # a new enough version of the code to work correctly. However, ChromeOS
-  # chroots (i.e. real ChromeOS builds for devices) do contain a new enough
-  # version of the library, and so this variable exists so that ChromeOS can
-  # build against the system lib and keep binary sizes smaller.
-  use_system_harfbuzz = true
-}
diff --git a/build/linux/unbundle/icu.gn b/build/linux/unbundle/icu.gn
deleted file mode 100644
index 4450e40..0000000
--- a/build/linux/unbundle/icu.gn
+++ /dev/null
@@ -1,258 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-import("//build/shim_headers.gni")
-
-group("icu") {
-  public_deps = [
-    ":icui18n",
-    ":icuuc",
-  ]
-}
-
-config("icu_config") {
-  defines = [
-    "USING_SYSTEM_ICU=1",
-    "ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_STATIC",
-    "UCHAR_TYPE=uint16_t",
-
-    # U_EXPORT (defined in unicode/platform.h) is used to set public visibility
-    # on classes through the U_COMMON_API and U_I18N_API macros (among others).
-    # When linking against the system ICU library, we want its symbols to have
-    # public LTO visibility. This disables CFI checks for the ICU classes and
-    # allows whole-program optimization to be applied to the rest of Chromium.
-    #
-    # Both U_COMMON_API and U_I18N_API macros would be defined to U_EXPORT only
-    # when U_COMBINED_IMPLEMENTATION is defined (see unicode/utypes.h). Because
-    # we override the default system UCHAR_TYPE (char16_t), it is not possible
-    # to use U_COMBINED_IMPLEMENTATION at this moment, meaning the U_COMMON_API
-    # and U_I18N_API macros are set to U_IMPORT which is an empty definition.
-    #
-    # Until building with UCHAR_TYPE=char16_t is supported, one way to apply
-    # public visibility (and thus public LTO visibility) to all ICU classes is
-    # to define U_IMPORT to have the same value as U_EXPORT. For more details,
-    # please see: https://crbug.com/822820
-    "U_IMPORT=U_EXPORT",
-  ]
-}
-
-pkg_config("system_icui18n") {
-  packages = [ "icu-i18n" ]
-}
-
-pkg_config("system_icuuc") {
-  packages = [ "icu-uc" ]
-}
-
-source_set("icui18n") {
-  deps = [
-    ":icui18n_shim",
-  ]
-  public_configs = [
-    ":icu_config",
-    ":system_icui18n",
-  ]
-}
-
-source_set("icuuc") {
-  deps = [
-    ":icuuc_shim",
-  ]
-  public_configs = [
-    ":icu_config",
-    ":system_icuuc",
-  ]
-}
-
-shim_headers("icui18n_shim") {
-  root_path = "source/i18n"
-  headers = [
-    # This list can easily be updated using the commands below:
-    # cd third_party/icu/source/i18n
-    # find unicode -iname '*.h' -printf '    "%p",\n' | LC_ALL=C sort -u
-    "unicode/alphaindex.h",
-    "unicode/basictz.h",
-    "unicode/calendar.h",
-    "unicode/choicfmt.h",
-    "unicode/coleitr.h",
-    "unicode/coll.h",
-    "unicode/compactdecimalformat.h",
-    "unicode/curramt.h",
-    "unicode/currpinf.h",
-    "unicode/currunit.h",
-    "unicode/datefmt.h",
-    "unicode/dcfmtsym.h",
-    "unicode/decimfmt.h",
-    "unicode/dtfmtsym.h",
-    "unicode/dtitvfmt.h",
-    "unicode/dtitvinf.h",
-    "unicode/dtptngen.h",
-    "unicode/dtrule.h",
-    "unicode/fieldpos.h",
-    "unicode/fmtable.h",
-    "unicode/format.h",
-    "unicode/fpositer.h",
-    "unicode/gender.h",
-    "unicode/gregocal.h",
-    "unicode/measfmt.h",
-    "unicode/measunit.h",
-    "unicode/measure.h",
-    "unicode/msgfmt.h",
-    "unicode/numfmt.h",
-    "unicode/numsys.h",
-    "unicode/plurfmt.h",
-    "unicode/plurrule.h",
-    "unicode/rbnf.h",
-    "unicode/rbtz.h",
-    "unicode/regex.h",
-    "unicode/region.h",
-    "unicode/reldatefmt.h",
-    "unicode/scientificnumberformatter.h",
-    "unicode/search.h",
-    "unicode/selfmt.h",
-    "unicode/simpletz.h",
-    "unicode/smpdtfmt.h",
-    "unicode/sortkey.h",
-    "unicode/stsearch.h",
-    "unicode/tblcoll.h",
-    "unicode/timezone.h",
-    "unicode/tmunit.h",
-    "unicode/tmutamt.h",
-    "unicode/tmutfmt.h",
-    "unicode/translit.h",
-    "unicode/tzfmt.h",
-    "unicode/tznames.h",
-    "unicode/tzrule.h",
-    "unicode/tztrans.h",
-    "unicode/ucal.h",
-    "unicode/ucol.h",
-    "unicode/ucoleitr.h",
-    "unicode/ucsdet.h",
-    "unicode/udat.h",
-    "unicode/udateintervalformat.h",
-    "unicode/udatpg.h",
-    "unicode/ufieldpositer.h",
-    "unicode/uformattable.h",
-    "unicode/ugender.h",
-    "unicode/ulocdata.h",
-    "unicode/umsg.h",
-    "unicode/unirepl.h",
-    "unicode/unum.h",
-    "unicode/unumsys.h",
-    "unicode/upluralrules.h",
-    "unicode/uregex.h",
-    "unicode/uregion.h",
-    "unicode/ureldatefmt.h",
-    "unicode/usearch.h",
-    "unicode/uspoof.h",
-    "unicode/utmscale.h",
-    "unicode/utrans.h",
-    "unicode/vtzone.h",
-  ]
-}
-
-shim_headers("icuuc_shim") {
-  root_path = "source/common"
-  headers = [
-    # This list can easily be updated using the commands below:
-    # cd third_party/icu/source/common
-    # find unicode -iname '*.h' -printf '    "%p",\n' | LC_ALL=C sort -u
-    "unicode/appendable.h",
-    "unicode/brkiter.h",
-    "unicode/bytestream.h",
-    "unicode/bytestrie.h",
-    "unicode/bytestriebuilder.h",
-    "unicode/caniter.h",
-    "unicode/casemap.h",
-    "unicode/char16ptr.h",
-    "unicode/chariter.h",
-    "unicode/dbbi.h",
-    "unicode/docmain.h",
-    "unicode/dtintrv.h",
-    "unicode/edits.h",
-    "unicode/enumset.h",
-    "unicode/errorcode.h",
-    "unicode/filteredbrk.h",
-    "unicode/icudataver.h",
-    "unicode/icuplug.h",
-    "unicode/idna.h",
-    "unicode/listformatter.h",
-    "unicode/localpointer.h",
-    "unicode/locdspnm.h",
-    "unicode/locid.h",
-    "unicode/messagepattern.h",
-    "unicode/normalizer2.h",
-    "unicode/normlzr.h",
-    "unicode/parseerr.h",
-    "unicode/parsepos.h",
-    "unicode/platform.h",
-    "unicode/ptypes.h",
-    "unicode/putil.h",
-    "unicode/rbbi.h",
-    "unicode/rep.h",
-    "unicode/resbund.h",
-    "unicode/schriter.h",
-    "unicode/simpleformatter.h",
-    "unicode/std_string.h",
-    "unicode/strenum.h",
-    "unicode/stringpiece.h",
-    "unicode/stringtriebuilder.h",
-    "unicode/symtable.h",
-    "unicode/ubidi.h",
-    "unicode/ubiditransform.h",
-    "unicode/ubrk.h",
-    "unicode/ucasemap.h",
-    "unicode/ucat.h",
-    "unicode/uchar.h",
-    "unicode/ucharstrie.h",
-    "unicode/ucharstriebuilder.h",
-    "unicode/uchriter.h",
-    "unicode/uclean.h",
-    "unicode/ucnv.h",
-    "unicode/ucnv_cb.h",
-    "unicode/ucnv_err.h",
-    "unicode/ucnvsel.h",
-    "unicode/uconfig.h",
-    "unicode/ucurr.h",
-    "unicode/udata.h",
-    "unicode/udisplaycontext.h",
-    "unicode/uenum.h",
-    "unicode/uidna.h",
-    "unicode/uiter.h",
-    "unicode/uldnames.h",
-    "unicode/ulistformatter.h",
-    "unicode/uloc.h",
-    "unicode/umachine.h",
-    "unicode/umisc.h",
-    "unicode/unifilt.h",
-    "unicode/unifunct.h",
-    "unicode/unimatch.h",
-    "unicode/uniset.h",
-    "unicode/unistr.h",
-    "unicode/unorm.h",
-    "unicode/unorm2.h",
-    "unicode/uobject.h",
-    "unicode/urename.h",
-    "unicode/urep.h",
-    "unicode/ures.h",
-    "unicode/uscript.h",
-    "unicode/uset.h",
-    "unicode/usetiter.h",
-    "unicode/ushape.h",
-    "unicode/usprep.h",
-    "unicode/ustring.h",
-    "unicode/ustringtrie.h",
-    "unicode/utext.h",
-    "unicode/utf.h",
-    "unicode/utf16.h",
-    "unicode/utf32.h",
-    "unicode/utf8.h",
-    "unicode/utf_old.h",
-    "unicode/utrace.h",
-    "unicode/utypes.h",
-    "unicode/uvernum.h",
-    "unicode/uversion.h",
-  ]
-}
diff --git a/build/linux/unbundle/libdrm.gn b/build/linux/unbundle/libdrm.gn
deleted file mode 100644
index 22df98a..0000000
--- a/build/linux/unbundle/libdrm.gn
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-import("//build/shim_headers.gni")
-
-pkg_config("system_libdrm") {
-  packages = [ "libdrm" ]
-}
-
-shim_headers("libdrm_shim") {
-  root_path = "src/include"
-  headers = [ "drm.h" ]
-}
-
-source_set("libdrm") {
-  deps = [
-    ":libdrm_shim",
-  ]
-  public_configs = [ ":system_libdrm" ]
-}
diff --git a/build/linux/unbundle/libevent.gn b/build/linux/unbundle/libevent.gn
deleted file mode 100644
index 47e48e9..0000000
--- a/build/linux/unbundle/libevent.gn
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/shim_headers.gni")
-
-shim_headers("libevent_shim") {
-  root_path = "."
-  headers = [ "event.h" ]
-}
-
-source_set("libevent") {
-  deps = [
-    ":libevent_shim",
-  ]
-  libs = [ "event" ]
-}
diff --git a/build/linux/unbundle/libjpeg.gn b/build/linux/unbundle/libjpeg.gn
deleted file mode 100644
index 17398ea..0000000
--- a/build/linux/unbundle/libjpeg.gn
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-declare_args() {
-  # Uses system libjpeg. If true, overrides use_libjpeg_turbo.
-  use_system_libjpeg = true
-
-  # Uses libjpeg_turbo as the jpeg implementation. Has no effect if
-  # use_system_libjpeg is set.
-  use_libjpeg_turbo = true
-}
diff --git a/build/linux/unbundle/libpng.gn b/build/linux/unbundle/libpng.gn
deleted file mode 100644
index 60f837b..0000000
--- a/build/linux/unbundle/libpng.gn
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-import("//build/shim_headers.gni")
-
-pkg_config("libpng_config") {
-  packages = [ "libpng" ]
-}
-
-shim_headers("libpng_shim") {
-  root_path = "."
-  headers = [
-    "png.h",
-    "pngconf.h",
-  ]
-}
-
-source_set("libpng") {
-  deps = [
-    ":libpng_shim",
-  ]
-  public_configs = [ ":libpng_config" ]
-}
diff --git a/build/linux/unbundle/libvpx.gn b/build/linux/unbundle/libvpx.gn
deleted file mode 100644
index eb49e75..0000000
--- a/build/linux/unbundle/libvpx.gn
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-import("//build/shim_headers.gni")
-
-pkg_config("system_libvpx") {
-  packages = [ "vpx" ]
-}
-
-shim_headers("libvpx_shim") {
-  root_path = "source/libvpx"
-  headers = [
-    "vpx/vp8.h",
-    "vpx/vp8cx.h",
-    "vpx/vp8dx.h",
-    "vpx/vpx_codec.h",
-    "vpx/vpx_codec_impl_bottom.h",
-    "vpx/vpx_codec_impl_top.h",
-    "vpx/vpx_decoder.h",
-    "vpx/vpx_encoder.h",
-    "vpx/vpx_frame_buffer.h",
-    "vpx/vpx_image.h",
-    "vpx/vpx_integer.h",
-  ]
-}
-
-source_set("libvpx") {
-  deps = [
-    ":libvpx_shim",
-  ]
-  public_configs = [ ":system_libvpx" ]
-}
diff --git a/build/linux/unbundle/libwebp.gn b/build/linux/unbundle/libwebp.gn
deleted file mode 100644
index ab92ade..0000000
--- a/build/linux/unbundle/libwebp.gn
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-source_set("libwebp_webp") {
-  libs = [
-    "webp",
-    "webpdemux",
-    "webpmux",
-  ]
-}
-
-group("libwebp") {
-  deps = [
-    ":libwebp_webp",
-  ]
-}
diff --git a/build/linux/unbundle/libxml.gn b/build/linux/unbundle/libxml.gn
deleted file mode 100644
index c481bd3..0000000
--- a/build/linux/unbundle/libxml.gn
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-
-pkg_config("system_libxml") {
-  packages = [ "libxml-2.0" ]
-}
-
-static_library("libxml") {
-  sources = [
-    "chromium/libxml_utils.cc",
-    "chromium/libxml_utils.h",
-  ]
-
-  public_configs = [ ":system_libxml" ]
-}
diff --git a/build/linux/unbundle/libxslt.gn b/build/linux/unbundle/libxslt.gn
deleted file mode 100644
index 885574e..0000000
--- a/build/linux/unbundle/libxslt.gn
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-
-pkg_config("system_libxslt") {
-  packages = [ "libxslt" ]
-}
-
-source_set("libxslt") {
-  public_configs = [ ":system_libxslt" ]
-}
diff --git a/build/linux/unbundle/openh264.gn b/build/linux/unbundle/openh264.gn
deleted file mode 100644
index 68ca487..0000000
--- a/build/linux/unbundle/openh264.gn
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-import("//build/shim_headers.gni")
-
-pkg_config("config") {
-  packages = [ "openh264" ]
-}
-
-shim_headers("openh264_shim") {
-  prefix = "wels/"
-  root_path = "src/codec/api/svc"
-  headers = [
-    "codec_api.h",
-    "codec_app_def.h",
-    "codec_def.h",
-    "codec_ver.h",
-  ]
-}
-
-source_set("common") {
-  deps = [
-    ":openh264_shim",
-  ]
-  public_configs = [ ":config" ]
-}
-
-source_set("processing") {
-  deps = [
-    ":openh264_shim",
-  ]
-  public_configs = [ ":config" ]
-}
-
-source_set("encoder") {
-  deps = [
-    ":openh264_shim",
-  ]
-  public_configs = [ ":config" ]
-}
diff --git a/build/linux/unbundle/opus.gn b/build/linux/unbundle/opus.gn
deleted file mode 100644
index e998e3e..0000000
--- a/build/linux/unbundle/opus.gn
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-import("//build/shim_headers.gni")
-
-pkg_config("opus_config") {
-  packages = [ "opus" ]
-}
-
-shim_headers("opus_shim") {
-  root_path = "src/include"
-  headers = [
-    "opus.h",
-    "opus_defines.h",
-    "opus_multistream.h",
-    "opus_types.h",
-  ]
-}
-
-source_set("opus") {
-  deps = [
-    ":opus_shim",
-  ]
-  public_configs = [ ":opus_config" ]
-}
-
-source_set("opus_compare") {
-}
-
-source_set("opus_demo") {
-}
-
-source_set("test_opus_api") {
-}
-
-source_set("test_opus_decode") {
-}
-
-source_set("test_opus_encode") {
-}
-
-source_set("test_opus_padding") {
-}
diff --git a/build/linux/unbundle/re2.gn b/build/linux/unbundle/re2.gn
deleted file mode 100644
index 94013e5..0000000
--- a/build/linux/unbundle/re2.gn
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/shim_headers.gni")
-
-shim_headers("re2_shim") {
-  root_path = "src"
-  headers = [
-    "re2/filtered_re2.h",
-    "re2/re2.h",
-    "re2/set.h",
-    "re2/stringpiece.h",
-    "re2/variadic_function.h",
-  ]
-}
-
-source_set("re2") {
-  deps = [
-    ":re2_shim",
-  ]
-  libs = [ "re2" ]
-}
diff --git a/build/linux/unbundle/remove_bundled_libraries.py b/build/linux/unbundle/remove_bundled_libraries.py
deleted file mode 100755
index 1cf2841..0000000
--- a/build/linux/unbundle/remove_bundled_libraries.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Removes bundled libraries to make sure they are not used.
-
-See README for more details.
-"""
-
-
-import optparse
-import os.path
-import sys
-
-
-def DoMain(argv):
-  my_dirname = os.path.abspath(os.path.dirname(__file__))
-  source_tree_root = os.path.abspath(
-    os.path.join(my_dirname, '..', '..', '..'))
-
-  if os.path.join(source_tree_root, 'build', 'linux', 'unbundle') != my_dirname:
-    print ('Sanity check failed: please run this script from ' +
-           'build/linux/unbundle directory.')
-    return 1
-
-  parser = optparse.OptionParser()
-  parser.add_option('--do-remove', action='store_true')
-
-  options, args = parser.parse_args(argv)
-
-  exclusion_used = {}
-  for exclusion in args:
-    exclusion_used[exclusion] = False
-
-  for root, dirs, files in os.walk(source_tree_root, topdown=False):
-    # Only look at paths which contain a "third_party" component
-    # (note that e.g. third_party.png doesn't count).
-    root_relpath = os.path.relpath(root, source_tree_root)
-    if 'third_party' not in root_relpath.split(os.sep):
-      continue
-
-    for f in files:
-      path = os.path.join(root, f)
-      relpath = os.path.relpath(path, source_tree_root)
-
-      excluded = False
-      for exclusion in args:
-        # Require precise exclusions. Find the right-most third_party
-        # in the relative path and, if there is more than one, ignore
-        # the exclusion if it is completely contained within the part
-        # before the right-most third_party path component.
-        split = relpath.rsplit(os.sep + 'third_party' + os.sep, 1)
-        if len(split) > 1 and split[0].startswith(exclusion):
-          continue
-
-        if relpath.startswith(exclusion):
-          # Multiple exclusions can match the same path. Go through all of them
-          # and mark each one as used.
-          exclusion_used[exclusion] = True
-          excluded = True
-      if excluded:
-        continue
-
-      # Deleting gyp files almost always leads to gyp failures.
-      # These files come from Chromium project, and can be replaced if needed.
-      if f.endswith('.gyp') or f.endswith('.gypi'):
-        continue
-
-      # Same about GN files.
-      if f.endswith('.gn') or f.endswith('.gni'):
-        continue
-
-      # Deleting .isolate files leads to gyp failures. They are usually
-      # not used by a distro build anyway.
-      # See http://www.chromium.org/developers/testing/isolated-testing
-      # for more info.
-      if f.endswith('.isolate'):
-        continue
-
-      if options.do_remove:
-        # Delete the file - best way to ensure it's not used during build.
-        os.remove(path)
-      else:
-        # By default just print paths that would be removed.
-        print path
-
-  exit_code = 0
-
-  # Fail if exclusion list contains stale entries - this helps keep it
-  # up to date.
-  for exclusion, used in exclusion_used.iteritems():
-    if not used:
-      print '%s does not exist' % exclusion
-      exit_code = 1
-
-  if not options.do_remove:
-    print ('To actually remove files printed above, please pass ' +
-           '--do-remove flag.')
-
-  return exit_code
-
-
-if __name__ == '__main__':
-  sys.exit(DoMain(sys.argv[1:]))
diff --git a/build/linux/unbundle/replace_gn_files.py b/build/linux/unbundle/replace_gn_files.py
deleted file mode 100755
index d4d07f2..0000000
--- a/build/linux/unbundle/replace_gn_files.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Replaces GN files in tree with files from here that
-make the build use system libraries.
-"""
-
-from __future__ import print_function
-
-import argparse
-import os
-import shutil
-import sys
-
-
-REPLACEMENTS = {
-  'ffmpeg': 'third_party/ffmpeg/BUILD.gn',
-  'flac': 'third_party/flac/BUILD.gn',
-  'fontconfig': 'third_party/fontconfig/BUILD.gn',
-  'freetype': 'build/config/freetype/freetype.gni',
-  'harfbuzz-ng': 'third_party/harfbuzz-ng/harfbuzz.gni',
-  'icu': 'third_party/icu/BUILD.gn',
-  'libdrm': 'third_party/libdrm/BUILD.gn',
-  'libevent': 'base/third_party/libevent/BUILD.gn',
-  'libjpeg': 'third_party/libjpeg.gni',
-  'libpng': 'third_party/libpng/BUILD.gn',
-  'libvpx': 'third_party/libvpx/BUILD.gn',
-  'libwebp': 'third_party/libwebp/BUILD.gn',
-  'libxml': 'third_party/libxml/BUILD.gn',
-  'libxslt': 'third_party/libxslt/BUILD.gn',
-  'openh264': 'third_party/openh264/BUILD.gn',
-  'opus': 'third_party/opus/BUILD.gn',
-  're2': 'third_party/re2/BUILD.gn',
-  'snappy': 'third_party/snappy/BUILD.gn',
-  'yasm': 'third_party/yasm/yasm_assemble.gni',
-  'zlib': 'third_party/zlib/BUILD.gn',
-}
-
-
-def DoMain(argv):
-  my_dirname = os.path.dirname(__file__)
-  source_tree_root = os.path.abspath(
-    os.path.join(my_dirname, '..', '..', '..'))
-
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--system-libraries', nargs='*', default=[])
-  parser.add_argument('--undo', action='store_true')
-
-  args = parser.parse_args(argv)
-
-  handled_libraries = set()
-  for lib, path in REPLACEMENTS.items():
-    if lib not in args.system_libraries:
-      continue
-    handled_libraries.add(lib)
-
-    if args.undo:
-      # Restore original file, and also remove the backup.
-      # This is meant to restore the source tree to its original state.
-      os.rename(os.path.join(source_tree_root, path + '.orig'),
-                os.path.join(source_tree_root, path))
-    else:
-      # Create a backup copy for --undo.
-      shutil.copyfile(os.path.join(source_tree_root, path),
-                      os.path.join(source_tree_root, path + '.orig'))
-
-      # Copy the GN file from directory of this script to target path.
-      shutil.copyfile(os.path.join(my_dirname, '%s.gn' % lib),
-                      os.path.join(source_tree_root, path))
-
-  unhandled_libraries = set(args.system_libraries) - handled_libraries
-  if unhandled_libraries:
-    print('Unrecognized system libraries requested: %s' % ', '.join(
-        sorted(unhandled_libraries)), file=sys.stderr)
-    return 1
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(DoMain(sys.argv[1:]))
diff --git a/build/linux/unbundle/snappy.gn b/build/linux/unbundle/snappy.gn
deleted file mode 100644
index 9956ef8..0000000
--- a/build/linux/unbundle/snappy.gn
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/shim_headers.gni")
-
-shim_headers("snappy_shim") {
-  root_path = "src"
-  headers = [
-    "snappy-c.h",
-    "snappy-sinksource.h",
-    "snappy-stubs-public.h",
-    "snappy.h",
-  ]
-}
-
-source_set("snappy") {
-  deps = [
-    ":snappy_shim",
-  ]
-  libs = [ "snappy" ]
-}
diff --git a/build/linux/unbundle/yasm.gn b/build/linux/unbundle/yasm.gn
deleted file mode 100644
index b5b440e..0000000
--- a/build/linux/unbundle/yasm.gn
+++ /dev/null
@@ -1,102 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-if (current_cpu == "x86") {
-  _yasm_flags = [
-    "-felf32",
-    "-m",
-    "x86",
-  ]
-} else if (current_cpu == "x64") {
-  _yasm_flags = [
-    "-DPIC",
-    "-felf64",
-    "-m",
-    "amd64",
-  ]
-}
-
-template("yasm_assemble") {
-  action_name = "${target_name}_action"
-  source_set_name = target_name
-
-  action_foreach(action_name) {
-    # Only the source set can depend on this.
-    visibility = [ ":$source_set_name" ]
-
-    script = "//third_party/yasm/run_yasm.py"
-    sources = invoker.sources
-
-    if (defined(invoker.inputs)) {
-      inputs = invoker.inputs
-    }
-
-    deps = []
-    if (defined(invoker.deps)) {
-      deps += invoker.deps
-    }
-
-    args = [ "yasm" ] + _yasm_flags
-    if (defined(invoker.yasm_flags)) {
-      args += invoker.yasm_flags
-    }
-
-    # User defined include dirs go first.
-    if (defined(invoker.include_dirs)) {
-      foreach(include, invoker.include_dirs) {
-        args += [ "-I" + rebase_path(include, root_build_dir) ]
-      }
-    }
-
-    # Default yasm include dirs. Make it match the native build (source root and
-    # root generated code directory).
-    # This goes at the end of the include list.
-    args += [
-      "-I.",
-
-      # Using "//." will produce a relative path "../.." which looks better than
-      # "../../" which will result from using "//" as the base (although both
-      # work). This is because rebase_path will terminate the result in a
-      # slash if the input ends in a slash.
-      "-I" + rebase_path("//.", root_build_dir),
-      "-I" + rebase_path(root_gen_dir, root_build_dir),
-    ]
-
-    # Extra defines.
-    if (defined(invoker.defines)) {
-      foreach(def, invoker.defines) {
-        args += [ "-D$def" ]
-      }
-    }
-
-    # Output file.
-    outputs = [
-      "$target_out_dir/$source_set_name/{{source_name_part}}.o",
-    ]
-    args += [
-      "-o",
-      rebase_path(outputs[0], root_build_dir),
-      "{{source}}",
-    ]
-
-    # The wrapper script run_yasm will write the depfile to the same name as
-    # the output but with .d appended (like gcc will).
-    depfile = outputs[0] + ".d"
-  }
-
-  # Gather the .o files into a linkable thing. This doesn't actually link
-  # anything (a source set just compiles files to link later), but will pass
-  # the object files generated by the action up the dependency chain.
-  static_library(source_set_name) {
-    if (defined(invoker.visibility)) {
-      visibility = invoker.visibility
-    }
-
-    sources = get_target_outputs(":$action_name")
-
-    deps = [
-      ":$action_name",
-    ]
-  }
-}
diff --git a/build/linux/unbundle/zlib.gn b/build/linux/unbundle/zlib.gn
deleted file mode 100644
index 020fc7e..0000000
--- a/build/linux/unbundle/zlib.gn
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/shim_headers.gni")
-
-shim_headers("zlib_shim") {
-  root_path = "."
-  headers = [ "zlib.h" ]
-}
-
-config("system_zlib") {
-  defines = [ "USE_SYSTEM_ZLIB=1" ]
-}
-
-source_set("zlib") {
-  deps = [
-    ":zlib_shim",
-  ]
-  libs = [ "z" ]
-  public_configs = [ ":system_zlib" ]
-}
-
-shim_headers("minizip_shim") {
-  root_path = "contrib"
-  headers = [
-    "minizip/crypt.h",
-    "minizip/ioapi.h",
-    "minizip/iowin32.h",
-    "minizip/mztools.h",
-    "minizip/unzip.h",
-    "minizip/zip.h",
-  ]
-}
-
-source_set("minizip") {
-  deps = [
-    ":minizip_shim",
-  ]
-  libs = [ "minizip" ]
-}
-
-static_library("zip") {
-  sources = [
-    "google/zip.cc",
-    "google/zip.h",
-    "google/zip_internal.cc",
-    "google/zip_internal.h",
-    "google/zip_reader.cc",
-    "google/zip_reader.h",
-  ]
-  deps = [
-    ":minizip",
-    "//base",
-  ]
-}
-
-static_library("compression_utils") {
-  sources = [
-    "google/compression_utils.cc",
-    "google/compression_utils.h",
-  ]
-  deps = [
-    ":zlib",
-  ]
-}
diff --git a/build/mac/OWNERS b/build/mac/OWNERS
deleted file mode 100644
index a2d7cc8..0000000
--- a/build/mac/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-mark@chromium.org
-rsesek@chromium.org
-
-# COMPONENT: Build
diff --git a/build/mac/edit_xibs.sh b/build/mac/edit_xibs.sh
deleted file mode 100755
index b7b749e..0000000
--- a/build/mac/edit_xibs.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This script is a convenience to run GYP for /src/chrome/chrome_nibs.gyp
-# with the Xcode generator (as you likely use ninja). Documentation:
-#   http://dev.chromium.org/developers/design-documents/mac-xib-files
-
-set -e
-
-RELSRC=$(dirname "$0")/../..
-SRC=$(cd "$RELSRC" && pwd)
-export PYTHONPATH="$PYTHONPATH:$SRC/build"
-export GYP_GENERATORS=xcode
-"$SRC/tools/gyp/gyp" -I"$SRC/build/common.gypi" "$SRC/chrome/chrome_nibs.gyp"
-echo "You can now edit XIB files in Xcode using:"
-echo "  $SRC/chrome/chrome_nibs.xcodeproj"
diff --git a/build/mac/find_sdk.py b/build/mac/find_sdk.py
deleted file mode 100755
index 540a320..0000000
--- a/build/mac/find_sdk.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Prints the lowest locally available SDK version greater than or equal to a
-given minimum sdk version to standard output. If --developer_dir is passed, then
-the script will use the Xcode toolchain located at DEVELOPER_DIR.
-
-Usage:
-  python find_sdk.py [--developer_dir DEVELOPER_DIR] 10.6  # Ignores SDKs < 10.6
-"""
-
-import os
-import re
-import subprocess
-import sys
-
-from optparse import OptionParser
-
-
-class SdkError(Exception):
-  def __init__(self, value):
-    self.value = value
-  def __str__(self):
-    return repr(self.value)
-
-
-def parse_version(version_str):
-  """'10.6' => [10, 6]"""
-  return map(int, re.findall(r'(\d+)', version_str))
-
-
-def main():
-  parser = OptionParser()
-  parser.add_option("--verify",
-                    action="store_true", dest="verify", default=False,
-                    help="return the sdk argument and warn if it doesn't exist")
-  parser.add_option("--sdk_path",
-                    action="store", type="string", dest="sdk_path", default="",
-                    help="user-specified SDK path; bypasses verification")
-  parser.add_option("--print_sdk_path",
-                    action="store_true", dest="print_sdk_path", default=False,
-                    help="Additionally print the path to the SDK (appears first).")
-  parser.add_option("--developer_dir", help='Path to Xcode.')
-  options, args = parser.parse_args()
-  if len(args) != 1:
-    parser.error('Please specify a minimum SDK version')
-  min_sdk_version = args[0]
-
-  if options.developer_dir:
-    os.environ['DEVELOPER_DIR'] = options.developer_dir
-
-  job = subprocess.Popen(['xcode-select', '-print-path'],
-                         stdout=subprocess.PIPE,
-                         stderr=subprocess.STDOUT)
-  out, err = job.communicate()
-  if job.returncode != 0:
-    print >> sys.stderr, out
-    print >> sys.stderr, err
-    raise Exception('Error %d running xcode-select' % job.returncode)
-  sdk_dir = os.path.join(
-      out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs')
-  # Xcode must be installed, its license agreement must be accepted, and its
-  # command-line tools must be installed. Stand-alone installations (in
-  # /Library/Developer/CommandLineTools) are not supported.
-  # https://bugs.chromium.org/p/chromium/issues/detail?id=729990#c1
-  if not os.path.isdir(sdk_dir) or not '.app/Contents/Developer' in sdk_dir:
-    raise SdkError('Install Xcode, launch it, accept the license ' +
-      'agreement, and run `sudo xcode-select -s /path/to/Xcode.app` ' +
-      'to continue.')
-  sdks = [re.findall('^MacOSX(10\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)]
-  sdks = [s[0] for s in sdks if s]  # [['10.5'], ['10.6']] => ['10.5', '10.6']
-  sdks = [s for s in sdks  # ['10.5', '10.6'] => ['10.6']
-          if parse_version(s) >= parse_version(min_sdk_version)]
-  if not sdks:
-    raise Exception('No %s+ SDK found' % min_sdk_version)
-  best_sdk = sorted(sdks, key=parse_version)[0]
-
-  if options.verify and best_sdk != min_sdk_version and not options.sdk_path:
-    print >> sys.stderr, ''
-    print >> sys.stderr, '                                           vvvvvvv'
-    print >> sys.stderr, ''
-    print >> sys.stderr, \
-        'This build requires the %s SDK, but it was not found on your system.' \
-        % min_sdk_version
-    print >> sys.stderr, \
-        'Either install it, or explicitly set mac_sdk in your GYP_DEFINES.'
-    print >> sys.stderr, ''
-    print >> sys.stderr, '                                           ^^^^^^^'
-    print >> sys.stderr, ''
-    sys.exit(1)
-
-  if options.print_sdk_path:
-    print subprocess.check_output(
-        ['xcrun', '-sdk', 'macosx' + best_sdk, '--show-sdk-path']).strip()
-
-  return best_sdk
-
-
-if __name__ == '__main__':
-  if sys.platform != 'darwin':
-    raise Exception("This script only runs on Mac")
-  print main()
-  sys.exit(0)
diff --git a/build/mac/should_use_hermetic_xcode.py b/build/mac/should_use_hermetic_xcode.py
deleted file mode 100755
index 124cf54..0000000
--- a/build/mac/should_use_hermetic_xcode.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Prints "1" if Chrome targets should be built with hermetic Xcode.
-Prints "2" if Chrome targets should be built with hermetic Xcode, but the OS
-version does not meet the minimum requirements of the hermetic version of Xcode.
-Otherwise prints "0".
-
-Usage:
-  python should_use_hermetic_xcode.py <target_os>
-"""
-
-import os
-import sys
-
-_THIS_DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
-_BUILD_PATH = os.path.join(_THIS_DIR_PATH, os.pardir)
-sys.path.insert(0, _BUILD_PATH)
-
-import mac_toolchain
-
-
-def _IsCorpMachine():
-  return os.path.isdir('/Library/GoogleCorpSupport/')
-
-
-def main():
-  allow_corp = sys.argv[1] == 'mac' and _IsCorpMachine()
-  if os.environ.get('FORCE_MAC_TOOLCHAIN') or allow_corp:
-    if not mac_toolchain.PlatformMeetsHermeticXcodeRequirements():
-      return "2"
-    return "1"
-  else:
-    return "0"
-
-
-if __name__ == '__main__':
-  print main()
-  sys.exit(0)
diff --git a/build/mac/tweak_info_plist.gni b/build/mac/tweak_info_plist.gni
deleted file mode 100644
index 505f5e5..0000000
--- a/build/mac/tweak_info_plist.gni
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Template to run the tweak_info_plist.py script on a plist.
-#
-# Arguments:
-#
-#     info_plist:
-#         (optional), string, the plist to tweak.
-#
-#     info_plists:
-#         (optional), list of string, the plist files to merge and tweak.
-#
-#     args:
-#         (optional), list of string, the arguments to pass to the
-#         tweak_info_plist.py script.
-#
-# Callers should use get_target_outputs() to get the output name. One of
-# info_plist or info_plists must be specified.
-template("tweak_info_plist") {
-  _output_name = "$target_gen_dir/${target_name}_tweaked.plist"
-
-  if (defined(invoker.info_plists)) {
-    assert(!defined(invoker.info_plist),
-           "Cannot have both info_plist and info_plists for $target_name")
-
-    _source_name = "$target_gen_dir/${target_name}_merged.plist"
-    _deps = [ ":" + target_name + "_merge_plist" ]
-
-    action(target_name + "_merge_plist") {
-      forward_variables_from(invoker, [ "testonly" ])
-      script = "//build/config/mac/plist_util.py"
-      sources = invoker.info_plists
-      outputs = [
-        _source_name,
-      ]
-      args = [
-               "merge",
-               "-f=xml1",
-               "-o=" + rebase_path(_source_name, root_build_dir),
-             ] + rebase_path(invoker.info_plists, root_build_dir)
-    }
-  } else {
-    assert(defined(invoker.info_plist),
-           "The info_plist must be specified in $target_name")
-
-    _source_name = invoker.info_plist
-    _deps = []
-  }
-
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "args",
-                             "testonly",
-                           ])
-    script = "//build/mac/tweak_info_plist.py"
-    inputs = [
-      script,
-      "//build/util/version.py",
-      "//build/util/LASTCHANGE",
-      "//chrome/VERSION",
-    ]
-    sources = [
-      _source_name,
-    ]
-    outputs = [
-      _output_name,
-    ]
-    if (!defined(args)) {
-      args = []
-    }
-    args += [
-      "--plist",
-      rebase_path(_source_name, root_build_dir),
-      "--output",
-      rebase_path(_output_name, root_build_dir),
-      "--platform=$current_os",
-    ]
-    deps = _deps
-  }
-}
diff --git a/build/mac/tweak_info_plist.py b/build/mac/tweak_info_plist.py
deleted file mode 100755
index 9ea794b..0000000
--- a/build/mac/tweak_info_plist.py
+++ /dev/null
@@ -1,366 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-#
-# Xcode supports build variable substitutions and CPP; sadly, that doesn't work
-# because:
-#
-# 1. Xcode wants to do the Info.plist work before it runs any build phases;
-#    this means that if we were to generate a .h file for INFOPLIST_PREFIX_HEADER,
-#    we'd have to put it in another target so it runs in time.
-# 2. Xcode also doesn't check to see if the header being used as a prefix for
-#    the Info.plist has changed.  So even if we updated it, it only looks at
-#    the modtime of the Info.plist to see if that's changed.
-#
-# So, we work around all of this by making a script build phase that will run
-# during the app build, and simply update the info.plist in place.  This way
-# by the time the app target is done, the info.plist is correct.
-#
-
-import optparse
-import os
-import plistlib
-import re
-import subprocess
-import sys
-import tempfile
-
-TOP = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
-
-
-def _ConvertPlist(source_plist, output_plist, fmt):
-  """Convert |source_plist| to |fmt| and save as |output_plist|."""
-  return subprocess.call(
-      ['plutil', '-convert', fmt, '-o', output_plist, source_plist])
-
-
-def _GetOutput(args):
-  """Runs a subprocess and waits for termination. Returns (stdout, returncode)
-  of the process. stderr is attached to the parent."""
-  proc = subprocess.Popen(args, stdout=subprocess.PIPE)
-  (stdout, stderr) = proc.communicate()
-  return (stdout, proc.returncode)
-
-
-def _GetOutputNoError(args):
-  """Similar to _GetOutput() but ignores stderr. If there's an error launching
-  the child (like file not found), the exception will be caught and (None, 1)
-  will be returned to mimic quiet failure."""
-  try:
-    proc = subprocess.Popen(args, stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE)
-  except OSError:
-    return (None, 1)
-  (stdout, stderr) = proc.communicate()
-  return (stdout, proc.returncode)
-
-
-def _RemoveKeys(plist, *keys):
-  """Removes a varargs of keys from the plist."""
-  for key in keys:
-    try:
-      del plist[key]
-    except KeyError:
-      pass
-
-
-def _ApplyVersionOverrides(version, keys, overrides, separator='.'):
-  """Applies version overrides.
-
-  Given a |version| string such as "a.b.c.d" (assuming the default separator)
-  with version components named by |keys|, overrides any component whose name
-  is present in |overrides|.
-
-  >>> _ApplyVersionOverrides('a.b', ['major', 'minor'], {'minor': 'd'})
-  'a.d'
-  """
-  if not overrides:
-    return version
-  version_values = version.split(separator)
-  for i, (key, value) in enumerate(zip(keys, version_values)):
-    if key in overrides:
-      version_values[i] = overrides[key]
-  return separator.join(version_values)
-
-
-def _GetVersion(version_format, values, overrides=None):
-  """Generates a version number according to |version_format| using the values
-  from |values| or |overrides| if given."""
-  result = version_format
-  for key in values:
-    if overrides and key in overrides:
-      value = overrides[key]
-    else:
-      value = values[key]
-    result = result.replace('@%s@' % key, value)
-  return result
-
-
-def _AddVersionKeys(
-    plist, version_format_for_key, version=None, overrides=None):
-  """Adds the product version number into the plist. Returns True on success and
-  False on error. The error will be printed to stderr."""
-  if not version:
-    # Pull in the Chrome version number.
-    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
-    VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
-    (stdout, retval) = _GetOutput([
-        VERSION_TOOL, '-f', VERSION_FILE,
-        '-t', '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'])
-
-    # If the command finished with a non-zero return code, then report the
-    # error up.
-    if retval != 0:
-      return False
-
-    version = stdout.strip()
-
-  # Parse the given version number, which should be in MAJOR.MINOR.BUILD.PATCH
-  # format (where each value is a number). Note that str.isdigit() returns
-  # True only if the string is composed of digits (and thus matches \d+).
-  groups = version.split('.')
-  if len(groups) != 4 or not all(element.isdigit() for element in groups):
-    print >>sys.stderr, 'Invalid version string specified: "%s"' % version
-    return False
-  values = dict(zip(('MAJOR', 'MINOR', 'BUILD', 'PATCH'), groups))
-
-  for key in version_format_for_key:
-    plist[key] = _GetVersion(version_format_for_key[key], values, overrides)
-
-  # Return with no error.
-  return True
-
-
-def _DoSCMKeys(plist, add_keys):
-  """Adds the SCM information, visible in about:version, to property list. If
-  |add_keys| is True, it will insert the keys, otherwise it will remove them."""
-  scm_revision = None
-  if add_keys:
-    # Pull in the Chrome revision number.
-    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
-    LASTCHANGE_FILE = os.path.join(TOP, 'build/util/LASTCHANGE')
-    (stdout, retval) = _GetOutput([VERSION_TOOL, '-f', LASTCHANGE_FILE, '-t',
-                                  '@LASTCHANGE@'])
-    if retval:
-      return False
-    scm_revision = stdout.rstrip()
-
-  # Set the SCMRevision key, or remove it if the revision is unavailable.
-  _RemoveKeys(plist, 'SCMRevision')
-  if scm_revision != None:
-    plist['SCMRevision'] = scm_revision
-  elif add_keys:
-    print >>sys.stderr, 'Could not determine SCM revision.  This may be OK.'
-
-  return True
-
-
-def _AddBreakpadKeys(plist, branding, platform, staging):
-  """Adds the Breakpad keys. This must be called AFTER _AddVersionKeys() and
-  also requires the |branding| argument."""
-  plist['BreakpadReportInterval'] = '3600'  # Deliberately a string.
-  plist['BreakpadProduct'] = '%s_%s' % (branding, platform)
-  plist['BreakpadProductDisplay'] = branding
-  if staging:
-    plist['BreakpadURL'] = 'https://clients2.google.com/cr/staging_report'
-  else:
-    plist['BreakpadURL'] = 'https://clients2.google.com/cr/report'
-
-  # These are both deliberately strings and not boolean.
-  plist['BreakpadSendAndExit'] = 'YES'
-  plist['BreakpadSkipConfirm'] = 'YES'
-
-
-def _RemoveBreakpadKeys(plist):
-  """Removes any set Breakpad keys."""
-  _RemoveKeys(plist,
-      'BreakpadURL',
-      'BreakpadReportInterval',
-      'BreakpadProduct',
-      'BreakpadProductDisplay',
-      'BreakpadVersion',
-      'BreakpadSendAndExit',
-      'BreakpadSkipConfirm')
-
-
-def _TagSuffixes():
-  # Keep this list sorted in the order that tag suffix components are to
-  # appear in a tag value. That is to say, it should be sorted per ASCII.
-  components = ('full',)
-  assert tuple(sorted(components)) == components
-
-  components_len = len(components)
-  combinations = 1 << components_len
-  tag_suffixes = []
-  for combination in xrange(0, combinations):
-    tag_suffix = ''
-    for component_index in xrange(0, components_len):
-      if combination & (1 << component_index):
-        tag_suffix += '-' + components[component_index]
-    tag_suffixes.append(tag_suffix)
-  return tag_suffixes
-
-
-def _AddKeystoneKeys(plist, bundle_identifier):
-  """Adds the Keystone keys. This must be called AFTER _AddVersionKeys() and
-  also requires the |bundle_identifier| argument (com.example.product)."""
-  plist['KSVersion'] = plist['CFBundleShortVersionString']
-  plist['KSProductID'] = bundle_identifier
-  plist['KSUpdateURL'] = 'https://tools.google.com/service/update2'
-
-  _RemoveKeys(plist, 'KSChannelID')
-  for tag_suffix in _TagSuffixes():
-    if tag_suffix:
-      plist['KSChannelID' + tag_suffix] = tag_suffix
-
-
-def _RemoveKeystoneKeys(plist):
-  """Removes any set Keystone keys."""
-  _RemoveKeys(plist,
-      'KSVersion',
-      'KSProductID',
-      'KSUpdateURL')
-
-  tag_keys = []
-  for tag_suffix in _TagSuffixes():
-    tag_keys.append('KSChannelID' + tag_suffix)
-  _RemoveKeys(plist, *tag_keys)
-
-
-def Main(argv):
-  parser = optparse.OptionParser('%prog [options]')
-  parser.add_option('--plist', dest='plist_path', action='store',
-      type='string', default=None, help='The path of the plist to tweak.')
-  parser.add_option('--output', dest='plist_output', action='store',
-      type='string', default=None, help='If specified, the path to output ' + \
-      'the tweaked plist, rather than overwriting the input.')
-  parser.add_option('--breakpad', dest='use_breakpad', action='store',
-      type='int', default=False, help='Enable Breakpad [1 or 0]')
-  parser.add_option('--breakpad_staging', dest='use_breakpad_staging',
-      action='store_true', default=False,
-      help='Use staging breakpad to upload reports. Ignored if --breakpad=0.')
-  parser.add_option('--keystone', dest='use_keystone', action='store',
-      type='int', default=False, help='Enable Keystone [1 or 0]')
-  parser.add_option('--scm', dest='add_scm_info', action='store', type='int',
-      default=True, help='Add SCM metadata [1 or 0]')
-  parser.add_option('--branding', dest='branding', action='store',
-      type='string', default=None, help='The branding of the binary')
-  parser.add_option('--bundle_id', dest='bundle_identifier',
-      action='store', type='string', default=None,
-      help='The bundle id of the binary')
-  parser.add_option('--platform', choices=('ios', 'mac'), default='mac',
-      help='The target platform of the bundle')
-  parser.add_option('--version-overrides', action='append',
-      help='Key-value pair to override specific component of version '
-           'like key=value (can be passed multiple time to configure '
-           'more than one override)')
-  parser.add_option('--format', choices=('binary1', 'xml1', 'json'),
-      default='xml1', help='Format to use when writing property list '
-          '(default: %default)')
-  parser.add_option('--version', dest='version', action='store', type='string',
-      default=None, help='The version string [major.minor.build.patch]')
-  (options, args) = parser.parse_args(argv)
-
-  if len(args) > 0:
-    print >>sys.stderr, parser.get_usage()
-    return 1
-
-  if not options.plist_path:
-    print >>sys.stderr, 'No --plist specified.'
-    return 1
-
-  # Read the plist into its parsed format. Convert the file to 'xml1' as
-  # plistlib only supports that format in Python 2.7.
-  with tempfile.NamedTemporaryFile() as temp_info_plist:
-    retcode = _ConvertPlist(options.plist_path, temp_info_plist.name, 'xml1')
-    if retcode != 0:
-      return retcode
-    plist = plistlib.readPlist(temp_info_plist.name)
-
-  # Convert overrides.
-  overrides = {}
-  if options.version_overrides:
-    for pair in options.version_overrides:
-      if not '=' in pair:
-        print >>sys.stderr, 'Invalid value for --version-overrides:', pair
-        return 1
-      key, value = pair.split('=', 1)
-      overrides[key] = value
-      if key not in ('MAJOR', 'MINOR', 'BUILD', 'PATCH'):
-        print >>sys.stderr, 'Unsupported key for --version-overrides:', key
-        return 1
-
-  if options.platform == 'mac':
-    version_format_for_key = {
-      # Add public version info so "Get Info" works.
-      'CFBundleShortVersionString': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@',
-
-      # Honor the 429496.72.95 limit.  The maximum comes from splitting 2^32 - 1
-      # into 6, 2, 2 digits.  The limitation was present in Tiger; it may have
-      # been fixed in a later OS release, but that hasn't been tested (it's easy
-      # enough to find out with "lsregister -dump").
-      # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html
-      # BUILD will always be an increasing value, so BUILD.PATCH gives us
-      # something unique that meets what LS wants.
-      'CFBundleVersion': '@BUILD@.@PATCH@',
-    }
-  else:
-    version_format_for_key = {
-      'CFBundleShortVersionString': '@MAJOR@.@BUILD@.@PATCH@',
-      'CFBundleVersion': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'
-    }
-
-  if options.use_breakpad:
-    version_format_for_key['BreakpadVersion'] = \
-        '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'
-
-  # Insert the product version.
-  if not _AddVersionKeys(
-      plist, version_format_for_key, version=options.version,
-      overrides=overrides):
-    return 2
-
-  # Add Breakpad if configured to do so.
-  if options.use_breakpad:
-    if options.branding is None:
-      print >>sys.stderr, 'Use of Breakpad requires branding.'
-      return 1
-    # Map "target_os" passed from gn via the --platform parameter
-    # to the platform as known by breakpad.
-    platform = {'mac': 'Mac', 'ios': 'iOS'}[options.platform]
-    _AddBreakpadKeys(plist, options.branding, platform,
-        options.use_breakpad_staging)
-  else:
-    _RemoveBreakpadKeys(plist)
-
-  # Add Keystone if configured to do so.
-  if options.use_keystone:
-    if options.bundle_identifier is None:
-      print >>sys.stderr, 'Use of Keystone requires the bundle id.'
-      return 1
-    _AddKeystoneKeys(plist, options.bundle_identifier)
-  else:
-    _RemoveKeystoneKeys(plist)
-
-  # Adds or removes any SCM keys.
-  if not _DoSCMKeys(plist, options.add_scm_info):
-    return 3
-
-  output_path = options.plist_path
-  if options.plist_output is not None:
-    output_path = options.plist_output
-
-  # Now that all keys have been mutated, rewrite the file.
-  with tempfile.NamedTemporaryFile() as temp_info_plist:
-    plistlib.writePlist(plist, temp_info_plist.name)
-
-    # Convert Info.plist to the format requested by the --format flag. Any
-    # format would work on Mac, but iOS requires a specific format.
-    return _ConvertPlist(temp_info_plist.name, output_path, options.format)
-
-
-if __name__ == '__main__':
-  sys.exit(Main(sys.argv[1:]))
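
The core of the version handling in the file above is a simple @KEY@ substitution over MAJOR/MINOR/BUILD/PATCH. A minimal sketch with made-up version numbers, mirroring what _GetVersion() did:

    def expand(version_format, values):
        # Replace each @KEY@ token with its value.
        for key, value in values.items():
            version_format = version_format.replace('@%s@' % key, value)
        return version_format

    values = dict(zip(('MAJOR', 'MINOR', 'BUILD', 'PATCH'), '1.2.3.4'.split('.')))
    print(expand('@MAJOR@.@MINOR@.@BUILD@.@PATCH@', values))  # 1.2.3.4
    print(expand('@BUILD@.@PATCH@', values))                  # 3.4 (mac CFBundleVersion)
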
diff --git a/build/mac_toolchain.py b/build/mac_toolchain.py
deleted file mode 100755
index 24ee355..0000000
--- a/build/mac_toolchain.py
+++ /dev/null
@@ -1,169 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-If should_use_hermetic_xcode.py emits "1", and the current toolchain is out of
-date:
-  * Downloads the hermetic mac toolchain
-    * Requires CIPD authentication. Run `cipd auth-login`, use Google account.
-  * Accepts the license.
-    * If xcode-select and xcodebuild are not passwordless in sudoers, requires
-      user interaction.
-
-The toolchain version can be overridden by setting MAC_TOOLCHAIN_REVISION with
-the full revision, e.g. 9A235.
-"""
-
-import os
-import platform
-import shutil
-import subprocess
-import sys
-
-
-# This can be changed after running:
-#    mac_toolchain upload -xcode-path path/to/Xcode.app
-MAC_TOOLCHAIN_VERSION = '8E2002'
-
-# The toolchain will not be downloaded if the minimum OS version is not met.
-# 16 is the major version number for macOS 10.12.
-MAC_MINIMUM_OS_VERSION = 16
-
-MAC_TOOLCHAIN_INSTALLER = 'mac_toolchain'
-
-# Absolute path to src/ directory.
-REPO_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-
-# Absolute path to a file with gclient solutions.
-GCLIENT_CONFIG = os.path.join(os.path.dirname(REPO_ROOT), '.gclient')
-
-BASE_DIR = os.path.abspath(os.path.dirname(__file__))
-TOOLCHAIN_ROOT = os.path.join(BASE_DIR, 'mac_files')
-TOOLCHAIN_BUILD_DIR = os.path.join(TOOLCHAIN_ROOT, 'Xcode.app')
-STAMP_FILE = os.path.join(TOOLCHAIN_ROOT, 'toolchain_build_revision')
-
-
-def PlatformMeetsHermeticXcodeRequirements():
-  return int(platform.release().split('.')[0]) >= MAC_MINIMUM_OS_VERSION
-
-
-def _UseHermeticToolchain():
-  current_dir = os.path.dirname(os.path.realpath(__file__))
-  script_path = os.path.join(current_dir, 'mac/should_use_hermetic_xcode.py')
-  proc = subprocess.Popen([script_path, 'mac'], stdout=subprocess.PIPE)
-  return '1' in proc.stdout.readline()
-
-
-def RequestCipdAuthentication():
-  """Requests that the user authenticate to access Xcode CIPD packages."""
-
-  print 'Access to Xcode CIPD package requires authentication.'
-  print '-----------------------------------------------------------------'
-  print
-  print 'You appear to be a Googler.'
-  print
-  print 'I\'m sorry for the hassle, but you may need to do a one-time manual'
-  print 'authentication. Please run:'
-  print
-  print '    cipd auth-login'
-  print
-  print 'and follow the instructions.'
-  print
-  print 'NOTE: Use your google.com credentials, not chromium.org.'
-  print
-  print '-----------------------------------------------------------------'
-  print
-  sys.stdout.flush()
-
-
-def PrintError(message):
-  # Flush buffers to ensure correct output ordering.
-  sys.stdout.flush()
-  sys.stderr.write(message + '\n')
-  sys.stderr.flush()
-
-
-def InstallXcode(xcode_build_version, installer_cmd, xcode_app_path):
-  """Installs the requested Xcode build version.
-
-  Args:
-    xcode_build_version: (string) Xcode build version to install.
-    installer_cmd: (string) Path to mac_toolchain command to install Xcode.
-      See https://chromium.googlesource.com/infra/infra/+/master/go/src/infra/cmd/mac_toolchain/
-    xcode_app_path: (string) Path to install the contents of Xcode.app.
-
-  Returns:
-    True if installation was successful. False otherwise.
-  """
-  args = [
-      installer_cmd, 'install',
-      '-kind', 'mac',
-      '-xcode-version', xcode_build_version.lower(),
-      '-output-dir', xcode_app_path,
-  ]
-
-  # Buildbot slaves need to use explicit credentials. LUCI bots should NOT set
-  # this variable.
-  creds = os.environ.get('MAC_TOOLCHAIN_CREDS')
-  if creds:
-    args.extend(['--service-account-json', creds])
-
-  try:
-    subprocess.check_call(args)
-  except subprocess.CalledProcessError as e:
-    PrintError('Xcode build version %s failed to install: %s\n' % (
-        xcode_build_version, e))
-    RequestCipdAuthentication()
-    return False
-  except OSError as e:
-    PrintError(('Xcode installer "%s" failed to execute'
-                ' (not on PATH or not installed).') % installer_cmd)
-    return False
-
-  return True
-
-
-def main():
-  if sys.platform != 'darwin':
-    return 0
-
-  if not _UseHermeticToolchain():
-    print 'Skipping Mac toolchain installation for mac'
-    return 0
-
-  if not PlatformMeetsHermeticXcodeRequirements():
-    print 'OS version does not support toolchain.'
-    return 0
-
-  toolchain_version = os.environ.get('MAC_TOOLCHAIN_REVISION',
-                                      MAC_TOOLCHAIN_VERSION)
-
-  # On developer machines, the mac_toolchain tool is provided by
-  # depot_tools. On the bots, the recipe is responsible for installing
-  # it and providing the path to the executable.
-  installer_cmd = os.environ.get('MAC_TOOLCHAIN_INSTALLER',
-                                 MAC_TOOLCHAIN_INSTALLER)
-
-  toolchain_root = TOOLCHAIN_ROOT
-  xcode_app_path = TOOLCHAIN_BUILD_DIR
-  stamp_file = STAMP_FILE
-
-  # Delete the old "hermetic" installation if detected.
-  # TODO(crbug.com/797051): remove this once the old "hermetic" solution is no
-  # longer in use.
-  if os.path.exists(stamp_file):
-    print 'Detected old hermetic installation at %s. Deleting.' % (
-      toolchain_root)
-    shutil.rmtree(toolchain_root)
-
-  success = InstallXcode(toolchain_version, installer_cmd, xcode_app_path)
-  if not success:
-    return 1
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
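
The OS gate in the file above compares the Darwin major version reported by platform.release() against 16 (macOS 10.12). A small self-contained sketch of that check:

    import platform

    MAC_MINIMUM_OS_VERSION = 16  # Darwin major version for macOS 10.12.

    def platform_meets_requirements(release=None):
        # platform.release() returns e.g. '16.7.0' on macOS 10.12.
        release = release or platform.release()
        return int(release.split('.')[0]) >= MAC_MINIMUM_OS_VERSION

    print(platform_meets_requirements('16.7.0'))  # True
    print(platform_meets_requirements('15.6.0'))  # False
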
diff --git a/build/nocompile.gni b/build/nocompile.gni
deleted file mode 100644
index be6e5af..0000000
--- a/build/nocompile.gni
+++ /dev/null
@@ -1,109 +0,0 @@
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file is meant to be included into a target to create a unittest that
-# invokes a set of no-compile tests.  A no-compile test is a test that asserts
-# a particular construct will not compile.
-#
-# Also see:
-#   http://dev.chromium.org/developers/testing/no-compile-tests
-#
-# To use this, create a GN target with the following form:
-#
-# import("//build/nocompile.gni")
-# nocompile_test("my_module_nc_unittests") {
-#   sources = [
-#     'nc_testset_1.nc',
-#     'nc_testset_2.nc',
-#   ]
-# }
-#
-# The .nc files are C++ files that contain code we wish to assert will not
-# compile.  Each individual test case in the file should be put in its own
-# #ifdef section.  The expected output should be appended with a C++-style
-# comment that has a python list of regular expressions.  This will likely
-# be greater than 80-characters. Giving a solid expected output test is
-# important so that random compile failures do not cause the test to pass.
-#
-# Example .nc file:
-#
-#   #if defined(TEST_NEEDS_SEMICOLON)  // [r"expected ',' or ';' at end of input"]
-#
-#   int a = 1
-#
-#   #elif defined(TEST_NEEDS_CAST)  // [r"invalid conversion from 'void*' to 'char*'"]
-#
-#   void* a = NULL;
-#   char* b = a;
-#
-#   #endif
-#
-# If we need to disable TEST_NEEDS_SEMICOLON, then change the defines to:
-#
-#   DISABLE_TEST_NEEDS_SEMICOLON
-#   TEST_NEEDS_CAST
-#
-# The lines above are parsed by a regexp so avoid getting creative with the
-# formatting or ifdef logic; it will likely just not work.
-#
-# Implementation notes:
-# The .nc files are actually processed by a python script which executes the
-# compiler and generates a .cc file that contains a series of #error lines on
-# failure, or a set of trivially passing gunit TEST() functions on success.
-# This allows us to fail at the compile step when something goes wrong, and
-# know during the unittest run that the test was at least processed when
-# things go right.
-
-import("//testing/test.gni")
-
-declare_args() {
-  # TODO(crbug.com/105388): make sure no-compile test is not flaky.
-  enable_nocompile_tests =
-      (is_linux || is_mac || is_ios) && is_clang && host_cpu == target_cpu
-}
-
-if (enable_nocompile_tests) {
-  import("//build/config/sysroot.gni")
-  import("//build/config/c++/c++.gni")
-  template("nocompile_test") {
-    nocompile_target = target_name + "_run_nocompile"
-
-    action_foreach(nocompile_target) {
-      script = "//tools/nocompile_driver.py"
-      sources = invoker.sources
-
-      result_path = "$target_gen_dir/{{source_name_part}}_nc.cc"
-      depfile = "${result_path}.d"
-      outputs = [
-        result_path,
-      ]
-      args = [
-        "4",  # number of compilers to invoke in parallel.
-        "{{source}}",
-        rebase_path(result_path, root_build_dir),
-        "--",
-        "-nostdinc++",
-        "-isystem" + rebase_path("$libcxx_prefix/include", root_build_dir),
-        "-isystem" + rebase_path("$libcxxabi_prefix/include", root_build_dir),
-        "-std=c++14",
-        "-Wall",
-        "-Werror",
-        "-Wfatal-errors",
-        "-Wthread-safety",
-        "-I" + rebase_path("//", root_build_dir),
-      ]
-      if (sysroot != "") {
-        args += [
-          "--sysroot",
-          rebase_path(sysroot, root_build_dir),
-        ]
-      }
-    }
-
-    test(target_name) {
-      deps = invoker.deps + [ ":$nocompile_target" ]
-      sources = get_target_outputs(":$nocompile_target")
-    }
-  }
-}
diff --git a/build/package_mac_toolchain.py b/build/package_mac_toolchain.py
deleted file mode 100755
index 48672bb..0000000
--- a/build/package_mac_toolchain.py
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Compress and upload Mac toolchain files.
-
-Stored in https://pantheon.corp.google.com/storage/browser/chrome-mac-sdk/.
-"""
-
-import argparse
-import glob
-import os
-import plistlib
-import re
-import subprocess
-import sys
-import tarfile
-import tempfile
-
-
-TOOLCHAIN_URL = "gs://chrome-mac-sdk"
-
-# It's important to at least remove unused Platform folders to cut down on the
-# size of the toolchain folder.  There are various other unused folders that
-# have been removed through trial and error.  If future versions of Xcode become
-# problematic it's possible this list is incorrect, and can be reduced to just
-# the unused platforms.  On the flip side, it's likely more directories can be
-# excluded.
-DEFAULT_EXCLUDE_FOLDERS = [
-'Contents/Applications',
-'Contents/Developer/Documentation',
-'Contents/Developer/Library/Xcode/Templates',
-'Contents/Developer/Platforms/AppleTVOS.platform',
-'Contents/Developer/Platforms/AppleTVSimulator.platform',
-'Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/'
-    'usr/share/man/',
-'Contents/Developer/Platforms/WatchOS.platform',
-'Contents/Developer/Platforms/WatchSimulator.platform',
-'Contents/Developer/Toolchains/Swift*',
-'Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift',
-'Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-migrator',
-'Contents/Resources/Packages/MobileDevice.pkg',
-]
-
-MAC_EXCLUDE_FOLDERS = [
-# The only thing we need in iPhoneOS.platform on mac is:
-#  \Developer\Library\Xcode\PrivatePlugins
-#  \Info.Plist.
-#  This is the cleanest way to get these.
-'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/Frameworks',
-'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/GPUTools',
-'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/'
-    'GPUToolsPlatform',
-'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/'
-    'PrivateFrameworks',
-'Contents/Developer/Platforms/iPhoneOS.platform/Developer/usr',
-'Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs',
-'Contents/Developer/Platforms/iPhoneOS.platform/DeviceSupport',
-'Contents/Developer/Platforms/iPhoneOS.platform/Library',
-'Contents/Developer/Platforms/iPhoneOS.platform/usr',
-
-# iPhoneSimulator has a similar requirement, but the bulk of the binary size is
-# in \Developer\SDKs, so only excluding that here.
-'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs',
-]
-
-IOS_EXCLUDE_FOLDERS = [
-'Contents/Developer/Platforms/iPhoneOS.platform/DeviceSupport/',
-'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
-    'iPhoneSimulator.sdk/Applications/',
-'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
-    'iPhoneSimulator.sdk/System/Library/AccessibilityBundles/',
-'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
-    'iPhoneSimulator.sdk/System/Library/CoreServices/',
-'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
-    'iPhoneSimulator.sdk/System/Library/LinguisticData/',
-]
-
-def main():
-  """Compress |target_dir| and upload to |TOOLCHAIN_URL|"""
-  parser = argparse.ArgumentParser()
-  parser.add_argument('target_dir',
-                      help="Xcode installation directory.")
-  parser.add_argument('platform', choices=['ios', 'mac'],
-                      help="Target platform for bundle.")
-  parser_args = parser.parse_args()
-
-  # Verify this looks like an Xcode directory.
-  contents_dir = os.path.join(parser_args.target_dir, 'Contents')
-  plist_file = os.path.join(contents_dir, 'version.plist')
-  try:
-    info = plistlib.readPlist(plist_file)
-  except:
-    print "Invalid Xcode dir."
-    return 0
-  build_version = info['ProductBuildVersion']
-
-  # Look for previous toolchain tgz files with the same |build_version|.
-  fname = 'toolchain'
-  if parser_args.platform == 'ios':
-    fname = 'ios-' + fname
-  wildcard_filename = '%s/%s-%s-*.tgz' % (TOOLCHAIN_URL, fname, build_version)
-  p = subprocess.Popen(['gsutil.py', 'ls', wildcard_filename],
-                       stdout=subprocess.PIPE,
-                       stderr=subprocess.PIPE)
-  output = p.communicate()[0]
-  next_count = 1
-  if p.returncode == 0:
-    next_count = len(output.split('\n'))
-    sys.stdout.write("%s already exists (%s). "
-                     "Do you want to create another? [y/n] "
-                     % (build_version, next_count - 1))
-
-    if raw_input().lower() not in set(['yes','y', 'ye']):
-      print "Skipping duplicate upload."
-      return 0
-
-  os.chdir(parser_args.target_dir)
-  toolchain_file_name = "%s-%s-%s" % (fname, build_version, next_count)
-  toolchain_name = tempfile.mktemp(suffix='toolchain.tgz')
-
-  print "Creating %s (%s)." % (toolchain_file_name, toolchain_name)
-  os.environ["COPYFILE_DISABLE"] = "1"
-  os.environ["GZ_OPT"] = "-8"
-  args = ['tar', '-cvzf', toolchain_name]
-  exclude_folders = DEFAULT_EXCLUDE_FOLDERS
-  if parser_args.platform == 'mac':
-    exclude_folders += MAC_EXCLUDE_FOLDERS
-  else:
-    exclude_folders += IOS_EXCLUDE_FOLDERS
-  args.extend(map('--exclude={0}'.format, exclude_folders))
-  args.extend(['.'])
-  subprocess.check_call(args)
-
-  print "Uploading %s toolchain." % toolchain_file_name
-  destination_path = '%s/%s.tgz' % (TOOLCHAIN_URL, toolchain_file_name)
-  subprocess.check_call(['gsutil.py', 'cp', '-n', toolchain_name,
-                         destination_path])
-
-  print "Done with %s upload." % toolchain_file_name
-  return 0
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/precompile.cc b/build/precompile.cc
deleted file mode 100644
index db1ef6d..0000000
--- a/build/precompile.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2011 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Precompiled header generator for Windows builds. No include is needed
-// in this file as the PCH include is forced via the "Forced Include File"
-// flag in the projects generated by GYP.
diff --git a/build/precompile.h b/build/precompile.h
deleted file mode 100644
index c699562..0000000
--- a/build/precompile.h
+++ /dev/null
@@ -1,53 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file is used as a precompiled header for both C and C++ files. So
-// any C++ headers must go in the __cplusplus block below.
-
-#if defined(BUILD_PRECOMPILE_H_)
-#error You shouldn't include the precompiled header file more than once.
-#endif
-
-#define BUILD_PRECOMPILE_H_
-
-#include <errno.h>
-#include <fcntl.h>
-#include <limits.h>
-#include <math.h>
-#include <memory.h>
-#include <signal.h>
-#include <stdarg.h>
-#include <stddef.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <time.h>
-
-#if defined(__cplusplus)
-
-#include <algorithm>
-#include <bitset>
-#include <cmath>
-#include <cstddef>
-#include <cstdio>
-#include <cstdlib>
-#include <cstring>
-#include <fstream>
-#include <functional>
-#include <iomanip>
-#include <iosfwd>
-#include <iterator>
-#include <limits>
-#include <list>
-#include <map>
-#include <numeric>
-#include <ostream>
-#include <queue>
-#include <set>
-#include <sstream>
-#include <string>
-#include <utility>
-#include <vector>
-
-#endif  // __cplusplus
diff --git a/build/print_python_deps.py b/build/print_python_deps.py
deleted file mode 100755
index fe71c4a..0000000
--- a/build/print_python_deps.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env vpython
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Prints all non-system dependencies for the given module.
-
-The primary use-case for this script is to generate the list of python modules
-required for .isolate files.
-"""
-
-import argparse
-import imp
-import os
-import pipes
-import sys
-
-# Don't use any helper modules, or else they will end up in the results.
-
-
-_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
-
-
-def _ComputePythonDependencies():
-  """Gets the paths of imported non-system python modules.
-
-  A path is assumed to be a "system" import if it is outside of chromium's
-  src/. The returned paths are absolute.
-  """
-  module_paths = (m.__file__ for m in sys.modules.values()
-                  if m and hasattr(m, '__file__'))
-
-  src_paths = set()
-  for path in module_paths:
-    if path == __file__:
-      continue
-    path = os.path.abspath(path)
-    if not path.startswith(_SRC_ROOT):
-      continue
-
-    if (path.endswith('.pyc')
-        or (path.endswith('c') and not os.path.splitext(path)[1])):
-      path = path[:-1]
-    src_paths.add(path)
-
-  return src_paths
-
-
-def _NormalizeCommandLine(options):
-  """Returns a string that when run from SRC_ROOT replicates the command."""
-  args = ['build/print_python_deps.py']
-  root = os.path.relpath(options.root, _SRC_ROOT)
-  if root != '.':
-    args.extend(('--root', root))
-  if options.output:
-    args.extend(('--output', os.path.relpath(options.output, _SRC_ROOT)))
-  for whitelist in sorted(options.whitelists):
-    args.extend(('--whitelist', os.path.relpath(whitelist, _SRC_ROOT)))
-  args.append(os.path.relpath(options.module, _SRC_ROOT))
-  return ' '.join(pipes.quote(x) for x in args)
-
-
-def _FindPythonInDirectory(directory):
-  """Returns an iterable of all non-test python files in the given directory."""
-  files = []
-  for root, _dirnames, filenames in os.walk(directory):
-    for filename in filenames:
-      if filename.endswith('.py') and not filename.endswith('_test.py'):
-        yield os.path.join(root, filename)
-
-
-def main():
-  parser = argparse.ArgumentParser(
-      description='Prints all non-system dependencies for the given module.')
-  parser.add_argument('module',
-                      help='The python module to analyze.')
-  parser.add_argument('--root', default='.',
-                      help='Directory to make paths relative to.')
-  parser.add_argument('--output',
-                      help='Write output to a file rather than stdout.')
-  parser.add_argument('--whitelist', default=[], action='append',
-                      dest='whitelists',
-                      help='Recursively include all non-test python files '
-                      'within this directory. May be specified multiple times.')
-  options = parser.parse_args()
-  # Replace the path entry for print_python_deps.py with the one for the given
-  # module.
-  sys.path[0] = os.path.dirname(options.module)
-  imp.load_source('NAME', options.module)
-
-  paths_set = _ComputePythonDependencies()
-  for path in options.whitelists:
-    paths_set.update(os.path.abspath(p) for p in _FindPythonInDirectory(path))
-
-  paths = [os.path.relpath(p, options.root) for p in paths_set]
-
-  normalized_cmdline = _NormalizeCommandLine(options)
-  out = open(options.output, 'w') if options.output else sys.stdout
-  with out:
-    out.write('# Generated by running:\n')
-    out.write('#   %s\n' % normalized_cmdline)
-    for path in sorted(paths):
-      out.write(path + '\n')
-
-
-if __name__ == '__main__':
-  sys.exit(main())
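
The dependency walk in the file above amounts to: import the module, then keep every entry in sys.modules whose __file__ lives under the source root. A hedged standalone sketch of that idea:

    import os
    import sys

    def python_deps_under(src_root):
        # Collect the files of already-imported modules that live under src_root.
        src_root = os.path.abspath(src_root)
        deps = set()
        for mod in list(sys.modules.values()):
            path = getattr(mod, '__file__', None)
            if not path:
                continue
            path = os.path.abspath(path)
            if path.endswith('.pyc'):
                path = path[:-1]  # report the .py source, as the script did
            if path.startswith(src_root):
                deps.add(path)
        return sorted(deps)

    print(python_deps_under('.'))  # modules of this process under the current dir
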
diff --git a/build/protoc_java.py b/build/protoc_java.py
deleted file mode 100755
index 2addb82..0000000
--- a/build/protoc_java.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Generate java source files from protobuf files.
-
-This is a helper file for the genproto_java action in protoc_java.gypi.
-
-It performs the following steps:
-1. Deletes all old sources (ensures deleted classes are not part of new jars).
-2. Creates source directory.
-3. Generates Java files using protoc (output into either --java-out-dir or
-   --srcjar).
-4. Creates a new stamp file.
-"""
-
-import os
-import optparse
-import shutil
-import subprocess
-import sys
-
-sys.path.append(os.path.join(os.path.dirname(__file__), "android", "gyp"))
-from util import build_utils
-
-def main(argv):
-  parser = optparse.OptionParser()
-  build_utils.AddDepfileOption(parser)
-  parser.add_option("--protoc", help="Path to protoc binary.")
-  parser.add_option("--proto-path", help="Path to proto directory.")
-  parser.add_option("--java-out-dir",
-      help="Path to output directory for java files.")
-  parser.add_option("--srcjar", help="Path to output srcjar.")
-  parser.add_option("--stamp", help="File to touch on success.")
-  parser.add_option("--nano",
-      help="Use to generate nano protos.", action='store_true')
-  options, args = parser.parse_args(argv)
-
-  build_utils.CheckOptions(options, parser, ['protoc', 'proto_path'])
-  if not options.java_out_dir and not options.srcjar:
-    print 'One of --java-out-dir or --srcjar must be specified.'
-    return 1
-
-  with build_utils.TempDir() as temp_dir:
-    if options.nano:
-      # Specify arguments to the generator.
-      generator_args = ['optional_field_style=reftypes',
-                        'store_unknown_fields=true']
-      out_arg = '--javanano_out=' + ','.join(generator_args) + ':' + temp_dir
-    else:
-      out_arg = '--java_out=' + temp_dir
-
-      # Check if all proto files (which are listed in the args) are opting to
-      # use the lite runtime, otherwise we'd have to include the much heavier
-      # regular proto runtime in Chrome.
-      # TODO(jkrcal): Replace this check by '--java_lite_out=' for the out_arg
-      # above once this works on the master branch of the protobuf library,
-      # expected in version 4.0 (see https://crbug.com/800281).
-      for proto_file in args:
-        if not 'LITE_RUNTIME' in open(proto_file).read():
-          raise Exception(
-              'Chrome only supports lite protos. Please add "optimize_for = '
-              'LITE_RUNTIME" to your proto file to enable the lite runtime.')
-    # Generate Java files using protoc.
-    build_utils.CheckOutput(
-        [options.protoc, '--proto_path', options.proto_path, out_arg]
-        + args)
-
-    if options.java_out_dir:
-      build_utils.DeleteDirectory(options.java_out_dir)
-      shutil.copytree(temp_dir, options.java_out_dir)
-    else:
-      build_utils.ZipDir(options.srcjar, temp_dir)
-
-  if options.depfile:
-    assert options.srcjar
-    deps = args + [options.protoc]
-    build_utils.WriteDepfile(options.depfile, options.srcjar, deps)
-
-  if options.stamp:
-    build_utils.Touch(options.stamp)
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
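
For reference, the protoc invocation assembled above looks roughly like this sketch (paths are placeholders; the nano generator arguments are the ones listed in the deleted script):

    def protoc_command(protoc, proto_path, out_dir, protos, nano=False):
        # Build the argument list the script passed to protoc.
        if nano:
            gen_args = 'optional_field_style=reftypes,store_unknown_fields=true'
            out_arg = '--javanano_out=%s:%s' % (gen_args, out_dir)
        else:
            out_arg = '--java_out=' + out_dir
        return [protoc, '--proto_path', proto_path, out_arg] + list(protos)

    print(protoc_command('protoc', 'src/protos', 'gen/java', ['foo.proto']))
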
diff --git a/build/redirect_stdout.py b/build/redirect_stdout.py
deleted file mode 100644
index 72d0732..0000000
--- a/build/redirect_stdout.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import subprocess
-import sys
-
-# This script executes a command and redirects the stdout to a file. This is
-# equivalent to |command... > output_file|.
-#
-# Usage: python redirect_stdout.py output_file command...
-
-if __name__ == '__main__':
-  if len(sys.argv) < 2:
-    print >> sys.stderr, "Usage: %s output_file command..." % (sys.argv[0])
-    sys.exit(1)
-
-  with open(sys.argv[1], 'w') as fp:
-    sys.exit(subprocess.check_call(sys.argv[2:], stdout=fp))
diff --git a/build/rm.py b/build/rm.py
deleted file mode 100755
index 5ca642d..0000000
--- a/build/rm.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Delete a file.
-
-This module works much like the rm posix command.
-"""
-
-import argparse
-import os
-import sys
-
-
-def Main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument('files', nargs='+')
-  parser.add_argument('-f', '--force', action='store_true',
-                      help="don't err on missing")
-  parser.add_argument('--stamp', required=True, help='touch this file')
-  args = parser.parse_args()
-  for f in args.files:
-    try:
-      os.remove(f)
-    except OSError:
-      if not args.force:
-        print >>sys.stderr, "'%s' does not exist" % f
-        return 1
-
-  with open(args.stamp, 'w'):
-    os.utime(args.stamp, None)
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(Main())
diff --git a/build/run_swarming_xcode_install.py b/build/run_swarming_xcode_install.py
deleted file mode 100755
index a731c1b..0000000
--- a/build/run_swarming_xcode_install.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This script runs swarming_xcode_install on the bots.  It should be run when we
-need to upgrade all the swarming testers.  It:
-  1) Packages two python files into an isolate.
-  2) Runs the isolate on swarming machines that satisfy certain dimensions.
-
-Example usage:
-  $  ./build/run_swarming_xcode_install.py  --luci_path ~/work/luci-py \
-       --swarming-server touch-swarming.appspot.com \
-       --isolate-server touch-isolate.appspot.com
-"""
-
-import argparse
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-
-def main():
-  parser = argparse.ArgumentParser(
-      description='Run swarming_xcode_install on the bots.')
-  parser.add_argument('--luci_path', required=True, type=os.path.abspath)
-  parser.add_argument('--swarming-server', required=True, type=str)
-  parser.add_argument('--isolate-server', required=True, type=str)
-  parser.add_argument('--batches', type=int, default=25,
-                      help="Run xcode install in batches of size |batches|.")
-  parser.add_argument('--dimension', nargs=2, action='append')
-  args = parser.parse_args()
-
-  args.dimension = args.dimension or []
-
-  script_dir = os.path.dirname(os.path.abspath(__file__))
-  tmp_dir = tempfile.mkdtemp(prefix='swarming_xcode')
-  try:
-    print 'Making isolate.'
-    shutil.copyfile(os.path.join(script_dir, 'swarming_xcode_install.py'),
-                    os.path.join(tmp_dir, 'swarming_xcode_install.py'))
-    shutil.copyfile(os.path.join(script_dir, 'mac_toolchain.py'),
-                    os.path.join(tmp_dir, 'mac_toolchain.py'))
-
-    luci_client = os.path.join(args.luci_path, 'client')
-    cmd = [
-      sys.executable, os.path.join(luci_client, 'isolateserver.py'), 'archive',
-      '-I', args.isolate_server, tmp_dir,
-    ]
-    isolate_hash = subprocess.check_output(cmd).split()[0]
-
-    print 'Running swarming_xcode_install.'
-    # TODO(crbug.com/765361): The dimensions below should be updated once
-    # swarming for iOS is fleshed out, likely removing xcode_version 9 and
-    # adding different dimensions.
-    luci_tools = os.path.join(luci_client, 'tools')
-    dimensions = [['pool', 'Chrome'], ['xcode_version', '9.0']] + args.dimension
-    dim_args = []
-    for d in dimensions:
-      dim_args += ['--dimension'] + d
-    cmd = [
-      sys.executable, os.path.join(luci_tools, 'run_on_bots.py'),
-      '--swarming', args.swarming_server, '--isolate-server',
-      args.isolate_server, '--priority', '20', '--batches', str(args.batches),
-      '--tags', 'name:run_swarming_xcode_install',
-    ] + dim_args + ['--name', 'run_swarming_xcode_install', '--', isolate_hash,
-      'python', 'swarming_xcode_install.py',
-    ]
-    subprocess.check_call(cmd)
-    print 'All tasks completed.'
-
-  finally:
-    shutil.rmtree(tmp_dir)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/sample_arg_file.gn b/build/sample_arg_file.gn
deleted file mode 100644
index 91e9045..0000000
--- a/build/sample_arg_file.gn
+++ /dev/null
@@ -1,6 +0,0 @@
-# Build arguments go here. Here are some of the most commonly set ones.
-# Run `gn args <out_dir> --list` for the full list.
-#   is_component_build = true
-#   is_debug = true
-#   symbol_level = 2
-#   use_goma = false
diff --git a/build/sanitize-mac-build-log.sed b/build/sanitize-mac-build-log.sed
deleted file mode 100644
index b4111c7..0000000
--- a/build/sanitize-mac-build-log.sed
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Use this sed script to reduce a Mac build log into something readable.
-
-# Drop uninformative lines.
-/^distcc/d
-/^Check dependencies/d
-/^    setenv /d
-/^    cd /d
-/^make: Nothing to be done/d
-/^$/d
-
-# Xcode prints a short "compiling foobar.o" line followed by the lengthy
-# full command line.  These deletions drop the command line.
-\|^    /Developer/usr/bin/|d
-\|^    /Developer/Library/PrivateFrameworks/DevToolsCore\.framework/|d
-\|^    /Developer/Library/Xcode/Plug-ins/CoreBuildTasks\.xcplugin/|d
-
-# Drop any goma command lines as well.
-\|^    .*/gomacc |d
-
-# And, if you've overridden something from your own bin directory, remove those
-# full command lines, too.
-\|^    /Users/[^/]*/bin/|d
-
-# There's already a nice note for bindings, don't need the command line.
-\|^python scripts/rule_binding\.py|d
-
-# Shorten the "compiling foobar.o" line.
-s|^Distributed-CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
-s|^CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
diff --git a/build/sanitize-mac-build-log.sh b/build/sanitize-mac-build-log.sh
deleted file mode 100755
index df5a7af..0000000
--- a/build/sanitize-mac-build-log.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/build/sanitize-win-build-log.sed b/build/sanitize-win-build-log.sed
deleted file mode 100644
index c18e664..0000000
--- a/build/sanitize-win-build-log.sed
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Use this sed script to reduce a Windows build log into something
-# machine-parsable.
-
-# Drop uninformative lines.
-/The operation completed successfully\./d
-
-# Drop parallelization indicators on lines.
-s/^[0-9]+>//
-
-# Shorten bindings generation lines
-s/^.*"python".*idl_compiler\.py".*("[^"]+\.idl").*$/  idl_compiler \1/
diff --git a/build/sanitize-win-build-log.sh b/build/sanitize-win-build-log.sh
deleted file mode 100755
index df5a7af..0000000
--- a/build/sanitize-win-build-log.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/build/sanitizers/OWNERS b/build/sanitizers/OWNERS
deleted file mode 100644
index 3059b0e..0000000
--- a/build/sanitizers/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-glider@chromium.org
-eugenis@chromium.org
-per-file tsan_suppressions.cc=*
-per-file lsan_suppressions.cc=*
diff --git a/build/sanitizers/asan_suppressions.cc b/build/sanitizers/asan_suppressions.cc
deleted file mode 100644
index df94bc8..0000000
--- a/build/sanitizers/asan_suppressions.cc
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file contains the default suppressions for AddressSanitizer.
-// It should only be used under very limited circumstances such as suppressing
-// a report caused by an interceptor call in a system-installed library.
-
-#if defined(ADDRESS_SANITIZER)
-
-// Please make sure the code below declares a single string variable
-// kASanDefaultSuppressions which contains ASan suppressions delimited by
-// newlines.
-char kASanDefaultSuppressions[] =
-// http://crbug.com/178677
-"interceptor_via_lib:libsqlite3.so\n"
-
-// PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
-
-// End of suppressions.
-;  // Please keep this semicolon.
-
-#endif  // ADDRESS_SANITIZER
diff --git a/build/sanitizers/lsan_suppressions.cc b/build/sanitizers/lsan_suppressions.cc
deleted file mode 100644
index a64f998..0000000
--- a/build/sanitizers/lsan_suppressions.cc
+++ /dev/null
@@ -1,108 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file contains the default suppressions for LeakSanitizer.
-// You can also pass additional suppressions via LSAN_OPTIONS:
-// LSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
-// http://dev.chromium.org/developers/testing/leaksanitizer for more info.
-
-#if defined(LEAK_SANITIZER)
-
-// Please make sure the code below declares a single string variable
-// kLSanDefaultSuppressions which contains LSan suppressions delimited by
-// newlines. See http://dev.chromium.org/developers/testing/leaksanitizer
-// for the instructions on writing suppressions.
-char kLSanDefaultSuppressions[] =
-    // Intentional leak used as sanity test for Valgrind/memcheck.
-    "leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
-
-    // ================ Leaks in third-party code ================
-
-    // False positives in libfontconfig. http://crbug.com/39050
-    "leak:libfontconfig\n"
-    // eglibc-2.19/string/strdup.c creates false positive leak errors for the
-    // same reason as crbug.com/39050. The leak error stack trace, when
-    // unwinding on malloc, includes a call to libfontconfig. But the default
-    // stack trace is too short on the leak sanitizer bot for the libfontconfig
-    // suppression to work. http://crbug.com/605286
-    "leak:__strdup\n"
-
-    // Leaks in Nvidia's libGL.
-    "leak:libGL.so\n"
-
-    // TODO(eugenis): revisit NSS suppressions after the switch to BoringSSL
-    // NSS leaks in CertDatabaseNSSTest tests. http://crbug.com/51988
-    "leak:net::NSSCertDatabase::ImportFromPKCS12\n"
-    "leak:net::NSSCertDatabase::ListCerts\n"
-    "leak:net::NSSCertDatabase::DeleteCertAndKey\n"
-    "leak:crypto::ScopedTestNSSDB::ScopedTestNSSDB\n"
-    // Another leak due to not shutting down NSS properly.
-    // http://crbug.com/124445
-    "leak:error_get_my_stack\n"
-    // The NSS suppressions above will not fire when the fast stack unwinder is
-    // used, because it can't unwind through NSS libraries. Apply blanket
-    // suppressions for now.
-    "leak:libnssutil3\n"
-    "leak:libnspr4\n"
-    "leak:libnss3\n"
-    "leak:libplds4\n"
-    "leak:libnssckbi\n"
-
-    // XRandR has several one-time leaks.
-    "leak:libxrandr\n"
-
-    // xrandr leak. http://crbug.com/119677
-    "leak:XRRFindDisplay\n"
-
-    // http://crbug.com/431213, http://crbug.com/416665
-    "leak:gin/object_template_builder.h\n"
-
-    // Leaks in swrast_dri.so. http://crbug.com/540042
-    "leak:swrast_dri.so\n"
-
-    // Leak in glibc's gconv caused by fopen(..., "r,ccs=UNICODE")
-    "leak:__gconv_lookup_cache\n"
-
-    // ================ Leaks in Chromium code ================
-    // PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
-    // Instead, commits that introduce memory leaks should be reverted.
-    // Suppressing the leak is acceptable in some cases when reverting is
-    // impossible, i.e. when enabling leak detection for the first time for a
-    // test target with pre-existing leaks.
-
-    // Small test-only leak in ppapi_unittests. http://crbug.com/258113
-    "leak:ppapi::proxy::PPP_Instance_Private_ProxyTest_PPPInstancePrivate_"
-    "Test\n"
-
-    // http://crbug.com/322671
-    "leak:content::SpeechRecognitionBrowserTest::SetUpOnMainThread\n"
-
-    // http://crbug.com/355641
-    "leak:TrayAccessibilityTest\n"
-
-    // http://crbug.com/354644
-    "leak:CertificateViewerUITest::ShowModalCertificateViewer\n"
-
-    // http://crbug.com/356306
-    "leak:service_manager::SetProcessTitleFromCommandLine\n"
-
-    // https://crbug.com/755670
-    "leak:third_party/yasm/\n"
-
-    // v8 leaks caused by weak callbacks not being called
-    "leak:blink::DOMWrapperWorld::Create\n"
-    "leak:blink::ScriptState::Create\n"
-
-    // https://crbug.com/795148
-    "leak:third_party/fontconfig/\n"
-
-    // https://crbug.com/831667
-    "leak:gin/*_unittest.cc\n"
-
-    // PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
-
-    // End of suppressions.
-    ;  // Please keep this semicolon.
-
-#endif  // LEAK_SANITIZER
diff --git a/build/sanitizers/sanitizer_options.cc b/build/sanitizers/sanitizer_options.cc
deleted file mode 100644
index 7f90f19..0000000
--- a/build/sanitizers/sanitizer_options.cc
+++ /dev/null
@@ -1,176 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-//
-// This file contains the default options for various compiler-based dynamic
-// tools.
-
-#include "build/build_config.h"
-
-#if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) ||  \
-    defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER) || \
-    defined(UNDEFINED_SANITIZER)
-// Functions returning default options are declared weak in the tools' runtime
-// libraries. To make the linker pick the strong replacements for those
-// functions from this module, we explicitly force its inclusion by passing
-// -Wl,-u_sanitizer_options_link_helper
-extern "C"
-void _sanitizer_options_link_helper() { }
-
-// The callbacks we define here will be called from the sanitizer runtime, but
-// aren't referenced from the Chrome executable. We must ensure that those
-// callbacks are not sanitizer-instrumented, and that they aren't stripped by
-// the linker.
-#define SANITIZER_HOOK_ATTRIBUTE                                           \
-  extern "C"                                                               \
-  __attribute__((no_sanitize("address", "memory", "thread", "undefined"))) \
-  __attribute__((visibility("default")))                                   \
-  __attribute__((used))
-#endif
-
-#if defined(ADDRESS_SANITIZER)
-// Default options for AddressSanitizer in various configurations:
-//   malloc_context_size=5 - limit the size of stack traces collected by ASan
-//     for each malloc/free to 5 frames. These stack traces tend to accumulate
-//     very fast in applications using JIT (v8 in Chrome's case), see
-//     https://code.google.com/p/address-sanitizer/issues/detail?id=177
-//   symbolize=1 - enable in-process symbolization.
-//   legacy_pthread_cond=1 - run in the libpthread 2.2.5 compatibility mode to
-//     work around libGL.so using the obsolete API, see
-//     http://crbug.com/341805. This may break if pthread_cond_t objects are
-//     accessed by both instrumented and non-instrumented binaries (e.g. if
-//     they reside in shared memory). This option is going to be deprecated in
-//     upstream AddressSanitizer and must not be used anywhere except the
-//     official builds.
-//   check_printf=1 - check the memory accesses to printf (and other formatted
-//     output routines) arguments.
-//   use_sigaltstack=1 - handle signals on an alternate signal stack. Useful
-//     for stack overflow detection.
-//   strip_path_prefix=/../../ - prefixes up to and including this
-//     substring will be stripped from source file paths in symbolized reports
-//   fast_unwind_on_fatal=1 - use the fast (frame-pointer-based) stack unwinder
-//     to print error reports. V8 doesn't generate debug info for the JIT code,
-//     so the slow unwinder may not work properly.
-//   detect_stack_use_after_return=1 - use fake stack to delay the reuse of
-//     stack allocations and detect stack-use-after-return errors.
-#if defined(OS_LINUX)
-#if defined(GOOGLE_CHROME_BUILD)
-// Default AddressSanitizer options for the official build. These do not affect
-// tests on buildbots (which don't set GOOGLE_CHROME_BUILD) or non-official
-// Chromium builds.
-const char kAsanDefaultOptions[] =
-    "legacy_pthread_cond=1 malloc_context_size=5 "
-    "symbolize=1 check_printf=1 use_sigaltstack=1 detect_leaks=0 "
-    "strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
-    "allow_user_segv_handler=1 ";
-#else
-// Default AddressSanitizer options for buildbots and non-official builds.
-const char* kAsanDefaultOptions =
-    "symbolize=1 check_printf=1 use_sigaltstack=1 "
-    "detect_leaks=0 strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
-    "detect_stack_use_after_return=1 "
-    "allow_user_segv_handler=1 ";
-#endif  // GOOGLE_CHROME_BUILD
-
-#elif defined(OS_MACOSX)
-const char *kAsanDefaultOptions =
-    "check_printf=1 use_sigaltstack=1 "
-    "strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
-    "detect_stack_use_after_return=1 detect_odr_violation=0 ";
-#endif  // OS_LINUX
-
-#if defined(OS_LINUX) || defined(OS_MACOSX)
-// Allow NaCl to override the default asan options.
-extern const char* kAsanDefaultOptionsNaCl;
-__attribute__((weak)) const char* kAsanDefaultOptionsNaCl = nullptr;
-
-SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_options() {
-  if (kAsanDefaultOptionsNaCl)
-    return kAsanDefaultOptionsNaCl;
-  return kAsanDefaultOptions;
-}
-
-extern char kASanDefaultSuppressions[];
-
-SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_suppressions() {
-  return kASanDefaultSuppressions;
-}
-#endif  // OS_LINUX || OS_MACOSX
-#endif  // ADDRESS_SANITIZER
-
-#if defined(THREAD_SANITIZER) && defined(OS_LINUX)
-// Default options for ThreadSanitizer in various configurations:
-//   detect_deadlocks=1 - enable deadlock (lock inversion) detection.
-//   second_deadlock_stack=1 - more verbose deadlock reports.
-//   report_signal_unsafe=0 - do not report async-signal-unsafe functions
-//     called from signal handlers.
-//   report_thread_leaks=0 - do not report unjoined threads at the end of
-//     the program execution.
-//   print_suppressions=1 - print the list of matched suppressions.
-//   history_size=7 - make the history buffer proportional to 2^7 (the maximum
-//     value) to keep more stack traces.
-//   strip_path_prefix=/../../ - prefixes up to and including this
-//     substring will be stripped from source file paths in symbolized reports.
-const char kTsanDefaultOptions[] =
-    "detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 "
-    "report_thread_leaks=0 print_suppressions=1 history_size=7 "
-    "strict_memcmp=0 strip_path_prefix=/../../ ";
-
-SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_options() {
-  return kTsanDefaultOptions;
-}
-
-extern char kTSanDefaultSuppressions[];
-
-SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() {
-  return kTSanDefaultSuppressions;
-}
-
-#endif  // THREAD_SANITIZER && OS_LINUX
-
-#if defined(MEMORY_SANITIZER)
-// Default options for MemorySanitizer:
-//   intercept_memcmp=0 - do not detect uninitialized memory in memcmp() calls.
-//     Pending cleanup, see http://crbug.com/523428
-//   strip_path_prefix=/../../ - prefixes up to and including this
-//     substring will be stripped from source file paths in symbolized reports.
-const char kMsanDefaultOptions[] =
-    "intercept_memcmp=0 strip_path_prefix=/../../ ";
-
-SANITIZER_HOOK_ATTRIBUTE const char *__msan_default_options() {
-  return kMsanDefaultOptions;
-}
-
-#endif  // MEMORY_SANITIZER
-
-#if defined(LEAK_SANITIZER)
-// Default options for LeakSanitizer:
-//   print_suppressions=1 - print the list of matched suppressions.
-//   strip_path_prefix=/../../ - prefixes up to and including this
-//     substring will be stripped from source file paths in symbolized reports.
-const char kLsanDefaultOptions[] =
-    "print_suppressions=1 strip_path_prefix=/../../ ";
-
-SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_options() {
-  return kLsanDefaultOptions;
-}
-
-extern char kLSanDefaultSuppressions[];
-
-SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() {
-  return kLSanDefaultSuppressions;
-}
-
-#endif  // LEAK_SANITIZER
-
-#if defined(UNDEFINED_SANITIZER)
-// Default options for UndefinedBehaviorSanitizer:
-//   print_stacktrace=1 - print the stacktrace when UBSan reports an error.
-const char kUbsanDefaultOptions[] =
-    "print_stacktrace=1 strip_path_prefix=/../../ ";
-
-SANITIZER_HOOK_ATTRIBUTE const char* __ubsan_default_options() {
-  return kUbsanDefaultOptions;
-}
-
-#endif  // UNDEFINED_SANITIZER
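
For reference, the weak-override pattern that the deleted sanitizer_options.cc relied on boils down to a few lines. The sketch below is illustrative only (the option string and the single "address" no_sanitize attribute are assumptions chosen for brevity); it shows how a strong, uninstrumented, always-retained definition of __asan_default_options(), together with the _sanitizer_options_link_helper symbol forced in via -Wl,-u_sanitizer_options_link_helper, lets the runtime pick up compiled-in defaults:

  // Sketch of the weak-override pattern from the deleted file; illustrative
  // only. Assumes the translation unit is linked into an ASan-instrumented
  // binary and the link line passes -Wl,-u_sanitizer_options_link_helper.
  extern "C" void _sanitizer_options_link_helper() {}

  // The ASan runtime declares __asan_default_options() as a weak symbol; a
  // strong, uninstrumented, externally visible definition overrides it.
  extern "C" __attribute__((no_sanitize("address")))
  __attribute__((visibility("default"))) __attribute__((used))
  const char* __asan_default_options() {
    return "symbolize=1 check_printf=1 strip_path_prefix=/../../ ";
  }

Options set through the ASAN_OPTIONS environment variable are still honored and take precedence over the compiled-in string.
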
diff --git a/build/sanitizers/tsan_suppressions.cc b/build/sanitizers/tsan_suppressions.cc
deleted file mode 100644
index 9e475c6..0000000
--- a/build/sanitizers/tsan_suppressions.cc
+++ /dev/null
@@ -1,270 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file contains the default suppressions for ThreadSanitizer.
-// You can also pass additional suppressions via TSAN_OPTIONS:
-// TSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
-// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
-// for more info.
-
-#if defined(THREAD_SANITIZER)
-
-// Please make sure the code below declares a single string variable
-// kTSanDefaultSuppressions that contains newline-delimited TSan suppressions.
-// See http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
-// for the instructions on writing suppressions.
-char kTSanDefaultSuppressions[] =
-    // False positives in libflashplayer.so, libgio.so and libglib.so.
-    // Since we don't instrument them, we cannot reason about the
-    // synchronization in them.
-    "race:libflashplayer.so\n"
-    "race:libgio*.so\n"
-    "race:libglib*.so\n"
-
-    // Intentional race in ToolsSanityTest.DataRace in base_unittests.
-    "race:base/tools_sanity_unittest.cc\n"
-
-    // Data race on WatchdogCounter [test-only].
-    "race:base/threading/watchdog_unittest.cc\n"
-
-    // Races in libevent, http://crbug.com/23244.
-    "race:libevent/event.c\n"
-
-    // http://crbug.com/84094.
-    "race:sqlite3StatusSet\n"
-    "race:pcache1EnforceMaxPage\n"
-    "race:pcache1AllocPage\n"
-
-    // http://crbug.com/120808
-    "race:base/threading/watchdog.cc\n"
-
-    // http://crbug.com/157586
-    "race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n"
-
-    // http://crbug.com/158718
-    "race:third_party/ffmpeg/libavcodec/pthread.c\n"
-    "race:third_party/ffmpeg/libavcodec/pthread_frame.c\n"
-    "race:third_party/ffmpeg/libavcodec/vp8.c\n"
-    "race:third_party/ffmpeg/libavutil/mem.c\n"
-    "race:*HashFrameForTesting\n"
-    "race:third_party/ffmpeg/libavcodec/h264pred.c\n"
-    "race:media::ReleaseData\n"
-
-    // http://crbug.com/158922
-    "race:third_party/libvpx/source/libvpx/vp8/encoder/*\n"
-    "race:third_party/libvpx/source/libvpx/vp9/encoder/*\n"
-
-    // http://crbug.com/239359
-    "race:media::TestInputCallback::OnData\n"
-
-    // http://crbug.com/244368
-    "race:skia::BeginPlatformPaint\n"
-
-    // http://crbug.com/244385
-    "race:unixTempFileDir\n"
-
-    // http://crbug.com/244755
-    "race:v8::internal::Zone::NewExpand\n"
-    "race:TooLateToEnableNow\n"
-    "race:adjust_segment_bytes_allocated\n"
-
-    // http://crbug.com/244774
-    "race:webrtc::RTPReceiver::ProcessBitrate\n"
-    "race:webrtc::RTPSender::ProcessBitrate\n"
-    "race:webrtc::VideoCodingModuleImpl::Decode\n"
-    "race:webrtc::RTPSender::SendOutgoingData\n"
-    "race:webrtc::LibvpxVp8Encoder::GetEncodedPartitions\n"
-    "race:webrtc::LibvpxVp8Encoder::Encode\n"
-    "race:webrtc::ViEEncoder::DeliverFrame\n"
-    "race:webrtc::vcm::VideoReceiver::Decode\n"
-    "race:webrtc::VCMReceiver::FrameForDecoding\n"
-    "race:*trace_event_unique_catstatic*\n"
-
-    // http://crbug.com/244856
-    "race:libpulsecommon*.so\n"
-
-    // http://crbug.com/246968
-    "race:webrtc::VideoCodingModuleImpl::RegisterPacketRequestCallback\n"
-
-    // http://crbug.com/257396
-    "race:base::trace_event::"
-    "TraceEventTestFixture_TraceSamplingScope_Test::TestBody\n"
-
-    // http://crbug.com/258479
-    "race:SamplingStateScope\n"
-    "race:g_trace_state\n"
-
-    // http://crbug.com/258499
-    "race:third_party/skia/include/core/SkRefCnt.h\n"
-
-    // http://crbug.com/268924
-    "race:base::g_power_monitor\n"
-    "race:base::PowerMonitor::PowerMonitor\n"
-    "race:base::PowerMonitor::AddObserver\n"
-    "race:base::PowerMonitor::RemoveObserver\n"
-    "race:base::PowerMonitor::IsOnBatteryPower\n"
-
-    // http://crbug.com/258935
-    "race:base::Thread::StopSoon\n"
-
-    // http://crbug.com/272095
-    "race:base::g_top_manager\n"
-
-    // http://crbug.com/308590
-    "race:CustomThreadWatcher::~CustomThreadWatcher\n"
-
-    // http://crbug.com/310851
-    "race:net::ProxyResolverV8Tracing::Job::~Job\n"
-
-    // http://crbug.com/476529
-    "deadlock:cc::VideoLayerImpl::WillDraw\n"
-
-    // http://crbug.com/328826
-    "race:gLCDOrder\n"
-    "race:gLCDOrientation\n"
-
-    // http://crbug.com/328868
-    "race:PR_Lock\n"
-
-    // http://crbug.com/333244
-    "race:content::"
-    "VideoCaptureImplTest::MockVideoCaptureImpl::~MockVideoCaptureImpl\n"
-
-    // http://crbug.com/333871
-    "race:v8::internal::Interface::NewValue()::value_interface\n"
-    "race:v8::internal::IsMinusZero(double)::minus_zero\n"
-    "race:v8::internal::FastCloneShallowObjectStub::"
-    "InitializeInterfaceDescriptor\n"
-    "race:v8::internal::KeyedLoadStubCompiler::registers\n"
-    "race:v8::internal::KeyedStoreStubCompiler::registers()::registers\n"
-    "race:v8::internal::KeyedLoadFastElementStub::"
-    "InitializeInterfaceDescriptor\n"
-    "race:v8::internal::KeyedStoreFastElementStub::"
-    "InitializeInterfaceDescriptor\n"
-    "race:v8::internal::LoadStubCompiler::registers\n"
-    "race:v8::internal::StoreStubCompiler::registers\n"
-    "race:v8::internal::HValue::LoopWeight\n"
-
-    // http://crbug.com/334140
-    "race:CommandLine::HasSwitch\n"
-    "race:CommandLine::current_process_commandline_\n"
-    "race:CommandLine::GetSwitchValueASCII\n"
-
-    // http://crbug.com/338675
-    "race:blink::s_platform\n"
-    "race:content::"
-    "RendererWebKitPlatformSupportImpl::~RendererWebKitPlatformSupportImpl\n"
-
-    // http://crbug.com/347534
-    "race:v8::internal::V8::TearDown\n"
-
-    // http://crbug.com/347538
-    "race:sctp_timer_start\n"
-
-    // http://crbug.com/347553
-    "race:blink::WebString::reset\n"
-
-    // http://crbug.com/348511
-    "race:webrtc::acm1::AudioCodingModuleImpl::PlayoutData10Ms\n"
-
-    // http://crbug.com/348982
-    "race:cricket::P2PTransportChannel::OnConnectionDestroyed\n"
-    "race:cricket::P2PTransportChannel::AddConnection\n"
-
-    // http://crbug.com/348984
-    "race:sctp_express_handle_sack\n"
-    "race:system_base_info\n"
-
-    // https://code.google.com/p/v8/issues/detail?id=3143
-    "race:v8::internal::FLAG_track_double_fields\n"
-
-    // http://crbug.com/374135
-    "race:media::AlsaWrapper::PcmWritei\n"
-
-    // False positive in libc's tzset_internal, http://crbug.com/379738.
-    "race:tzset_internal\n"
-
-    // http://crbug.com/380554
-    "deadlock:g_type_add_interface_static\n"
-
-    // http://crbug.com/386385
-    "race:content::AppCacheStorageImpl::DatabaseTask::CallRunCompleted\n"
-
-    // http://crbug.com/388730
-    "race:g_next_user_script_id\n"
-
-    // http://crbug.com/397022
-    "deadlock:"
-    "base::trace_event::TraceEventTestFixture_ThreadOnceBlocking_Test::"
-    "TestBody\n"
-
-    // http://crbug.com/415472
-    "deadlock:base::trace_event::TraceLog::GetCategoryGroupEnabled\n"
-
-    // http://crbug.com/490856
-    "deadlock:content::TracingControllerImpl::SetEnabledOnFileThread\n"
-
-    // https://code.google.com/p/skia/issues/detail?id=3294
-    "race:SkBaseMutex::acquire\n"
-
-    // https://crbug.com/430533
-    "race:TileTaskGraphRunner::Run\n"
-
-    // Lock inversion in third party code, won't fix.
-    // https://crbug.com/455638
-    "deadlock:dbus::Bus::ShutdownAndBlock\n"
-
-    // https://crbug.com/459429
-    "race:randomnessPid\n"
-
-    // https://crbug.com/454655
-    "race:content::BrowserTestBase::PostTaskToInProcessRendererAndWait\n"
-
-    // https://crbug.com/569682
-    "race:blink::ThreadState::visitStackRoots\n"
-
-    // http://crbug.com/582274
-    "race:usrsctp_close\n"
-
-    // http://crbug.com/633145
-    "race:third_party/libjpeg_turbo/simd/jsimd_x86_64.c\n"
-
-    // http://crbug.com/587199
-    "race:base::TimerTest_OneShotTimer_CustomTaskRunner_Test::TestBody\n"
-    "race:base::TimerSequenceTest_OneShotTimerTaskOnPoolSequence_Test::"
-    "TestBody\n"
-    "race:base::TimerSequenceTest_"
-    "OneShotTimerUsedAndTaskedOnDifferentSequences\n"
-
-    // http://crbug.com/v8/6065
-    "race:net::(anonymous namespace)::ProxyResolverV8TracingImpl::RequestImpl"
-    "::~RequestImpl()\n"
-
-    // http://crbug.com/691029
-    "deadlock:libGLX.so*\n"
-
-    // http://crbug.com/719633
-    "race:crypto::EnsureNSSInit()\n"
-
-    // http://crbug.com/695929
-    "race:base::i18n::IsRTL\n"
-    "race:base::i18n::SetICUDefaultLocale\n"
-
-    // https://crbug.com/794920
-    "race:base::debug::SetCrashKeyString\n"
-    "race:crash_reporter::internal::CrashKeyStringImpl::Set\n"
-
-    // http://crbug.com/795110
-    "race:third_party/fontconfig/*\n"
-
-    // http://crbug.com/797998
-    "race:content::SandboxIPCHandler::HandleLocaltime\n"
-
-    //
-    "race:third_party/harfbuzz-ng/src/*\n"
-
-    // End of suppressions.
-    ;  // Please keep this semicolon.
-
-#endif  // THREAD_SANITIZER
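
As the header of the deleted tsan_suppressions.cc notes, the whole list is one newline-delimited string assembled from adjacent string literals, and extra suppressions can always be layered on at runtime through TSAN_OPTIONS=suppressions=/path/to/file. A minimal sketch of the format (the patterns below are hypothetical placeholders, not real suppressions):

  // Illustrative only; both patterns are hypothetical placeholders.
  // Each entry is "<report-type>:<function-or-path-pattern>\n".
  char kTSanDefaultSuppressions[] =
      "race:third_party/some_library/*\n"
      "deadlock:some_namespace::SomeSingleton::Get\n"

      // End of suppressions.
      ;  // Please keep this semicolon.
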
diff --git a/build/secondary/testing/gmock/BUILD.gn b/build/secondary/testing/gmock/BUILD.gn
deleted file mode 100644
index 9184d62..0000000
--- a/build/secondary/testing/gmock/BUILD.gn
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Chromium's GN configuration for gmock now lives at testing/gmock/BUILD.gn.
-#
-# This configuration is left in the tree because it is still pulled in by V8
-# and PDFium, and will be removed as soon as those projects stop using it.
-#
-# Details at http://crbug.com/630705 and http://crrev.com/2779193002
-
-config("gmock_config") {
-  # Gmock headers need to be able to find themselves.
-  include_dirs = [
-    "//testing/gmock_custom",
-    "include",
-  ]
-}
-
-static_library("gmock") {
-  testonly = true
-  sources = [
-    # Sources based on files in r173 of gmock.
-    "include/gmock/gmock-actions.h",
-    "include/gmock/gmock-cardinalities.h",
-    "include/gmock/gmock-generated-actions.h",
-    "include/gmock/gmock-generated-function-mockers.h",
-    "include/gmock/gmock-generated-matchers.h",
-    "include/gmock/gmock-generated-nice-strict.h",
-    "include/gmock/gmock-matchers.h",
-    "include/gmock/gmock-spec-builders.h",
-    "include/gmock/gmock.h",
-    "include/gmock/internal/gmock-generated-internal-utils.h",
-    "include/gmock/internal/gmock-internal-utils.h",
-    "include/gmock/internal/gmock-port.h",
-
-    # gmock helpers.
-    "../gmock_custom/gmock/internal/custom/gmock-port.h",
-
-    #"src/gmock-all.cc",  # Not needed by our build.
-    "src/gmock-cardinalities.cc",
-    "src/gmock-internal-utils.cc",
-    "src/gmock-matchers.cc",
-    "src/gmock-spec-builders.cc",
-    "src/gmock.cc",
-  ]
-
-  # This project includes some headers from gtest's internals.
-  include_dirs = [ "../gtest/include" ]
-
-  public_configs = [
-    ":gmock_config",
-    "//testing/gtest:gtest_config",
-  ]
-}
-
-static_library("gmock_main") {
-  testonly = true
-  sources = [
-    "src/gmock_main.cc",
-  ]
-  deps = [
-    ":gmock",
-  ]
-}
diff --git a/build/secondary/testing/gtest/BUILD.gn b/build/secondary/testing/gtest/BUILD.gn
deleted file mode 100644
index aa24d7e..0000000
--- a/build/secondary/testing/gtest/BUILD.gn
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Chromium's GN configuration for gtest now lives at testing/gtest/BUILD.gn.
-#
-# This configuration is left in the tree because it is still pulled in by V8
-# and PDFium, and will be removed as soon as those projects stop using it.
-#
-# Details at http://crbug.com/630705 and http://crrev.com/2779193002
-
-import("//build_overrides/gtest.gni")
-if (is_ios) {
-  import("//build/config/ios/ios_sdk.gni")
-  import("//build/buildflag_header.gni")
-}
-
-config("gtest_config") {
-  visibility = [
-    ":*",
-    "//testing/gmock:*",  # gmock also shares this config.
-  ]
-
-  defines = [
-    # In order to allow regex matches in gtest to be shared between Windows
-    # and other systems, we tell gtest to always use its internal engine.
-    "GTEST_HAS_POSIX_RE=0",
-    "GTEST_LANG_CXX11=1",
-  ]
-
-  # Gtest headers need to be able to find themselves.
-  include_dirs = [ "include" ]
-
-  if (is_win) {
-    cflags = [ "/wd4800" ]  # Unused variable warning.
-  }
-}
-
-config("gtest_direct_config") {
-  visibility = [ ":*" ]
-  defines = [ "UNIT_TEST" ]
-}
-
-config("gtest_warnings") {
-  if (is_win && is_clang) {
-    # The Mutex constructor initializer list in gtest-port.cc is incorrectly
-    # ordered. See
-    # https://groups.google.com/d/msg/googletestframework/S5uSV8L2TX8/U1FaTDa6J6sJ.
-    cflags = [ "-Wno-reorder" ]
-  }
-}
-
-static_library("gtest") {
-  testonly = true
-  sources = [
-    "include/gtest/gtest-death-test.h",
-    "include/gtest/gtest-message.h",
-    "include/gtest/gtest-param-test.h",
-    "include/gtest/gtest-printers.h",
-    "include/gtest/gtest-spi.h",
-    "include/gtest/gtest-test-part.h",
-    "include/gtest/gtest-typed-test.h",
-    "include/gtest/gtest.h",
-    "include/gtest/gtest_pred_impl.h",
-    "include/gtest/internal/gtest-death-test-internal.h",
-    "include/gtest/internal/gtest-filepath.h",
-    "include/gtest/internal/gtest-internal.h",
-    "include/gtest/internal/gtest-linked_ptr.h",
-    "include/gtest/internal/gtest-param-util-generated.h",
-    "include/gtest/internal/gtest-param-util.h",
-    "include/gtest/internal/gtest-port.h",
-    "include/gtest/internal/gtest-string.h",
-    "include/gtest/internal/gtest-tuple.h",
-    "include/gtest/internal/gtest-type-util.h",
-
-    #"gtest/src/gtest-all.cc",  # Not needed by our build.
-    "src/gtest-death-test.cc",
-    "src/gtest-filepath.cc",
-    "src/gtest-internal-inl.h",
-    "src/gtest-port.cc",
-    "src/gtest-printers.cc",
-    "src/gtest-test-part.cc",
-    "src/gtest-typed-test.cc",
-    "src/gtest.cc",
-  ]
-  deps = []
-
-  if (gtest_include_multiprocess) {
-    sources += [
-      "../multiprocess_func_list.cc",
-      "../multiprocess_func_list.h",
-    ]
-  }
-
-  if (gtest_include_platform_test) {
-    sources += [ "../platform_test.h" ]
-  }
-
-  if ((is_mac || is_ios) && gtest_include_objc_support) {
-    if (is_ios) {
-      set_sources_assignment_filter([])
-    }
-    sources += [
-      "../gtest_mac.h",
-      "../gtest_mac.mm",
-    ]
-    if (gtest_include_platform_test) {
-      sources += [ "../platform_test_mac.mm" ]
-    }
-    set_sources_assignment_filter(sources_assignment_filter)
-  }
-
-  if (is_ios && gtest_include_ios_coverage) {
-    sources += [
-      "../coverage_util_ios.h",
-      "../coverage_util_ios.mm",
-    ]
-    deps += [ ":ios_enable_coverage" ]
-  }
-
-  include_dirs = [ "." ]
-
-  all_dependent_configs = [ ":gtest_config" ]
-  public_configs = [ ":gtest_direct_config" ]
-
-  configs -= [ "//build/config/compiler:chromium_code" ]
-  configs += [
-    "//build/config/compiler:no_chromium_code",
-
-    # Must be after no_chromium_code for warning flags to be ordered correctly.
-    ":gtest_warnings",
-  ]
-}
-
-source_set("gtest_main") {
-  testonly = true
-  sources = [
-    "src/gtest_main.cc",
-  ]
-  deps = [
-    ":gtest",
-  ]
-}
-
-if (is_ios) {
-  buildflag_header("ios_enable_coverage") {
-    header = "ios_enable_coverage.h"
-    flags = [ "IOS_ENABLE_COVERAGE=$ios_enable_coverage" ]
-  }
-}
diff --git a/build/secondary/third_party/android_platform/development/scripts/BUILD.gn b/build/secondary/third_party/android_platform/development/scripts/BUILD.gn
deleted file mode 100644
index 2c9758e..0000000
--- a/build/secondary/third_party/android_platform/development/scripts/BUILD.gn
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/android/config.gni")
-import("//build/config/python.gni")
-
-python_library("stack_py") {
-  pydeps_file = "//build/secondary/third_party/android_platform/development/scripts/stack.pydeps"
-  data = [
-    "//third_party/android_platform/development/scripts/stack",
-    "//third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer",
-  ]
-}
diff --git a/build/secondary/third_party/android_platform/development/scripts/stack.pydeps b/build/secondary/third_party/android_platform/development/scripts/stack.pydeps
deleted file mode 100644
index a8972fd..0000000
--- a/build/secondary/third_party/android_platform/development/scripts/stack.pydeps
+++ /dev/null
@@ -1,21 +0,0 @@
-# Generated by running:
-#   build/print_python_deps.py --root third_party/android_platform/development/scripts --output build/secondary/third_party/android_platform/development/scripts/stack.pydeps third_party/android_platform/development/scripts/stack.py
-../../../../build/android/pylib/__init__.py
-../../../../build/android/pylib/constants/__init__.py
-../../../../build/android/pylib/constants/host_paths.py
-../../../../build/android/pylib/symbols/__init__.py
-../../../../build/android/pylib/symbols/elf_symbolizer.py
-../../../../tools/python/llvm_symbolizer.py
-../../../catapult/devil/devil/__init__.py
-../../../catapult/devil/devil/android/__init__.py
-../../../catapult/devil/devil/android/constants/__init__.py
-../../../catapult/devil/devil/android/constants/chrome.py
-../../../catapult/devil/devil/android/sdk/__init__.py
-../../../catapult/devil/devil/android/sdk/keyevent.py
-../../../catapult/devil/devil/android/sdk/version_codes.py
-../../../catapult/devil/devil/constants/__init__.py
-../../../catapult/devil/devil/constants/exit_codes.py
-stack.py
-stack_core.py
-stack_libs.py
-symbol.py
diff --git a/build/secondary/third_party/android_tools/BUILD.gn b/build/secondary/third_party/android_tools/BUILD.gn
deleted file mode 100644
index 4a902b5..0000000
--- a/build/secondary/third_party/android_tools/BUILD.gn
+++ /dev/null
@@ -1,244 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/android/rules.gni")
-
-config("cpu_features_include") {
-  include_dirs = [ "$android_ndk_root/sources/android/cpufeatures" ]
-}
-
-config("cpu_features_warnings") {
-  if (is_clang) {
-    # cpu-features.c has a few unused functions on x86; see b/26403333.
-    cflags = [ "-Wno-unused-function" ]
-  }
-}
-
-source_set("cpu_features") {
-  sources = [
-    "$android_ndk_root/sources/android/cpufeatures/cpu-features.c",
-  ]
-  public_configs = [ ":cpu_features_include" ]
-
-  configs -= [ "//build/config/compiler:chromium_code" ]
-  configs += [
-    "//build/config/compiler:no_chromium_code",
-
-    # Must be after no_chromium_code for warning flags to be ordered correctly.
-    ":cpu_features_warnings",
-  ]
-}
-
-if (enable_java_templates) {
-  template("support_lib_alias") {
-    java_group(target_name) {
-      forward_variables_from(invoker, [ "testonly" ])
-      deps = [
-        "$android_support_library_package:$target_name",
-      ]
-    }
-  }
-
-  support_lib_alias("android_support_chromium_java") {
-    testonly = true
-  }
-  support_lib_alias("android_gcm_java") {
-  }
-  support_lib_alias("emma_device_java") {
-  }
-  prebuilt_wrapper("android_arch_lifecycle_common_java") {
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_arch_lifecycle_runtime_java") {
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_multidex_java") {
-    android_deps_target_name = "com_android_support_multidex_java"
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_annotations_java") {
-    android_deps_target_name = "com_android_support_support_annotations_java"
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_cardview_java") {
-    android_deps_target_name = "com_android_support_cardview_v7_java"
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_compat_java") {
-    android_deps_target_name = "com_android_support_support_compat_java"
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_core_ui_java") {
-    android_deps_target_name = "com_android_support_support_core_ui_java"
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_core_utils_java") {
-    android_deps_target_name = "com_android_support_support_core_utils_java"
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_design_java") {
-    android_deps_target_name = "com_android_support_design_java"
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_v4_java") {
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_v7_appcompat_java") {
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_v7_gridlayout_java") {
-    android_deps_target_name = "com_android_support_gridlayout_v7_java"
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_v7_mediarouter_java") {
-    android_deps_target_name = "com_android_support_mediarouter_v7_java"
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_v7_recyclerview_java") {
-    android_deps_target_name = "com_android_support_recyclerview_v7_java"
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-  prebuilt_wrapper("android_support_v13_java") {
-    android_deps_target_name = "com_android_support_support_v13_java"
-    fallback_target = "$android_support_library_package:$target_name"
-  }
-
-  # TODO(dgn): Use the POM files instead of hardcoding the dependencies.
-  gms_path = "$default_extras_android_sdk_root/extras/google/m2repository/com/google/android/gms"
-  gms_version = "11.2.0"
-
-  android_aar_prebuilt("google_play_services_basement_java") {
-    deps = [
-      ":android_support_v4_java",
-    ]
-    _lib_name = "play-services-basement"
-    aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
-    info_path = "//build/secondary/third_party/android_tools/$target_name.info"
-    input_jars_paths = [ "$android_sdk/optional/org.apache.http.legacy.jar" ]
-  }
-
-  android_aar_prebuilt("google_play_services_tasks_java") {
-    deps = [
-      ":google_play_services_basement_java",
-    ]
-    _lib_name = "play-services-tasks"
-    aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
-    info_path = "//build/secondary/third_party/android_tools/$target_name.info"
-    proguard_configs =
-        [ "${target_gen_dir}/google_play_services_basement_java/proguard.txt" ]
-  }
-
-  android_aar_prebuilt("google_play_services_base_java") {
-    deps = [
-      ":android_support_v4_java",
-      ":google_play_services_basement_java",
-      ":google_play_services_tasks_java",
-    ]
-    _lib_name = "play-services-base"
-    aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
-    info_path = "//build/secondary/third_party/android_tools/$target_name.info"
-    proguard_configs =
-        [ "${target_gen_dir}/google_play_services_basement_java/proguard.txt" ]
-  }
-
-  android_aar_prebuilt("google_play_services_auth_base_java") {
-    deps = [
-      ":google_play_services_base_java",
-      ":google_play_services_basement_java",
-    ]
-    _lib_name = "play-services-auth-base"
-    aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
-    info_path = "//build/secondary/third_party/android_tools/$target_name.info"
-    proguard_configs =
-        [ "${target_gen_dir}/google_play_services_basement_java/proguard.txt" ]
-  }
-
-  android_aar_prebuilt("google_play_services_auth_java") {
-    deps = [
-      ":google_play_services_auth_base_java",
-      ":google_play_services_base_java",
-      ":google_play_services_basement_java",
-    ]
-    _lib_name = "play-services-auth"
-    aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
-    info_path = "//build/secondary/third_party/android_tools/$target_name.info"
-    proguard_configs =
-        [ "${target_gen_dir}/google_play_services_basement_java/proguard.txt" ]
-  }
-
-  android_aar_prebuilt("google_play_services_cast_java") {
-    deps = [
-      ":android_support_v7_mediarouter_java",
-      ":google_play_services_base_java",
-      ":google_play_services_basement_java",
-    ]
-    _lib_name = "play-services-cast"
-    aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
-    info_path = "//build/secondary/third_party/android_tools/$target_name.info"
-    proguard_configs =
-        [ "${target_gen_dir}/google_play_services_basement_java/proguard.txt" ]
-  }
-
-  android_aar_prebuilt("google_play_services_iid_java") {
-    deps = [
-      ":google_play_services_base_java",
-      ":google_play_services_basement_java",
-    ]
-    _lib_name = "play-services-iid"
-    aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
-    info_path = "//build/secondary/third_party/android_tools/$target_name.info"
-    proguard_configs =
-        [ "${target_gen_dir}/google_play_services_basement_java/proguard.txt" ]
-  }
-
-  android_aar_prebuilt("google_play_services_gcm_java") {
-    deps = [
-      ":google_play_services_base_java",
-      ":google_play_services_basement_java",
-      ":google_play_services_iid_java",
-    ]
-    _lib_name = "play-services-gcm"
-    aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
-    info_path = "//build/secondary/third_party/android_tools/$target_name.info"
-    proguard_configs =
-        [ "${target_gen_dir}/google_play_services_basement_java/proguard.txt" ]
-  }
-
-  android_aar_prebuilt("google_play_services_location_java") {
-    deps = [
-      ":google_play_services_base_java",
-      ":google_play_services_basement_java",
-    ]
-    _lib_name = "play-services-location"
-    aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
-    info_path = "//build/secondary/third_party/android_tools/$target_name.info"
-    proguard_configs =
-        [ "${target_gen_dir}/google_play_services_basement_java/proguard.txt" ]
-  }
-
-  android_aar_prebuilt("google_play_services_vision_java") {
-    deps = [
-      ":google_play_services_base_java",
-      ":google_play_services_basement_java",
-      ":google_play_services_vision_common_java",
-    ]
-    _lib_name = "play-services-vision"
-    aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
-    info_path = "//build/secondary/third_party/android_tools/$target_name.info"
-    proguard_configs =
-        [ "${target_gen_dir}/google_play_services_basement_java/proguard.txt" ]
-  }
-
-  android_aar_prebuilt("google_play_services_vision_common_java") {
-    deps = [
-      ":google_play_services_base_java",
-      ":google_play_services_basement_java",
-    ]
-    _lib_name = "play-services-vision-common"
-    aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
-    info_path = "//build/secondary/third_party/android_tools/$target_name.info"
-    proguard_configs =
-        [ "${target_gen_dir}/google_play_services_basement_java/proguard.txt" ]
-  }
-}
diff --git a/build/secondary/third_party/android_tools/apk_proguard.flags b/build/secondary/third_party/android_tools/apk_proguard.flags
deleted file mode 100644
index 953e8b9..0000000
--- a/build/secondary/third_party/android_tools/apk_proguard.flags
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Keep all Parcelables, since Play Services has some that are used only by
-# reflection.
-# TODO(agrieve): Remove this once proguard flags provided by play services via
-#     .aars are used.  https://crbug.com/640836
--keep class * implements android.os.Parcelable
-
diff --git a/build/secondary/third_party/android_tools/google_play_services_auth_base_java.info b/build/secondary/third_party/android_tools/google_play_services_auth_base_java.info
deleted file mode 100644
index a2ebd4a..0000000
--- a/build/secondary/third_party/android_tools/google_play_services_auth_base_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = false
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/google_play_services_auth_java.info b/build/secondary/third_party/android_tools/google_play_services_auth_java.info
deleted file mode 100644
index 5c44fcb..0000000
--- a/build/secondary/third_party/android_tools/google_play_services_auth_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = false
-is_manifest_empty = false
-resources = [ "res/drawable-watch-v20/common_google_signin_btn_text_dark_normal.xml", "res/drawable-watch-v20/common_google_signin_btn_text_disabled.xml", "res/drawable-watch-v20/common_google_signin_btn_text_light_normal.xml", "res/values-watch-v20/values.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/google_play_services_base_java.info b/build/secondary/third_party/android_tools/google_play_services_base_java.info
deleted file mode 100644
index 01b9405..0000000
--- a/build/secondary/third_party/android_tools/google_play_services_base_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = false
-is_manifest_empty = false
-resources = [ "res/color/common_google_signin_btn_text_dark.xml", "res/color/common_google_signin_btn_text_light.xml", "res/color/common_google_signin_btn_tint.xml", "res/drawable-hdpi-v4/common_full_open_on_phone.png", "res/drawable-hdpi-v4/common_google_signin_btn_icon_dark_normal_background.9.png", "res/drawable-hdpi-v4/common_google_signin_btn_icon_light_normal_background.9.png", "res/drawable-hdpi-v4/common_google_signin_btn_text_dark_normal_background.9.png", "res/drawable-hdpi-v4/common_google_signin_btn_text_light_normal_background.9.png", "res/drawable-hdpi-v4/googleg_disabled_color_18.png", "res/drawable-hdpi-v4/googleg_standard_color_18.png", "res/drawable-mdpi-v4/common_google_signin_btn_icon_dark_normal_background.9.png", "res/drawable-mdpi-v4/common_google_signin_btn_icon_light_normal_background.9.png", "res/drawable-mdpi-v4/common_google_signin_btn_text_dark_normal_background.9.png", "res/drawable-mdpi-v4/common_google_signin_btn_text_light_normal_background.9.png", "res/drawable-mdpi-v4/googleg_disabled_color_18.png", "res/drawable-mdpi-v4/googleg_standard_color_18.png", "res/drawable-xhdpi-v4/common_full_open_on_phone.png", "res/drawable-xhdpi-v4/common_google_signin_btn_icon_dark_normal_background.9.png", "res/drawable-xhdpi-v4/common_google_signin_btn_icon_light_normal_background.9.png", "res/drawable-xhdpi-v4/common_google_signin_btn_text_dark_normal_background.9.png", "res/drawable-xhdpi-v4/common_google_signin_btn_text_light_normal_background.9.png", "res/drawable-xhdpi-v4/googleg_disabled_color_18.png", "res/drawable-xhdpi-v4/googleg_standard_color_18.png", "res/drawable-xxhdpi-v4/common_google_signin_btn_icon_dark_normal_background.9.png", "res/drawable-xxhdpi-v4/common_google_signin_btn_icon_light_normal_background.9.png", "res/drawable-xxhdpi-v4/common_google_signin_btn_text_dark_normal_background.9.png", "res/drawable-xxhdpi-v4/common_google_signin_btn_text_light_normal_background.9.png", "res/drawable-xxhdpi-v4/googleg_disabled_color_18.png", "res/drawable-xxhdpi-v4/googleg_standard_color_18.png", "res/drawable/common_google_signin_btn_icon_dark.xml", "res/drawable/common_google_signin_btn_icon_dark_focused.xml", "res/drawable/common_google_signin_btn_icon_dark_normal.xml", "res/drawable/common_google_signin_btn_icon_disabled.xml", "res/drawable/common_google_signin_btn_icon_light.xml", "res/drawable/common_google_signin_btn_icon_light_focused.xml", "res/drawable/common_google_signin_btn_icon_light_normal.xml", "res/drawable/common_google_signin_btn_text_dark.xml", "res/drawable/common_google_signin_btn_text_dark_focused.xml", "res/drawable/common_google_signin_btn_text_dark_normal.xml", "res/drawable/common_google_signin_btn_text_disabled.xml", "res/drawable/common_google_signin_btn_text_light.xml", "res/drawable/common_google_signin_btn_text_light_focused.xml", "res/drawable/common_google_signin_btn_text_light_normal.xml", "res/values-af/values.xml", "res/values-am/values.xml", "res/values-ar/values.xml", "res/values-az/values.xml", "res/values-b+sr+Latn/values.xml", "res/values-be/values.xml", "res/values-bg/values.xml", "res/values-bn/values.xml", "res/values-bs/values.xml", "res/values-ca/values.xml", "res/values-cs/values.xml", "res/values-da/values.xml", "res/values-de/values.xml", "res/values-el/values.xml", "res/values-en-rGB/values.xml", "res/values-es-rUS/values.xml", "res/values-es/values.xml", "res/values-et/values.xml", "res/values-eu/values.xml", "res/values-fa/values.xml", "res/values-fi/values.xml", "res/values-fr-rCA/values.xml", 
"res/values-fr/values.xml", "res/values-gl/values.xml", "res/values-gu/values.xml", "res/values-hi/values.xml", "res/values-hr/values.xml", "res/values-hu/values.xml", "res/values-hy/values.xml", "res/values-in/values.xml", "res/values-is/values.xml", "res/values-it/values.xml", "res/values-iw/values.xml", "res/values-ja/values.xml", "res/values-ka/values.xml", "res/values-kk/values.xml", "res/values-km/values.xml", "res/values-kn/values.xml", "res/values-ko/values.xml", "res/values-ky/values.xml", "res/values-lo/values.xml", "res/values-lt/values.xml", "res/values-lv/values.xml", "res/values-mk/values.xml", "res/values-ml/values.xml", "res/values-mn/values.xml", "res/values-mr/values.xml", "res/values-ms/values.xml", "res/values-my/values.xml", "res/values-nb/values.xml", "res/values-ne/values.xml", "res/values-nl/values.xml", "res/values-pa/values.xml", "res/values-pl/values.xml", "res/values-pt-rBR/values.xml", "res/values-pt-rPT/values.xml", "res/values-ro/values.xml", "res/values-ru/values.xml", "res/values-si/values.xml", "res/values-sk/values.xml", "res/values-sl/values.xml", "res/values-sq/values.xml", "res/values-sr/values.xml", "res/values-sv/values.xml", "res/values-sw/values.xml", "res/values-ta/values.xml", "res/values-te/values.xml", "res/values-th/values.xml", "res/values-tl/values.xml", "res/values-tr/values.xml", "res/values-uk/values.xml", "res/values-ur/values.xml", "res/values-uz/values.xml", "res/values-vi/values.xml", "res/values-zh-rCN/values.xml", "res/values-zh-rHK/values.xml", "res/values-zh-rTW/values.xml", "res/values-zu/values.xml", "res/values/values.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/google_play_services_basement_java.info b/build/secondary/third_party/android_tools/google_play_services_basement_java.info
deleted file mode 100644
index ac3ce23..0000000
--- a/build/secondary/third_party/android_tools/google_play_services_basement_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = true
-has_r_text_file = true
-is_manifest_empty = false
-resources = [ "res/values-af/values.xml", "res/values-am/values.xml", "res/values-ar/values.xml", "res/values-az/values.xml", "res/values-b+sr+Latn/values.xml", "res/values-be/values.xml", "res/values-bg/values.xml", "res/values-bn/values.xml", "res/values-bs/values.xml", "res/values-ca/values.xml", "res/values-cs/values.xml", "res/values-da/values.xml", "res/values-de/values.xml", "res/values-el/values.xml", "res/values-en-rGB/values.xml", "res/values-es-rUS/values.xml", "res/values-es/values.xml", "res/values-et/values.xml", "res/values-eu/values.xml", "res/values-fa/values.xml", "res/values-fi/values.xml", "res/values-fr-rCA/values.xml", "res/values-fr/values.xml", "res/values-gl/values.xml", "res/values-gu/values.xml", "res/values-hi/values.xml", "res/values-hr/values.xml", "res/values-hu/values.xml", "res/values-hy/values.xml", "res/values-in/values.xml", "res/values-is/values.xml", "res/values-it/values.xml", "res/values-iw/values.xml", "res/values-ja/values.xml", "res/values-ka/values.xml", "res/values-kk/values.xml", "res/values-km/values.xml", "res/values-kn/values.xml", "res/values-ko/values.xml", "res/values-ky/values.xml", "res/values-lo/values.xml", "res/values-lt/values.xml", "res/values-lv/values.xml", "res/values-mk/values.xml", "res/values-ml/values.xml", "res/values-mn/values.xml", "res/values-mr/values.xml", "res/values-ms/values.xml", "res/values-my/values.xml", "res/values-nb/values.xml", "res/values-ne/values.xml", "res/values-nl/values.xml", "res/values-pa/values.xml", "res/values-pl/values.xml", "res/values-pt-rBR/values.xml", "res/values-pt-rPT/values.xml", "res/values-ro/values.xml", "res/values-ru/values.xml", "res/values-si/values.xml", "res/values-sk/values.xml", "res/values-sl/values.xml", "res/values-sq/values.xml", "res/values-sr/values.xml", "res/values-sv/values.xml", "res/values-sw/values.xml", "res/values-ta/values.xml", "res/values-te/values.xml", "res/values-th/values.xml", "res/values-tl/values.xml", "res/values-tr/values.xml", "res/values-uk/values.xml", "res/values-ur/values.xml", "res/values-uz/values.xml", "res/values-vi/values.xml", "res/values-zh-rCN/values.xml", "res/values-zh-rHK/values.xml", "res/values-zh-rTW/values.xml", "res/values-zu/values.xml", "res/values/values.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/google_play_services_cast_java.info b/build/secondary/third_party/android_tools/google_play_services_cast_java.info
deleted file mode 100644
index 492ffdc..0000000
--- a/build/secondary/third_party/android_tools/google_play_services_cast_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = false
-is_manifest_empty = true
-resources = [ "res/drawable-hdpi-v4/cast_ic_notification_0.png", "res/drawable-hdpi-v4/cast_ic_notification_1.png", "res/drawable-hdpi-v4/cast_ic_notification_2.png", "res/drawable-hdpi-v4/cast_ic_notification_on.png", "res/drawable-mdpi-v4/cast_ic_notification_0.png", "res/drawable-mdpi-v4/cast_ic_notification_1.png", "res/drawable-mdpi-v4/cast_ic_notification_2.png", "res/drawable-mdpi-v4/cast_ic_notification_on.png", "res/drawable-xhdpi-v4/cast_ic_notification_0.png", "res/drawable-xhdpi-v4/cast_ic_notification_1.png", "res/drawable-xhdpi-v4/cast_ic_notification_2.png", "res/drawable-xhdpi-v4/cast_ic_notification_on.png", "res/drawable-xxhdpi-v4/cast_ic_notification_0.png", "res/drawable-xxhdpi-v4/cast_ic_notification_1.png", "res/drawable-xxhdpi-v4/cast_ic_notification_2.png", "res/drawable-xxhdpi-v4/cast_ic_notification_on.png", "res/drawable/cast_ic_notification_connecting.xml", "res/values-af/values.xml", "res/values-am/values.xml", "res/values-ar/values.xml", "res/values-az/values.xml", "res/values-b+sr+Latn/values.xml", "res/values-be/values.xml", "res/values-bg/values.xml", "res/values-bn/values.xml", "res/values-bs/values.xml", "res/values-ca/values.xml", "res/values-cs/values.xml", "res/values-da/values.xml", "res/values-de/values.xml", "res/values-el/values.xml", "res/values-en-rGB/values.xml", "res/values-es-rUS/values.xml", "res/values-es/values.xml", "res/values-et/values.xml", "res/values-eu/values.xml", "res/values-fa/values.xml", "res/values-fi/values.xml", "res/values-fr-rCA/values.xml", "res/values-fr/values.xml", "res/values-gl/values.xml", "res/values-gu/values.xml", "res/values-hi/values.xml", "res/values-hr/values.xml", "res/values-hu/values.xml", "res/values-hy/values.xml", "res/values-in/values.xml", "res/values-is/values.xml", "res/values-it/values.xml", "res/values-iw/values.xml", "res/values-ja/values.xml", "res/values-ka/values.xml", "res/values-kk/values.xml", "res/values-km/values.xml", "res/values-kn/values.xml", "res/values-ko/values.xml", "res/values-ky/values.xml", "res/values-lo/values.xml", "res/values-lt/values.xml", "res/values-lv/values.xml", "res/values-mk/values.xml", "res/values-ml/values.xml", "res/values-mn/values.xml", "res/values-mr/values.xml", "res/values-ms/values.xml", "res/values-my/values.xml", "res/values-nb/values.xml", "res/values-ne/values.xml", "res/values-nl/values.xml", "res/values-pa/values.xml", "res/values-pl/values.xml", "res/values-pt-rBR/values.xml", "res/values-pt-rPT/values.xml", "res/values-ro/values.xml", "res/values-ru/values.xml", "res/values-si/values.xml", "res/values-sk/values.xml", "res/values-sl/values.xml", "res/values-sq/values.xml", "res/values-sr/values.xml", "res/values-sv/values.xml", "res/values-sw/values.xml", "res/values-ta/values.xml", "res/values-te/values.xml", "res/values-th/values.xml", "res/values-tl/values.xml", "res/values-tr/values.xml", "res/values-uk/values.xml", "res/values-ur/values.xml", "res/values-uz/values.xml", "res/values-vi/values.xml", "res/values-zh-rCN/values.xml", "res/values-zh-rHK/values.xml", "res/values-zh-rTW/values.xml", "res/values-zu/values.xml", "res/values/values.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/google_play_services_gcm_java.info b/build/secondary/third_party/android_tools/google_play_services_gcm_java.info
deleted file mode 100644
index 020cd82..0000000
--- a/build/secondary/third_party/android_tools/google_play_services_gcm_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = true
-has_r_text_file = false
-is_manifest_empty = false
-resources = [ "res/values-af/values.xml", "res/values-am/values.xml", "res/values-ar/values.xml", "res/values-az/values.xml", "res/values-b+sr+Latn/values.xml", "res/values-be/values.xml", "res/values-bg/values.xml", "res/values-bn/values.xml", "res/values-bs/values.xml", "res/values-ca/values.xml", "res/values-cs/values.xml", "res/values-da/values.xml", "res/values-de/values.xml", "res/values-el/values.xml", "res/values-en-rGB/values.xml", "res/values-es-rUS/values.xml", "res/values-es/values.xml", "res/values-et/values.xml", "res/values-eu/values.xml", "res/values-fa/values.xml", "res/values-fi/values.xml", "res/values-fr-rCA/values.xml", "res/values-fr/values.xml", "res/values-gl/values.xml", "res/values-gu/values.xml", "res/values-hi/values.xml", "res/values-hr/values.xml", "res/values-hu/values.xml", "res/values-hy/values.xml", "res/values-in/values.xml", "res/values-is/values.xml", "res/values-it/values.xml", "res/values-iw/values.xml", "res/values-ja/values.xml", "res/values-ka/values.xml", "res/values-kk/values.xml", "res/values-km/values.xml", "res/values-kn/values.xml", "res/values-ko/values.xml", "res/values-ky/values.xml", "res/values-lo/values.xml", "res/values-lt/values.xml", "res/values-lv/values.xml", "res/values-mk/values.xml", "res/values-ml/values.xml", "res/values-mn/values.xml", "res/values-mr/values.xml", "res/values-ms/values.xml", "res/values-my/values.xml", "res/values-nb/values.xml", "res/values-ne/values.xml", "res/values-nl/values.xml", "res/values-pa/values.xml", "res/values-pl/values.xml", "res/values-pt-rBR/values.xml", "res/values-pt-rPT/values.xml", "res/values-ro/values.xml", "res/values-ru/values.xml", "res/values-si/values.xml", "res/values-sk/values.xml", "res/values-sl/values.xml", "res/values-sq/values.xml", "res/values-sr/values.xml", "res/values-sv/values.xml", "res/values-sw/values.xml", "res/values-ta/values.xml", "res/values-te/values.xml", "res/values-th/values.xml", "res/values-tl/values.xml", "res/values-tr/values.xml", "res/values-uk/values.xml", "res/values-ur/values.xml", "res/values-uz/values.xml", "res/values-vi/values.xml", "res/values-zh-rCN/values.xml", "res/values-zh-rHK/values.xml", "res/values-zh-rTW/values.xml", "res/values-zu/values.xml", "res/values/values.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/google_play_services_iid_java.info b/build/secondary/third_party/android_tools/google_play_services_iid_java.info
deleted file mode 100644
index 2307336..0000000
--- a/build/secondary/third_party/android_tools/google_play_services_iid_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = false
-is_manifest_empty = false
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/google_play_services_location_java.info b/build/secondary/third_party/android_tools/google_play_services_location_java.info
deleted file mode 100644
index a2ebd4a..0000000
--- a/build/secondary/third_party/android_tools/google_play_services_location_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = false
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/google_play_services_tasks_java.info b/build/secondary/third_party/android_tools/google_play_services_tasks_java.info
deleted file mode 100644
index a2ebd4a..0000000
--- a/build/secondary/third_party/android_tools/google_play_services_tasks_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = false
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/google_play_services_vision_common_java.info b/build/secondary/third_party/android_tools/google_play_services_vision_common_java.info
deleted file mode 100644
index a2ebd4a..0000000
--- a/build/secondary/third_party/android_tools/google_play_services_vision_common_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = false
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/google_play_services_vision_java.info b/build/secondary/third_party/android_tools/google_play_services_vision_java.info
deleted file mode 100644
index a2ebd4a..0000000
--- a/build/secondary/third_party/android_tools/google_play_services_vision_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = false
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/BUILD.gn b/build/secondary/third_party/android_tools/support/BUILD.gn
deleted file mode 100644
index 55609b6..0000000
--- a/build/secondary/third_party/android_tools/support/BUILD.gn
+++ /dev/null
@@ -1,262 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/android/rules.gni")
-
-assert(enable_java_templates)
-
-visibility = [ ":*" ]
-
-lib_version = "27.0.0"
-lib_path = "//third_party/android_tools/sdk/extras/android/m2repository/com/android/support"
-arch_lib_version = "1.0.0"
-arch_lib_path =
-    "//third_party/android_tools/sdk/extras/android/m2repository/android/arch"
-build_file_dir = "//build/secondary/third_party/android_tools/support"
-
-android_library("android_support_chromium_java") {
-  testonly = true
-  java_files = [ "//third_party/android_tools/sdk/extras/chromium/support/src/org/chromium/android/support/PackageManagerWrapper.java" ]
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_java_prebuilt("android_gcm_java") {
-  jar_path = "//third_party/android_tools/sdk/extras/google/gcm/gcm-client/dist/gcm.jar"
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_java_prebuilt("emma_device_java") {
-  jar_path = "//third_party/android_tools/sdk/tools/lib/emma_device.jar"
-  include_java_resources = true
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_design_java") {
-  deps = [
-    ":android_support_transition_java",
-    ":android_support_v4_java",
-    ":android_support_v7_appcompat_java",
-    ":android_support_v7_recyclerview_java",
-  ]
-  _lib_name = "design"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_transition_java") {
-  deps = [
-    ":android_support_v4_java",
-  ]
-  _lib_name = "transition"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-}
-
-android_aar_prebuilt("android_support_cardview_java") {
-  deps = [
-    ":android_support_v7_appcompat_java",
-  ]
-  _lib_name = "cardview-v7"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_multidex_java") {
-  aar_path = "$lib_path/multidex/1.0.1/multidex-1.0.1.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_arch_lifecycle_runtime_java") {
-  aar_path = "$arch_lib_path/lifecycle/runtime/$arch_lib_version/runtime-$arch_lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  deps = [
-    ":android_arch_core_common_java",
-    ":android_arch_lifecycle_common_java",
-    ":android_support_annotations_java",
-  ]
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_java_prebuilt("android_arch_lifecycle_common_java") {
-  output_name = "arch_lifecycle_common"
-  jar_path = "$arch_lib_path/lifecycle/common/$arch_lib_version/common-$arch_lib_version.jar"
-  deps = [
-    ":android_support_annotations_java",
-  ]
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_java_prebuilt("android_arch_core_common_java") {
-  output_name = "arch_core_common"
-  jar_path = "$arch_lib_path/core/common/$arch_lib_version/common-$arch_lib_version.jar"
-  deps = [
-    ":android_support_annotations_java",
-  ]
-}
-
-android_java_prebuilt("android_support_annotations_java") {
-  _lib_name = "support-annotations"
-  jar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.jar"
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-java_group("android_support_v4_java") {
-  deps = [
-    ":android_support_compat_java",
-    ":android_support_core_ui_java",
-    ":android_support_core_utils_java",
-    ":android_support_fragment_java",
-    ":android_support_media_compat_java",
-  ]
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_compat_java") {
-  deps = [
-    ":android_arch_lifecycle_runtime_java",
-    ":android_support_annotations_java",
-  ]
-  _lib_name = "support-compat"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  ignore_aidl = true  # We don't appear to need these currently.
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_core_ui_java") {
-  deps = [
-    ":android_support_compat_java",
-  ]
-  _lib_name = "support-core-ui"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_core_utils_java") {
-  deps = [
-    ":android_support_compat_java",
-  ]
-  _lib_name = "support-core-utils"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_fragment_java") {
-  deps = [
-    ":android_support_compat_java",
-    ":android_support_core_ui_java",
-    ":android_support_core_utils_java",
-    ":android_support_media_compat_java",
-  ]
-  _lib_name = "support-fragment"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-}
-
-android_aar_prebuilt("android_support_media_compat_java") {
-  deps = [
-    ":android_support_compat_java",
-  ]
-  _lib_name = "support-media-compat"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  ignore_aidl = true  # We don't appear to need these currently.
-}
-
-android_aar_prebuilt("android_support_v13_java") {
-  deps = [
-    ":android_support_v4_java",
-  ]
-  _lib_name = "support-v13"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_vector_drawable_java") {
-  deps = [
-    ":android_support_compat_java",
-  ]
-  _lib_name = "support-vector-drawable"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-}
-
-android_aar_prebuilt("android_support_animated_vector_drawable_java") {
-  deps = [
-    ":android_support_core_ui_java",
-    ":android_support_vector_drawable_java",
-  ]
-  _lib_name = "animated-vector-drawable"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-}
-
-android_aar_prebuilt("android_support_v7_appcompat_java_internal") {
-  deps = [
-    ":android_support_v4_java",
-  ]
-  _lib_name = "appcompat-v7"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-}
-
-java_group("android_support_v7_appcompat_java") {
-  deps = [
-    ":android_support_animated_vector_drawable_java",
-    ":android_support_v4_java",
-    ":android_support_v7_appcompat_java_internal",
-    ":android_support_vector_drawable_java",
-  ]
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_v7_gridlayout_java") {
-  deps = [
-    ":android_support_compat_java",
-    ":android_support_core_ui_java",
-  ]
-  _lib_name = "gridlayout-v7"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_v7_mediarouter_java") {
-  deps = [
-    ":android_support_v7_appcompat_java",
-    ":android_support_v7_palette_java",
-  ]
-  _lib_name = "mediarouter-v7"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_v7_recyclerview_java") {
-  deps = [
-    ":android_support_annotations_java",
-    ":android_support_compat_java",
-    ":android_support_core_ui_java",
-  ]
-  _lib_name = "recyclerview-v7"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-  visibility += [ "//third_party/android_tools:*" ]
-}
-
-android_aar_prebuilt("android_support_v7_palette_java") {
-  deps = [
-    ":android_support_compat_java",
-    ":android_support_core_utils_java",
-  ]
-  _lib_name = "palette-v7"
-  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
-  info_path = "$build_file_dir/$target_name.info"
-}
diff --git a/build/secondary/third_party/android_tools/support/android_arch_lifecycle_runtime_java.info b/build/secondary/third_party/android_tools/support/android_arch_lifecycle_runtime_java.info
deleted file mode 100644
index acf40e8..0000000
--- a/build/secondary/third_party/android_tools/support/android_arch_lifecycle_runtime_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = true
-has_r_text_file = false
-is_manifest_empty = false
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_animated_vector_drawable_java.info b/build/secondary/third_party/android_tools/support/android_support_animated_vector_drawable_java.info
deleted file mode 100644
index 7103bc0..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_animated_vector_drawable_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = true
-has_r_text_file = true
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_cardview_java.info b/build/secondary/third_party/android_tools/support/android_support_cardview_java.info
deleted file mode 100644
index e458725..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_cardview_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/values-v23/values-v23.xml", "res/values/values.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_compat_java.info b/build/secondary/third_party/android_tools/support/android_support_compat_java.info
deleted file mode 100644
index fc66ea7..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_compat_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [ "aidl/android/support/v4/os/ResultReceiver.aidl" ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/values-ur/values-ur.xml", "res/values-ru/values-ru.xml", "res/values-en-rGB/values-en-rGB.xml", "res/values-pt-rBR/values-pt-rBR.xml", "res/values-el/values-el.xml", "res/drawable-hdpi-v4/notification_bg_low_normal.9.png", "res/drawable-hdpi-v4/notify_panel_notification_icon_bg.png", "res/drawable-hdpi-v4/notification_bg_normal_pressed.9.png", "res/drawable-hdpi-v4/notification_bg_normal.9.png", "res/drawable-hdpi-v4/notification_bg_low_pressed.9.png", "res/values-tr/values-tr.xml", "res/values-hu/values-hu.xml", "res/values-km/values-km.xml", "res/values-fr-rCA/values-fr-rCA.xml", "res/values-gu/values-gu.xml", "res/layout/notification_template_icon_group.xml", "res/layout/notification_template_custom_big.xml", "res/layout/notification_template_part_time.xml", "res/layout/notification_template_part_chronometer.xml", "res/layout/notification_action_tombstone.xml", "res/layout/notification_action.xml", "res/values-ms/values-ms.xml", "res/values-v21/values-v21.xml", "res/values-ja/values-ja.xml", "res/values-eu/values-eu.xml", "res/values-sv/values-sv.xml", "res/values-mn/values-mn.xml", "res/layout-v16/notification_template_custom_big.xml", "res/values-ta/values-ta.xml", "res/values-pl/values-pl.xml", "res/values-lt/values-lt.xml", "res/values-bg/values-bg.xml", "res/values-es-rUS/values-es-rUS.xml", "res/values-iw/values-iw.xml", "res/values-mr/values-mr.xml", "res/values-uz/values-uz.xml", "res/values-pa/values-pa.xml", "res/values-fi/values-fi.xml", "res/values-am/values-am.xml", "res/values-pt-rPT/values-pt-rPT.xml", "res/values-ar/values-ar.xml", "res/values-ky/values-ky.xml", "res/drawable-mdpi-v4/notification_bg_low_normal.9.png", "res/drawable-mdpi-v4/notify_panel_notification_icon_bg.png", "res/drawable-mdpi-v4/notification_bg_normal_pressed.9.png", "res/drawable-mdpi-v4/notification_bg_normal.9.png", "res/drawable-mdpi-v4/notification_bg_low_pressed.9.png", "res/values-bs/values-bs.xml", "res/values/values.xml", "res/values-hr/values-hr.xml", "res/values-en-rAU/values-en-rAU.xml", "res/values-ro/values-ro.xml", "res/drawable-xhdpi-v4/notification_bg_low_normal.9.png", "res/drawable-xhdpi-v4/notify_panel_notification_icon_bg.png", "res/drawable-xhdpi-v4/notification_bg_normal_pressed.9.png", "res/drawable-xhdpi-v4/notification_bg_normal.9.png", "res/drawable-xhdpi-v4/notification_bg_low_pressed.9.png", "res/values-vi/values-vi.xml", "res/values-ko/values-ko.xml", "res/values-zh-rTW/values-zh-rTW.xml", "res/values-cs/values-cs.xml", "res/values-ml/values-ml.xml", "res/values-te/values-te.xml", "res/values-si/values-si.xml", "res/values-es/values-es.xml", "res/values-af/values-af.xml", "res/values-zu/values-zu.xml", "res/values-lo/values-lo.xml", "res/values-mk/values-mk.xml", "res/values-sl/values-sl.xml", "res/values-sw/values-sw.xml", "res/values-bn/values-bn.xml", "res/values-sk/values-sk.xml", "res/values-lv/values-lv.xml", "res/values-is/values-is.xml", "res/values-da/values-da.xml", "res/values-it/values-it.xml", "res/values-gl/values-gl.xml", "res/values-de/values-de.xml", "res/values-be/values-be.xml", "res/values-fa/values-fa.xml", "res/values-ca/values-ca.xml", "res/values-th/values-th.xml", "res/values-nl/values-nl.xml", "res/values-hy/values-hy.xml", "res/values-zh-rHK/values-zh-rHK.xml", "res/values-tl/values-tl.xml", "res/values-kk/values-kk.xml", "res/values-pt/values-pt.xml", "res/values-my/values-my.xml", "res/values-et/values-et.xml", "res/values-en-rIN/values-en-rIN.xml", "res/values-sr/values-sr.xml", "res/values-v16/values-v16.xml", 
"res/values-fr/values-fr.xml", "res/values-kn/values-kn.xml", "res/values-nb/values-nb.xml", "res/values-port/values-port.xml", "res/values-ne/values-ne.xml", "res/drawable/notification_bg_low.xml", "res/drawable/notification_bg.xml", "res/drawable/notification_icon_background.xml", "res/drawable/notification_tile_bg.xml", "res/values-b+sr+Latn/values-b+sr+Latn.xml", "res/values-ka/values-ka.xml", "res/values-zh-rCN/values-zh-rCN.xml", "res/values-az/values-az.xml", "res/values-sq/values-sq.xml", "res/values-hi/values-hi.xml", "res/layout-v21/notification_template_icon_group.xml", "res/layout-v21/notification_template_custom_big.xml", "res/layout-v21/notification_action_tombstone.xml", "res/layout-v21/notification_action.xml", "res/values-in/values-in.xml", "res/values-uk/values-uk.xml", "res/drawable-v21/notification_action_background.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_core_ui_java.info b/build/secondary/third_party/android_tools/support/android_support_core_ui_java.info
deleted file mode 100644
index 7103bc0..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_core_ui_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = true
-has_r_text_file = true
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_core_utils_java.info b/build/secondary/third_party/android_tools/support/android_support_core_utils_java.info
deleted file mode 100644
index cd54060..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_core_utils_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_design_java.info b/build/secondary/third_party/android_tools/support/android_support_design_java.info
deleted file mode 100644
index ec2b700..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_design_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = true
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/drawable-hdpi-v4/design_ic_visibility.png", "res/drawable-hdpi-v4/design_ic_visibility_off.png", "res/layout/design_bottom_sheet_dialog.xml", "res/layout/design_layout_tab_text.xml", "res/layout/design_bottom_navigation_item.xml", "res/layout/design_navigation_item.xml", "res/layout/design_layout_snackbar_include.xml", "res/layout/design_navigation_menu.xml", "res/layout/design_navigation_item_header.xml", "res/layout/design_navigation_item_separator.xml", "res/layout/design_layout_snackbar.xml", "res/layout/design_text_input_password_icon.xml", "res/layout/design_layout_tab_icon.xml", "res/layout/design_navigation_item_subheader.xml", "res/layout/design_menu_item_action_area.xml", "res/layout/design_navigation_menu_item.xml", "res/values-v21/values-v21.xml", "res/drawable-anydpi-v21/design_ic_visibility.xml", "res/drawable-anydpi-v21/design_ic_visibility_off.xml", "res/layout-sw600dp-v13/design_layout_snackbar.xml", "res/drawable-xxhdpi-v4/design_ic_visibility.png", "res/drawable-xxhdpi-v4/design_ic_visibility_off.png", "res/color/design_error.xml", "res/color/design_tint_password_toggle.xml", "res/animator-v21/design_appbar_state_list_animator.xml", "res/drawable-mdpi-v4/design_ic_visibility.png", "res/drawable-mdpi-v4/design_ic_visibility_off.png", "res/values/values.xml", "res/drawable-xhdpi-v4/design_ic_visibility.png", "res/drawable-xhdpi-v4/design_ic_visibility_off.png", "res/values-land/values-land.xml", "res/values-sw600dp-v13/values-sw600dp-v13.xml", "res/anim/design_snackbar_out.xml", "res/anim/design_snackbar_in.xml", "res/anim/design_bottom_sheet_slide_out.xml", "res/anim/design_bottom_sheet_slide_in.xml", "res/drawable-xxxhdpi-v4/design_ic_visibility.png", "res/drawable-xxxhdpi-v4/design_ic_visibility_off.png", "res/anim-v21/design_bottom_sheet_slide_out.xml", "res/anim-v21/design_bottom_sheet_slide_in.xml", "res/values-v26/values-v26.xml", "res/color-v23/design_tint_password_toggle.xml", "res/drawable/design_snackbar_background.xml", "res/drawable/design_fab_background.xml", "res/drawable/design_password_eye.xml", "res/drawable/navigation_empty_icon.xml", "res/drawable/design_bottom_navigation_item_background.xml", "res/drawable-v21/avd_hide_password.xml", "res/drawable-v21/avd_show_password.xml", "res/drawable-v21/design_password_eye.xml", "res/drawable-v21/design_bottom_navigation_item_background.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_fragment_java.info b/build/secondary/third_party/android_tools/support/android_support_fragment_java.info
deleted file mode 100644
index cd54060..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_fragment_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_media_compat_java.info b/build/secondary/third_party/android_tools/support/android_support_media_compat_java.info
deleted file mode 100644
index 6140dd1..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_media_compat_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [ "aidl/android/support/v4/media/session/PlaybackStateCompat.aidl", "aidl/android/support/v4/media/session/MediaSessionCompat.aidl", "aidl/android/support/v4/media/session/ParcelableVolumeInfo.aidl", "aidl/android/support/v4/media/RatingCompat.aidl", "aidl/android/support/v4/media/MediaMetadataCompat.aidl", "aidl/android/support/v4/media/MediaDescriptionCompat.aidl" ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = true
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/layout/notification_template_media_custom.xml", "res/layout/notification_template_big_media_narrow_custom.xml", "res/layout/notification_template_media.xml", "res/layout/notification_template_big_media.xml", "res/layout/notification_media_action.xml", "res/layout/notification_template_lines_media.xml", "res/layout/notification_media_cancel_action.xml", "res/layout/notification_template_big_media_custom.xml", "res/layout/notification_template_big_media_narrow.xml", "res/values-v21/values-v21.xml", "res/values/values.xml", "res/values-v24/values-v24.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_multidex_java.info b/build/secondary/third_party/android_tools/support/android_support_multidex_java.info
deleted file mode 100644
index a2ebd4a..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_multidex_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = false
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_transition_java.info b/build/secondary/third_party/android_tools/support/android_support_transition_java.info
deleted file mode 100644
index a25d255..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_transition_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = true
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/values/values.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_v13_java.info b/build/secondary/third_party/android_tools/support/android_support_v13_java.info
deleted file mode 100644
index cd54060..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_v13_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_v14_preference_java.info b/build/secondary/third_party/android_tools/support/android_support_v14_preference_java.info
deleted file mode 100644
index 74388f4..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_v14_preference_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/layout/preference_information_material.xml", "res/layout/preference_widget_seekbar_material.xml", "res/layout/preference_category_material.xml", "res/layout/preference_dropdown_material.xml", "res/layout/preference_widget_switch.xml", "res/layout/preference_material.xml", "res/values/values.xml", "res/values-v17/values-v17.xml", "res/drawable/preference_list_divider_material.xml", "res/layout-v21/preference_information_material.xml", "res/layout-v21/preference_category_material.xml", "res/layout-v21/preference_dropdown_material.xml", "res/layout-v21/preference_material.xml", "res/drawable-v21/preference_list_divider_material.xml", "res/layout-v17/preference_information_material.xml", "res/layout-v17/preference_category_material.xml", "res/layout-v17/preference_dropdown_material.xml", "res/layout-v17/preference_material.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_v17_leanback_java.info b/build/secondary/third_party/android_tools/support/android_support_v17_leanback_java.info
deleted file mode 100644
index 34d3693..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_v17_leanback_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/values-ur/values-ur.xml", "res/values-ru/values-ru.xml", "res/values-en-rGB/values-en-rGB.xml", "res/values-pt-rBR/values-pt-rBR.xml", "res/values-el/values-el.xml", "res/drawable-hdpi-v4/lb_action_bg_focused.9.png", "res/drawable-hdpi-v4/lb_ic_search_mic.png", "res/drawable-hdpi-v4/lb_in_app_search_shadow_focused.9.png", "res/drawable-hdpi-v4/lb_ic_search_mic_out.png", "res/drawable-hdpi-v4/lb_in_app_search_bg.9.png", "res/drawable-hdpi-v4/lb_in_app_search_shadow_normal.9.png", "res/drawable-hdpi-v4/lb_ic_actions_right_arrow.png", "res/drawable-hdpi-v4/lb_ic_sad_cloud.png", "res/drawable-hdpi-v4/lb_ic_in_app_search.png", "res/values-tr/values-tr.xml", "res/values-hu/values-hu.xml", "res/values-v18/values-v18.xml", "res/values-km/values-km.xml", "res/values-fr-rCA/values-fr-rCA.xml", "res/values-gu/values-gu.xml", "res/layout/lb_browse_fragment.xml", "res/layout/lb_image_card_view_themed_title.xml", "res/layout/lb_list_row_hovercard.xml", "res/layout/lb_guidedactions.xml", "res/layout/lb_guidedactions_item.xml", "res/layout/lb_playback_transport_controls.xml", "res/layout/lb_image_card_view_themed_content.xml", "res/layout/lb_image_card_view_themed_badge_right.xml", "res/layout/lb_image_card_view.xml", "res/layout/lb_media_item_number_view_flipper.xml", "res/layout/lb_title_view.xml", "res/layout/lb_browse_title.xml", "res/layout/lb_control_bar.xml", "res/layout/lb_divider.xml", "res/layout/lb_background_window.xml", "res/layout/lb_guidance.xml", "res/layout/lb_shadow.xml", "res/layout/lb_video_surface.xml", "res/layout/lb_control_button_primary.xml", "res/layout/lb_fullwidth_details_overview_logo.xml", "res/layout/lb_onboarding_fragment.xml", "res/layout/lb_error_fragment.xml", "res/layout/lb_vertical_grid_fragment.xml", "res/layout/lb_action_1_line.xml", "res/layout/video_surface_fragment.xml", "res/layout/lb_guidedbuttonactions.xml", "res/layout/lb_picker_item.xml", "res/layout/lb_guidedstep_background.xml", "res/layout/lb_details_fragment.xml", "res/layout/lb_playback_controls_row.xml", "res/layout/lb_row_header.xml", "res/layout/lb_playback_transport_controls_row.xml", "res/layout/lb_search_orb.xml", "res/layout/lb_guidedstep_fragment.xml", "res/layout/lb_picker.xml", "res/layout/lb_speech_orb.xml", "res/layout/lb_playback_now_playing_bars.xml", "res/layout/lb_list_row.xml", "res/layout/lb_row_media_item_action.xml", "res/layout/lb_vertical_grid.xml", "res/layout/lb_header.xml", "res/layout/lb_row_media_item.xml", "res/layout/lb_playback_fragment.xml", "res/layout/lb_search_fragment.xml", "res/layout/lb_playback_controls.xml", "res/layout/lb_section_header.xml", "res/layout/lb_guidedactions_datepicker_item.xml", "res/layout/lb_details_description.xml", "res/layout/lb_details_overview.xml", "res/layout/lb_picker_column.xml", "res/layout/lb_media_list_header.xml", "res/layout/lb_image_card_view_themed_badge_left.xml", "res/layout/lb_rows_fragment.xml", "res/layout/lb_headers_fragment.xml", "res/layout/lb_action_2_lines.xml", "res/layout/lb_control_button_secondary.xml", "res/layout/lb_search_bar.xml", "res/layout/lb_row_container.xml", "res/layout/lb_fullwidth_details_overview.xml", "res/layout/lb_picker_separator.xml", "res/values-ms/values-ms.xml", "res/values-v21/values-v21.xml", "res/values-ja/values-ja.xml", "res/values-eu/values-eu.xml", "res/values-sv/values-sv.xml", "res/values-mn/values-mn.xml", "res/values-ta/values-ta.xml", "res/values-pl/values-pl.xml", "res/drawable-xxhdpi-v4/lb_action_bg_focused.9.png", "res/drawable-xxhdpi-v4/lb_ic_search_mic.png", 
"res/drawable-xxhdpi-v4/lb_in_app_search_shadow_focused.9.png", "res/drawable-xxhdpi-v4/lb_ic_search_mic_out.png", "res/drawable-xxhdpi-v4/lb_in_app_search_bg.9.png", "res/drawable-xxhdpi-v4/lb_in_app_search_shadow_normal.9.png", "res/drawable-xxhdpi-v4/lb_ic_actions_right_arrow.png", "res/drawable-xxhdpi-v4/lb_ic_sad_cloud.png", "res/drawable-xxhdpi-v4/lb_ic_in_app_search.png", "res/values-lt/values-lt.xml", "res/values-bg/values-bg.xml", "res/values-es-rUS/values-es-rUS.xml", "res/values-iw/values-iw.xml", "res/values-en-rXC/values-en-rXC.xml", "res/values-mr/values-mr.xml", "res/values-uz/values-uz.xml", "res/values-pa/values-pa.xml", "res/values-v22/values-v22.xml", "res/animator-v21/lb_onboarding_logo_exit.xml", "res/animator-v21/lb_onboarding_title_enter.xml", "res/animator-v21/lb_playback_bg_fade_out.xml", "res/animator-v21/lb_playback_bg_fade_in.xml", "res/animator-v21/lb_playback_description_fade_out.xml", "res/animator-v21/lb_onboarding_description_enter.xml", "res/animator-v21/lb_onboarding_logo_enter.xml", "res/animator-v21/lb_onboarding_page_indicator_enter.xml", "res/values-fi/values-fi.xml", "res/values-am/values-am.xml", "res/values-pt-rPT/values-pt-rPT.xml", "res/values-ar/values-ar.xml", "res/values-ky/values-ky.xml", "res/drawable-mdpi-v4/lb_action_bg_focused.9.png", "res/drawable-mdpi-v4/lb_ic_search_mic.png", "res/drawable-mdpi-v4/lb_in_app_search_shadow_focused.9.png", "res/drawable-mdpi-v4/lb_ic_search_mic_out.png", "res/drawable-mdpi-v4/lb_in_app_search_bg.9.png", "res/drawable-mdpi-v4/lb_in_app_search_shadow_normal.9.png", "res/drawable-mdpi-v4/lb_ic_actions_right_arrow.png", "res/drawable-mdpi-v4/lb_ic_sad_cloud.png", "res/drawable-mdpi-v4/lb_ic_in_app_search.png", "res/values-bs/values-bs.xml", "res/values/values.xml", "res/values-hr/values-hr.xml", "res/values-en-rAU/values-en-rAU.xml", "res/values-ro/values-ro.xml", "res/drawable-xhdpi-v4/lb_ic_pause.png", "res/drawable-xhdpi-v4/lb_ic_more.png", "res/drawable-xhdpi-v4/lb_action_bg_focused.9.png", "res/drawable-xhdpi-v4/lb_ic_skip_next.png", "res/drawable-xhdpi-v4/lb_ic_search_mic.png", "res/drawable-xhdpi-v4/lb_ic_fast_forward.png", "res/drawable-xhdpi-v4/lb_ic_shuffle.png", "res/drawable-xhdpi-v4/lb_ic_hq.png", "res/drawable-xhdpi-v4/lb_ic_skip_previous.png", "res/drawable-xhdpi-v4/lb_ic_replay.png", "res/drawable-xhdpi-v4/lb_text_dot_two.png", "res/drawable-xhdpi-v4/lb_ic_cc.png", "res/drawable-xhdpi-v4/lb_ic_thumb_up_outline.png", "res/drawable-xhdpi-v4/lb_ic_thumb_up.png", "res/drawable-xhdpi-v4/lb_in_app_search_shadow_focused.9.png", "res/drawable-xhdpi-v4/lb_ic_playback_loop.png", "res/drawable-xhdpi-v4/lb_text_dot_one.png", "res/drawable-xhdpi-v4/lb_ic_search_mic_out.png", "res/drawable-xhdpi-v4/lb_ic_loop_one.png", "res/drawable-xhdpi-v4/lb_text_dot_one_small.png", "res/drawable-xhdpi-v4/lb_ic_stop.png", "res/drawable-xhdpi-v4/lb_ic_pip.png", "res/drawable-xhdpi-v4/lb_card_shadow_normal.9.png", "res/drawable-xhdpi-v4/lb_ic_nav_arrow.png", "res/drawable-xhdpi-v4/lb_text_dot_two_small.png", "res/drawable-xhdpi-v4/lb_ic_loop.png", "res/drawable-xhdpi-v4/lb_ic_play.png", "res/drawable-xhdpi-v4/lb_ic_play_fit.png", "res/drawable-xhdpi-v4/lb_ic_fast_rewind.png", "res/drawable-xhdpi-v4/lb_in_app_search_bg.9.png", "res/drawable-xhdpi-v4/lb_ic_thumb_down_outline.png", "res/drawable-xhdpi-v4/lb_in_app_search_shadow_normal.9.png", "res/drawable-xhdpi-v4/lb_card_shadow_focused.9.png", "res/drawable-xhdpi-v4/lb_ic_actions_right_arrow.png", "res/drawable-xhdpi-v4/lb_ic_sad_cloud.png", 
"res/drawable-xhdpi-v4/lb_ic_thumb_down.png", "res/drawable-xhdpi-v4/lb_ic_guidedactions_item_chevron.png", "res/drawable-xhdpi-v4/lb_ic_in_app_search.png", "res/values-vi/values-vi.xml", "res/values-ko/values-ko.xml", "res/values-zh-rTW/values-zh-rTW.xml", "res/values-cs/values-cs.xml", "res/values-ml/values-ml.xml", "res/values-ldrtl-v17/values-ldrtl-v17.xml", "res/values-te/values-te.xml", "res/values-si/values-si.xml", "res/values-es/values-es.xml", "res/values-af/values-af.xml", "res/values-zu/values-zu.xml", "res/values-lo/values-lo.xml", "res/values-v19/values-v19.xml", "res/values-mk/values-mk.xml", "res/values-sl/values-sl.xml", "res/values-sw/values-sw.xml", "res/values-bn/values-bn.xml", "res/raw/lb_voice_open.ogg", "res/raw/lb_voice_success.ogg", "res/raw/lb_voice_failure.ogg", "res/raw/lb_voice_no_input.ogg", "res/transition-v21/lb_browse_headers_out.xml", "res/transition-v21/lb_browse_enter_transition.xml", "res/transition-v21/lb_return_transition.xml", "res/transition-v21/lb_details_return_transition.xml", "res/transition-v21/lb_browse_entrance_transition.xml", "res/transition-v21/lb_details_enter_transition.xml", "res/transition-v21/lb_enter_transition.xml", "res/transition-v21/lb_vertical_grid_entrance_transition.xml", "res/transition-v21/lb_shared_element_enter_transition.xml", "res/transition-v21/lb_guidedstep_activity_enter.xml", "res/transition-v21/lb_shared_element_return_transition.xml", "res/transition-v21/lb_title_in.xml", "res/transition-v21/lb_guidedstep_activity_enter_bottom.xml", "res/transition-v21/lb_title_out.xml", "res/transition-v21/lb_browse_return_transition.xml", "res/transition-v21/lb_vertical_grid_enter_transition.xml", "res/transition-v21/lb_vertical_grid_return_transition.xml", "res/transition-v21/lb_browse_headers_in.xml", "res/values-sk/values-sk.xml", "res/values-lv/values-lv.xml", "res/anim/lb_decelerator_4.xml", "res/anim/lb_decelerator_2.xml", "res/values-is/values-is.xml", "res/values-da/values-da.xml", "res/values-it/values-it.xml", "res/values-gl/values-gl.xml", "res/values-de/values-de.xml", "res/values-be/values-be.xml", "res/values-en-rCA/values-en-rCA.xml", "res/animator/lb_onboarding_page_indicator_fade_in.xml", "res/animator/lb_guidedstep_slide_down.xml", "res/animator/lb_onboarding_logo_exit.xml", "res/animator/lb_onboarding_title_enter.xml", "res/animator/lb_onboarding_start_button_fade_out.xml", "res/animator/lb_playback_rows_fade_out.xml", "res/animator/lb_playback_controls_fade_in.xml", "res/animator/lb_playback_description_fade_in.xml", "res/animator/lb_playback_bg_fade_out.xml", "res/animator/lb_playback_bg_fade_in.xml", "res/animator/lb_guidedactions_item_unpressed.xml", "res/animator/lb_playback_description_fade_out.xml", "res/animator/lb_onboarding_start_button_fade_in.xml", "res/animator/lb_playback_rows_fade_in.xml", "res/animator/lb_guidedactions_item_pressed.xml", "res/animator/lb_onboarding_description_enter.xml", "res/animator/lb_guidedstep_slide_up.xml", "res/animator/lb_onboarding_logo_enter.xml", "res/animator/lb_onboarding_page_indicator_enter.xml", "res/animator/lb_playback_controls_fade_out.xml", "res/animator/lb_onboarding_page_indicator_fade_out.xml", "res/values-fa/values-fa.xml", "res/values-ca/values-ca.xml", "res/values-th/values-th.xml", "res/values-nl/values-nl.xml", "res/values-hy/values-hy.xml", "res/values-zh-rHK/values-zh-rHK.xml", "res/values-tl/values-tl.xml", "res/values-kk/values-kk.xml", "res/values-pt/values-pt.xml", "res/values-my/values-my.xml", "res/values-et/values-et.xml", 
"res/values-en-rIN/values-en-rIN.xml", "res/values-sr/values-sr.xml", "res/values-fr/values-fr.xml", "res/values-kn/values-kn.xml", "res/values-nb/values-nb.xml", "res/values-ne/values-ne.xml", "res/transition-v19/lb_browse_headers_out.xml", "res/transition-v19/lb_browse_headers_in.xml", "res/drawable/lb_card_foreground.xml", "res/drawable/lb_control_button_primary.xml", "res/drawable/lb_playback_progress_bar.xml", "res/drawable/lb_onboarding_start_button_background.xml", "res/drawable/lb_search_orb.xml", "res/drawable/lb_speech_orb.xml", "res/drawable/lb_headers_right_fading.xml", "res/drawable/lb_background.xml", "res/drawable/lb_playback_now_playing_bar.xml", "res/drawable/lb_control_button_secondary.xml", "res/values-b+sr+Latn/values-b+sr+Latn.xml", "res/values-ka/values-ka.xml", "res/values-zh-rCN/values-zh-rCN.xml", "res/values-az/values-az.xml", "res/values-sq/values-sq.xml", "res/values-hi/values-hi.xml", "res/values-in/values-in.xml", "res/values-uk/values-uk.xml", "res/drawable-v21/lb_card_foreground.xml", "res/drawable-v21/lb_selectable_item_rounded_rect.xml", "res/drawable-v21/lb_control_button_primary.xml", "res/drawable-v21/lb_action_bg.xml", "res/drawable-v21/lb_control_button_secondary.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_v17_preference_java.info b/build/secondary/third_party/android_tools/support/android_support_v17_preference_java.info
deleted file mode 100644
index 87ea13e..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_v17_preference_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/layout/leanback_preference_widget_seekbar.xml", "res/layout/leanback_preference_information.xml", "res/layout/leanback_preference.xml", "res/layout/leanback_preferences_list.xml", "res/layout/leanback_settings_fragment.xml", "res/layout/leanback_list_preference_item_single.xml", "res/layout/leanback_preference_category.xml", "res/layout/leanback_list_preference_fragment.xml", "res/layout/leanback_list_preference_item_multi.xml", "res/layout/leanback_preference_fragment.xml", "res/color/lb_preference_item_primary_text_color.xml", "res/color/lb_preference_item_secondary_text_color.xml", "res/values/values.xml", "res/layout-v21/leanback_settings_fragment.xml", "res/layout-v21/leanback_preference_category.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_v7_appcompat_java_internal.info b/build/secondary/third_party/android_tools/support/android_support_v7_appcompat_java_internal.info
deleted file mode 100644
index e99e4de..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_v7_appcompat_java_internal.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/values-ur/values-ur.xml", "res/values-ru/values-ru.xml", "res/values-en-rGB/values-en-rGB.xml", "res/values-pt-rBR/values-pt-rBR.xml", "res/values-el/values-el.xml", "res/drawable-hdpi-v4/abc_ab_share_pack_mtrl_alpha.9.png", "res/drawable-hdpi-v4/abc_btn_switch_to_on_mtrl_00012.9.png", "res/drawable-hdpi-v4/abc_list_longpressed_holo.9.png", "res/drawable-hdpi-v4/abc_ic_star_black_48dp.png", "res/drawable-hdpi-v4/abc_ic_commit_search_api_mtrl_alpha.png", "res/drawable-hdpi-v4/abc_text_select_handle_middle_mtrl_light.png", "res/drawable-hdpi-v4/abc_list_focused_holo.9.png", "res/drawable-hdpi-v4/abc_popup_background_mtrl_mult.9.png", "res/drawable-hdpi-v4/abc_ic_star_black_36dp.png", "res/drawable-hdpi-v4/abc_text_select_handle_right_mtrl_dark.png", "res/drawable-hdpi-v4/abc_btn_switch_to_on_mtrl_00001.9.png", "res/drawable-hdpi-v4/abc_btn_check_to_on_mtrl_000.png", "res/drawable-hdpi-v4/abc_text_select_handle_left_mtrl_light.png", "res/drawable-hdpi-v4/abc_ic_menu_selectall_mtrl_alpha.png", "res/drawable-hdpi-v4/abc_textfield_default_mtrl_alpha.9.png", "res/drawable-hdpi-v4/abc_scrubber_primary_mtrl_alpha.9.png", "res/drawable-hdpi-v4/abc_scrubber_control_to_pressed_mtrl_000.png", "res/drawable-hdpi-v4/abc_switch_track_mtrl_alpha.9.png", "res/drawable-hdpi-v4/abc_ic_star_black_16dp.png", "res/drawable-hdpi-v4/abc_scrubber_control_off_mtrl_alpha.png", "res/drawable-hdpi-v4/abc_ic_menu_cut_mtrl_alpha.png", "res/drawable-hdpi-v4/abc_ic_star_half_black_36dp.png", "res/drawable-hdpi-v4/abc_list_selector_disabled_holo_light.9.png", "res/drawable-hdpi-v4/abc_text_select_handle_left_mtrl_dark.png", "res/drawable-hdpi-v4/abc_ic_menu_paste_mtrl_am_alpha.png", "res/drawable-hdpi-v4/abc_btn_radio_to_on_mtrl_015.png", "res/drawable-hdpi-v4/abc_list_pressed_holo_dark.9.png", "res/drawable-hdpi-v4/abc_list_pressed_holo_light.9.png", "res/drawable-hdpi-v4/abc_tab_indicator_mtrl_alpha.9.png", "res/drawable-hdpi-v4/abc_textfield_search_default_mtrl_alpha.9.png", "res/drawable-hdpi-v4/abc_text_select_handle_right_mtrl_light.png", "res/drawable-hdpi-v4/abc_btn_check_to_on_mtrl_015.png", "res/drawable-hdpi-v4/abc_list_selector_disabled_holo_dark.9.png", "res/drawable-hdpi-v4/abc_ic_star_half_black_16dp.png", "res/drawable-hdpi-v4/abc_ic_star_half_black_48dp.png", "res/drawable-hdpi-v4/abc_ic_menu_share_mtrl_alpha.png", "res/drawable-hdpi-v4/abc_textfield_search_activated_mtrl_alpha.9.png", "res/drawable-hdpi-v4/abc_btn_radio_to_on_mtrl_000.png", "res/drawable-hdpi-v4/abc_list_divider_mtrl_alpha.9.png", "res/drawable-hdpi-v4/abc_ic_menu_copy_mtrl_am_alpha.png", "res/drawable-hdpi-v4/abc_spinner_mtrl_am_alpha.9.png", "res/drawable-hdpi-v4/abc_scrubber_track_mtrl_alpha.9.png", "res/drawable-hdpi-v4/abc_text_select_handle_middle_mtrl_dark.png", "res/drawable-hdpi-v4/abc_scrubber_control_to_pressed_mtrl_005.png", "res/drawable-hdpi-v4/abc_menu_hardkey_panel_mtrl_mult.9.png", "res/drawable-hdpi-v4/abc_cab_background_top_mtrl_alpha.9.png", "res/drawable-hdpi-v4/abc_textfield_activated_mtrl_alpha.9.png", "res/values-tr/values-tr.xml", "res/values-hu/values-hu.xml", "res/values-v18/values-v18.xml", "res/values-km/values-km.xml", "res/values-fr-rCA/values-fr-rCA.xml", "res/values-gu/values-gu.xml", "res/layout/abc_list_menu_item_layout.xml", "res/layout/abc_action_menu_layout.xml", "res/layout/abc_search_dropdown_item_icons_2line.xml", "res/layout/support_simple_spinner_dropdown_item.xml", "res/layout/abc_screen_simple.xml", "res/layout/abc_action_menu_item_layout.xml", 
"res/layout/abc_list_menu_item_radio.xml", "res/layout/abc_alert_dialog_title_material.xml", "res/layout/abc_alert_dialog_material.xml", "res/layout/abc_screen_toolbar.xml", "res/layout/abc_screen_simple_overlay_action_mode.xml", "res/layout/abc_select_dialog_material.xml", "res/layout/abc_action_mode_bar.xml", "res/layout/abc_dialog_title_material.xml", "res/layout/abc_list_menu_item_checkbox.xml", "res/layout/abc_popup_menu_header_item_layout.xml", "res/layout/abc_action_bar_up_container.xml", "res/layout/select_dialog_singlechoice_material.xml", "res/layout/abc_action_mode_close_item_material.xml", "res/layout/tooltip.xml", "res/layout/select_dialog_multichoice_material.xml", "res/layout/abc_action_bar_title_item.xml", "res/layout/abc_activity_chooser_view_list_item.xml", "res/layout/abc_popup_menu_item_layout.xml", "res/layout/abc_alert_dialog_button_bar_material.xml", "res/layout/abc_list_menu_item_icon.xml", "res/layout/abc_expanded_menu_layout.xml", "res/layout/select_dialog_item_material.xml", "res/layout/abc_activity_chooser_view.xml", "res/layout/abc_search_view.xml", "res/layout/abc_screen_content_include.xml", "res/values-ms/values-ms.xml", "res/values-v21/values-v21.xml", "res/values-ja/values-ja.xml", "res/values-eu/values-eu.xml", "res/values-hdpi-v4/values-hdpi-v4.xml", "res/values-sv/values-sv.xml", "res/values-mn/values-mn.xml", "res/values-v14/values-v14.xml", "res/values-ta/values-ta.xml", "res/values-v23/values-v23.xml", "res/values-pl/values-pl.xml", "res/drawable-xxhdpi-v4/abc_ab_share_pack_mtrl_alpha.9.png", "res/drawable-xxhdpi-v4/abc_btn_switch_to_on_mtrl_00012.9.png", "res/drawable-xxhdpi-v4/abc_list_longpressed_holo.9.png", "res/drawable-xxhdpi-v4/abc_ic_star_black_48dp.png", "res/drawable-xxhdpi-v4/abc_ic_commit_search_api_mtrl_alpha.png", "res/drawable-xxhdpi-v4/abc_text_select_handle_middle_mtrl_light.png", "res/drawable-xxhdpi-v4/abc_list_focused_holo.9.png", "res/drawable-xxhdpi-v4/abc_popup_background_mtrl_mult.9.png", "res/drawable-xxhdpi-v4/abc_ic_star_black_36dp.png", "res/drawable-xxhdpi-v4/abc_text_select_handle_right_mtrl_dark.png", "res/drawable-xxhdpi-v4/abc_btn_switch_to_on_mtrl_00001.9.png", "res/drawable-xxhdpi-v4/abc_btn_check_to_on_mtrl_000.png", "res/drawable-xxhdpi-v4/abc_text_select_handle_left_mtrl_light.png", "res/drawable-xxhdpi-v4/abc_ic_menu_selectall_mtrl_alpha.png", "res/drawable-xxhdpi-v4/abc_textfield_default_mtrl_alpha.9.png", "res/drawable-xxhdpi-v4/abc_scrubber_primary_mtrl_alpha.9.png", "res/drawable-xxhdpi-v4/abc_scrubber_control_to_pressed_mtrl_000.png", "res/drawable-xxhdpi-v4/abc_switch_track_mtrl_alpha.9.png", "res/drawable-xxhdpi-v4/abc_ic_star_black_16dp.png", "res/drawable-xxhdpi-v4/abc_scrubber_control_off_mtrl_alpha.png", "res/drawable-xxhdpi-v4/abc_ic_menu_cut_mtrl_alpha.png", "res/drawable-xxhdpi-v4/abc_ic_star_half_black_36dp.png", "res/drawable-xxhdpi-v4/abc_list_selector_disabled_holo_light.9.png", "res/drawable-xxhdpi-v4/abc_text_select_handle_left_mtrl_dark.png", "res/drawable-xxhdpi-v4/abc_ic_menu_paste_mtrl_am_alpha.png", "res/drawable-xxhdpi-v4/abc_btn_radio_to_on_mtrl_015.png", "res/drawable-xxhdpi-v4/abc_list_pressed_holo_dark.9.png", "res/drawable-xxhdpi-v4/abc_list_pressed_holo_light.9.png", "res/drawable-xxhdpi-v4/abc_tab_indicator_mtrl_alpha.9.png", "res/drawable-xxhdpi-v4/abc_textfield_search_default_mtrl_alpha.9.png", "res/drawable-xxhdpi-v4/abc_text_select_handle_right_mtrl_light.png", "res/drawable-xxhdpi-v4/abc_btn_check_to_on_mtrl_015.png", 
"res/drawable-xxhdpi-v4/abc_list_selector_disabled_holo_dark.9.png", "res/drawable-xxhdpi-v4/abc_ic_star_half_black_16dp.png", "res/drawable-xxhdpi-v4/abc_ic_star_half_black_48dp.png", "res/drawable-xxhdpi-v4/abc_ic_menu_share_mtrl_alpha.png", "res/drawable-xxhdpi-v4/abc_textfield_search_activated_mtrl_alpha.9.png", "res/drawable-xxhdpi-v4/abc_btn_radio_to_on_mtrl_000.png", "res/drawable-xxhdpi-v4/abc_list_divider_mtrl_alpha.9.png", "res/drawable-xxhdpi-v4/abc_ic_menu_copy_mtrl_am_alpha.png", "res/drawable-xxhdpi-v4/abc_spinner_mtrl_am_alpha.9.png", "res/drawable-xxhdpi-v4/abc_scrubber_track_mtrl_alpha.9.png", "res/drawable-xxhdpi-v4/abc_text_select_handle_middle_mtrl_dark.png", "res/drawable-xxhdpi-v4/abc_scrubber_control_to_pressed_mtrl_005.png", "res/drawable-xxhdpi-v4/abc_menu_hardkey_panel_mtrl_mult.9.png", "res/drawable-xxhdpi-v4/abc_cab_background_top_mtrl_alpha.9.png", "res/drawable-xxhdpi-v4/abc_textfield_activated_mtrl_alpha.9.png", "res/color/abc_search_url_text.xml", "res/color/abc_btn_colored_borderless_text_material.xml", "res/color/abc_primary_text_material_light.xml", "res/color/abc_tint_seek_thumb.xml", "res/color/abc_tint_btn_checkable.xml", "res/color/abc_secondary_text_material_dark.xml", "res/color/abc_tint_edittext.xml", "res/color/abc_tint_spinner.xml", "res/color/abc_primary_text_material_dark.xml", "res/color/abc_secondary_text_material_light.xml", "res/color/switch_thumb_material_dark.xml", "res/color/abc_hint_foreground_material_dark.xml", "res/color/abc_tint_switch_track.xml", "res/color/abc_primary_text_disable_only_material_dark.xml", "res/color/abc_btn_colored_text_material.xml", "res/color/abc_hint_foreground_material_light.xml", "res/color/switch_thumb_material_light.xml", "res/color/abc_tint_default.xml", "res/color/abc_primary_text_disable_only_material_light.xml", "res/values-ldltr-v21/values-ldltr-v21.xml", "res/values-lt/values-lt.xml", "res/values-bg/values-bg.xml", "res/values-es-rUS/values-es-rUS.xml", "res/values-iw/values-iw.xml", "res/values-en-rXC/values-en-rXC.xml", "res/values-mr/values-mr.xml", "res/values-uz/values-uz.xml", "res/values-large-v4/values-large-v4.xml", "res/values-v11/values-v11.xml", "res/color-v11/abc_background_cache_hint_selector_material_light.xml", "res/color-v11/abc_background_cache_hint_selector_material_dark.xml", "res/values-pa/values-pa.xml", "res/values-v22/values-v22.xml", "res/values-v25/values-v25.xml", "res/values-fi/values-fi.xml", "res/values-am/values-am.xml", "res/values-pt-rPT/values-pt-rPT.xml", "res/values-ar/values-ar.xml", "res/values-ky/values-ky.xml", "res/values-v12/values-v12.xml", "res/drawable-mdpi-v4/abc_ab_share_pack_mtrl_alpha.9.png", "res/drawable-mdpi-v4/abc_btn_switch_to_on_mtrl_00012.9.png", "res/drawable-mdpi-v4/abc_list_longpressed_holo.9.png", "res/drawable-mdpi-v4/abc_ic_star_black_48dp.png", "res/drawable-mdpi-v4/abc_ic_commit_search_api_mtrl_alpha.png", "res/drawable-mdpi-v4/abc_text_select_handle_middle_mtrl_light.png", "res/drawable-mdpi-v4/abc_list_focused_holo.9.png", "res/drawable-mdpi-v4/abc_popup_background_mtrl_mult.9.png", "res/drawable-mdpi-v4/abc_ic_star_black_36dp.png", "res/drawable-mdpi-v4/abc_text_select_handle_right_mtrl_dark.png", "res/drawable-mdpi-v4/abc_btn_switch_to_on_mtrl_00001.9.png", "res/drawable-mdpi-v4/abc_btn_check_to_on_mtrl_000.png", "res/drawable-mdpi-v4/abc_text_select_handle_left_mtrl_light.png", "res/drawable-mdpi-v4/abc_ic_menu_selectall_mtrl_alpha.png", "res/drawable-mdpi-v4/abc_textfield_default_mtrl_alpha.9.png", 
"res/drawable-mdpi-v4/abc_scrubber_primary_mtrl_alpha.9.png", "res/drawable-mdpi-v4/abc_scrubber_control_to_pressed_mtrl_000.png", "res/drawable-mdpi-v4/abc_switch_track_mtrl_alpha.9.png", "res/drawable-mdpi-v4/abc_ic_star_black_16dp.png", "res/drawable-mdpi-v4/abc_scrubber_control_off_mtrl_alpha.png", "res/drawable-mdpi-v4/abc_ic_menu_cut_mtrl_alpha.png", "res/drawable-mdpi-v4/abc_ic_star_half_black_36dp.png", "res/drawable-mdpi-v4/abc_list_selector_disabled_holo_light.9.png", "res/drawable-mdpi-v4/abc_text_select_handle_left_mtrl_dark.png", "res/drawable-mdpi-v4/abc_ic_menu_paste_mtrl_am_alpha.png", "res/drawable-mdpi-v4/abc_btn_radio_to_on_mtrl_015.png", "res/drawable-mdpi-v4/abc_list_pressed_holo_dark.9.png", "res/drawable-mdpi-v4/abc_list_pressed_holo_light.9.png", "res/drawable-mdpi-v4/abc_tab_indicator_mtrl_alpha.9.png", "res/drawable-mdpi-v4/abc_textfield_search_default_mtrl_alpha.9.png", "res/drawable-mdpi-v4/abc_text_select_handle_right_mtrl_light.png", "res/drawable-mdpi-v4/abc_btn_check_to_on_mtrl_015.png", "res/drawable-mdpi-v4/abc_list_selector_disabled_holo_dark.9.png", "res/drawable-mdpi-v4/abc_ic_star_half_black_16dp.png", "res/drawable-mdpi-v4/abc_ic_star_half_black_48dp.png", "res/drawable-mdpi-v4/abc_ic_menu_share_mtrl_alpha.png", "res/drawable-mdpi-v4/abc_textfield_search_activated_mtrl_alpha.9.png", "res/drawable-mdpi-v4/abc_btn_radio_to_on_mtrl_000.png", "res/drawable-mdpi-v4/abc_list_divider_mtrl_alpha.9.png", "res/drawable-mdpi-v4/abc_ic_menu_copy_mtrl_am_alpha.png", "res/drawable-mdpi-v4/abc_spinner_mtrl_am_alpha.9.png", "res/drawable-mdpi-v4/abc_scrubber_track_mtrl_alpha.9.png", "res/drawable-mdpi-v4/abc_text_select_handle_middle_mtrl_dark.png", "res/drawable-mdpi-v4/abc_scrubber_control_to_pressed_mtrl_005.png", "res/drawable-mdpi-v4/abc_menu_hardkey_panel_mtrl_mult.9.png", "res/drawable-mdpi-v4/abc_cab_background_top_mtrl_alpha.9.png", "res/drawable-mdpi-v4/abc_textfield_activated_mtrl_alpha.9.png", "res/values-bs/values-bs.xml", "res/values/values.xml", "res/values-hr/values-hr.xml", "res/values-v24/values-v24.xml", "res/drawable-ldrtl-xxhdpi-v17/abc_ic_menu_cut_mtrl_alpha.png", "res/drawable-ldrtl-xxhdpi-v17/abc_ic_menu_copy_mtrl_am_alpha.png", "res/drawable-ldrtl-xxhdpi-v17/abc_spinner_mtrl_am_alpha.9.png", "res/drawable-ldrtl-xhdpi-v17/abc_ic_menu_cut_mtrl_alpha.png", "res/drawable-ldrtl-xhdpi-v17/abc_ic_menu_copy_mtrl_am_alpha.png", "res/drawable-ldrtl-xhdpi-v17/abc_spinner_mtrl_am_alpha.9.png", "res/values-en-rAU/values-en-rAU.xml", "res/values-ro/values-ro.xml", "res/drawable-xhdpi-v4/abc_ab_share_pack_mtrl_alpha.9.png", "res/drawable-xhdpi-v4/abc_btn_switch_to_on_mtrl_00012.9.png", "res/drawable-xhdpi-v4/abc_list_longpressed_holo.9.png", "res/drawable-xhdpi-v4/abc_ic_star_black_48dp.png", "res/drawable-xhdpi-v4/abc_ic_commit_search_api_mtrl_alpha.png", "res/drawable-xhdpi-v4/abc_text_select_handle_middle_mtrl_light.png", "res/drawable-xhdpi-v4/abc_list_focused_holo.9.png", "res/drawable-xhdpi-v4/abc_popup_background_mtrl_mult.9.png", "res/drawable-xhdpi-v4/abc_ic_star_black_36dp.png", "res/drawable-xhdpi-v4/abc_text_select_handle_right_mtrl_dark.png", "res/drawable-xhdpi-v4/abc_btn_switch_to_on_mtrl_00001.9.png", "res/drawable-xhdpi-v4/abc_btn_check_to_on_mtrl_000.png", "res/drawable-xhdpi-v4/abc_text_select_handle_left_mtrl_light.png", "res/drawable-xhdpi-v4/abc_ic_menu_selectall_mtrl_alpha.png", "res/drawable-xhdpi-v4/abc_textfield_default_mtrl_alpha.9.png", "res/drawable-xhdpi-v4/abc_scrubber_primary_mtrl_alpha.9.png", 
"res/drawable-xhdpi-v4/abc_scrubber_control_to_pressed_mtrl_000.png", "res/drawable-xhdpi-v4/abc_switch_track_mtrl_alpha.9.png", "res/drawable-xhdpi-v4/abc_ic_star_black_16dp.png", "res/drawable-xhdpi-v4/abc_scrubber_control_off_mtrl_alpha.png", "res/drawable-xhdpi-v4/abc_ic_menu_cut_mtrl_alpha.png", "res/drawable-xhdpi-v4/abc_ic_star_half_black_36dp.png", "res/drawable-xhdpi-v4/abc_list_selector_disabled_holo_light.9.png", "res/drawable-xhdpi-v4/abc_text_select_handle_left_mtrl_dark.png", "res/drawable-xhdpi-v4/abc_ic_menu_paste_mtrl_am_alpha.png", "res/drawable-xhdpi-v4/abc_btn_radio_to_on_mtrl_015.png", "res/drawable-xhdpi-v4/abc_list_pressed_holo_dark.9.png", "res/drawable-xhdpi-v4/abc_list_pressed_holo_light.9.png", "res/drawable-xhdpi-v4/abc_tab_indicator_mtrl_alpha.9.png", "res/drawable-xhdpi-v4/abc_textfield_search_default_mtrl_alpha.9.png", "res/drawable-xhdpi-v4/abc_text_select_handle_right_mtrl_light.png", "res/drawable-xhdpi-v4/abc_btn_check_to_on_mtrl_015.png", "res/drawable-xhdpi-v4/abc_list_selector_disabled_holo_dark.9.png", "res/drawable-xhdpi-v4/abc_ic_star_half_black_16dp.png", "res/drawable-xhdpi-v4/abc_ic_star_half_black_48dp.png", "res/drawable-xhdpi-v4/abc_ic_menu_share_mtrl_alpha.png", "res/drawable-xhdpi-v4/abc_textfield_search_activated_mtrl_alpha.9.png", "res/drawable-xhdpi-v4/abc_btn_radio_to_on_mtrl_000.png", "res/drawable-xhdpi-v4/abc_list_divider_mtrl_alpha.9.png", "res/drawable-xhdpi-v4/abc_ic_menu_copy_mtrl_am_alpha.png", "res/drawable-xhdpi-v4/abc_spinner_mtrl_am_alpha.9.png", "res/drawable-xhdpi-v4/abc_scrubber_track_mtrl_alpha.9.png", "res/drawable-xhdpi-v4/abc_text_select_handle_middle_mtrl_dark.png", "res/drawable-xhdpi-v4/abc_scrubber_control_to_pressed_mtrl_005.png", "res/drawable-xhdpi-v4/abc_menu_hardkey_panel_mtrl_mult.9.png", "res/drawable-xhdpi-v4/abc_cab_background_top_mtrl_alpha.9.png", "res/drawable-xhdpi-v4/abc_textfield_activated_mtrl_alpha.9.png", "res/values-vi/values-vi.xml", "res/values-ko/values-ko.xml", "res/values-zh-rTW/values-zh-rTW.xml", "res/drawable-ldrtl-mdpi-v17/abc_ic_menu_cut_mtrl_alpha.png", "res/drawable-ldrtl-mdpi-v17/abc_ic_menu_copy_mtrl_am_alpha.png", "res/drawable-ldrtl-mdpi-v17/abc_spinner_mtrl_am_alpha.9.png", "res/values-cs/values-cs.xml", "res/values-ml/values-ml.xml", "res/values-te/values-te.xml", "res/values-si/values-si.xml", "res/values-es/values-es.xml", "res/values-af/values-af.xml", "res/values-zu/values-zu.xml", "res/values-lo/values-lo.xml", "res/values-land/values-land.xml", "res/values-mk/values-mk.xml", "res/values-sl/values-sl.xml", "res/values-xlarge-v4/values-xlarge-v4.xml", "res/values-sw600dp-v13/values-sw600dp-v13.xml", "res/values-sw/values-sw.xml", "res/values-bn/values-bn.xml", "res/values-sk/values-sk.xml", "res/values-lv/values-lv.xml", "res/anim/abc_shrink_fade_out_from_bottom.xml", "res/anim/abc_slide_in_bottom.xml", "res/anim/abc_fade_out.xml", "res/anim/abc_slide_in_top.xml", "res/anim/abc_slide_out_top.xml", "res/anim/tooltip_exit.xml", "res/anim/abc_popup_exit.xml", "res/anim/abc_grow_fade_in_from_bottom.xml", "res/anim/tooltip_enter.xml", "res/anim/abc_fade_in.xml", "res/anim/abc_slide_out_bottom.xml", "res/anim/abc_popup_enter.xml", "res/values-is/values-is.xml", "res/values-da/values-da.xml", "res/values-it/values-it.xml", "res/values-gl/values-gl.xml", "res/values-de/values-de.xml", "res/values-be/values-be.xml", "res/values-en-rCA/values-en-rCA.xml", "res/drawable-ldrtl-xxxhdpi-v17/abc_ic_menu_cut_mtrl_alpha.png", 
"res/drawable-ldrtl-xxxhdpi-v17/abc_ic_menu_copy_mtrl_am_alpha.png", "res/drawable-ldrtl-xxxhdpi-v17/abc_spinner_mtrl_am_alpha.9.png", "res/values-fa/values-fa.xml", "res/values-ca/values-ca.xml", "res/values-th/values-th.xml", "res/values-nl/values-nl.xml", "res/values-hy/values-hy.xml", "res/values-zh-rHK/values-zh-rHK.xml", "res/values-tl/values-tl.xml", "res/values-kk/values-kk.xml", "res/values-pt/values-pt.xml", "res/values-my/values-my.xml", "res/values-night-v8/values-night-v8.xml", "res/drawable-v23/abc_control_background_material.xml", "res/drawable-xxxhdpi-v4/abc_btn_switch_to_on_mtrl_00012.9.png", "res/drawable-xxxhdpi-v4/abc_ic_star_black_48dp.png", "res/drawable-xxxhdpi-v4/abc_ic_star_black_36dp.png", "res/drawable-xxxhdpi-v4/abc_text_select_handle_right_mtrl_dark.png", "res/drawable-xxxhdpi-v4/abc_btn_switch_to_on_mtrl_00001.9.png", "res/drawable-xxxhdpi-v4/abc_btn_check_to_on_mtrl_000.png", "res/drawable-xxxhdpi-v4/abc_text_select_handle_left_mtrl_light.png", "res/drawable-xxxhdpi-v4/abc_ic_menu_selectall_mtrl_alpha.png", "res/drawable-xxxhdpi-v4/abc_scrubber_control_to_pressed_mtrl_000.png", "res/drawable-xxxhdpi-v4/abc_switch_track_mtrl_alpha.9.png", "res/drawable-xxxhdpi-v4/abc_ic_star_black_16dp.png", "res/drawable-xxxhdpi-v4/abc_ic_menu_cut_mtrl_alpha.png", "res/drawable-xxxhdpi-v4/abc_ic_star_half_black_36dp.png", "res/drawable-xxxhdpi-v4/abc_text_select_handle_left_mtrl_dark.png", "res/drawable-xxxhdpi-v4/abc_ic_menu_paste_mtrl_am_alpha.png", "res/drawable-xxxhdpi-v4/abc_btn_radio_to_on_mtrl_015.png", "res/drawable-xxxhdpi-v4/abc_tab_indicator_mtrl_alpha.9.png", "res/drawable-xxxhdpi-v4/abc_text_select_handle_right_mtrl_light.png", "res/drawable-xxxhdpi-v4/abc_btn_check_to_on_mtrl_015.png", "res/drawable-xxxhdpi-v4/abc_ic_star_half_black_16dp.png", "res/drawable-xxxhdpi-v4/abc_ic_star_half_black_48dp.png", "res/drawable-xxxhdpi-v4/abc_ic_menu_share_mtrl_alpha.png", "res/drawable-xxxhdpi-v4/abc_btn_radio_to_on_mtrl_000.png", "res/drawable-xxxhdpi-v4/abc_ic_menu_copy_mtrl_am_alpha.png", "res/drawable-xxxhdpi-v4/abc_spinner_mtrl_am_alpha.9.png", "res/drawable-xxxhdpi-v4/abc_scrubber_control_to_pressed_mtrl_005.png", "res/values-et/values-et.xml", "res/values-en-rIN/values-en-rIN.xml", "res/values-sr/values-sr.xml", "res/values-v16/values-v16.xml", "res/values-fr/values-fr.xml", "res/values-kn/values-kn.xml", "res/values-nb/values-nb.xml", "res/drawable-ldrtl-hdpi-v17/abc_ic_menu_cut_mtrl_alpha.png", "res/drawable-ldrtl-hdpi-v17/abc_ic_menu_copy_mtrl_am_alpha.png", "res/drawable-ldrtl-hdpi-v17/abc_spinner_mtrl_am_alpha.9.png", "res/layout-v26/abc_screen_toolbar.xml", "res/values-v26/values-v26.xml", "res/values-port/values-port.xml", "res/color-v23/abc_btn_colored_borderless_text_material.xml", "res/color-v23/abc_color_highlight_material.xml", "res/color-v23/abc_tint_seek_thumb.xml", "res/color-v23/abc_tint_btn_checkable.xml", "res/color-v23/abc_tint_edittext.xml", "res/color-v23/abc_tint_spinner.xml", "res/color-v23/abc_tint_switch_track.xml", "res/color-v23/abc_btn_colored_text_material.xml", "res/color-v23/abc_tint_default.xml", "res/values-v17/values-v17.xml", "res/values-ne/values-ne.xml", "res/values-v13/values-v13.xml", "res/drawable/abc_tab_indicator_material.xml", "res/drawable/abc_item_background_holo_dark.xml", "res/drawable/abc_edit_text_material.xml", "res/drawable/abc_ic_voice_search_api_material.xml", "res/drawable/abc_ic_menu_overflow_material.xml", "res/drawable/abc_ic_search_api_material.xml", "res/drawable/abc_cab_background_top_material.xml", 
"res/drawable/abc_list_selector_holo_light.xml", "res/drawable/abc_ic_arrow_drop_right_black_24dp.xml", "res/drawable/abc_spinner_textfield_background_material.xml", "res/drawable/abc_ratingbar_small_material.xml", "res/drawable/tooltip_frame_dark.xml", "res/drawable/abc_item_background_holo_light.xml", "res/drawable/abc_btn_colored_material.xml", "res/drawable/abc_ratingbar_material.xml", "res/drawable/abc_cab_background_internal_bg.xml", "res/drawable/abc_btn_radio_material.xml", "res/drawable/tooltip_frame_light.xml", "res/drawable/abc_ic_clear_material.xml", "res/drawable/abc_ic_ab_back_material.xml", "res/drawable/abc_textfield_search_material.xml", "res/drawable/abc_ic_go_search_api_material.xml", "res/drawable/abc_seekbar_track_material.xml", "res/drawable/abc_dialog_material_background.xml", "res/drawable/abc_seekbar_tick_mark_material.xml", "res/drawable/abc_list_selector_background_transition_holo_light.xml", "res/drawable/abc_text_cursor_material.xml", "res/drawable/abc_btn_default_mtrl_shape.xml", "res/drawable/abc_ratingbar_indicator_material.xml", "res/drawable/abc_switch_thumb_material.xml", "res/drawable/abc_seekbar_thumb_material.xml", "res/drawable/abc_list_selector_background_transition_holo_dark.xml", "res/drawable/abc_btn_check_material.xml", "res/drawable/abc_list_selector_holo_dark.xml", "res/drawable/abc_btn_borderless_material.xml", "res/drawable/abc_vector_test.xml", "res/values-b+sr+Latn/values-b+sr+Latn.xml", "res/values-h720dp-v13/values-h720dp-v13.xml", "res/values-ka/values-ka.xml", "res/values-zh-rCN/values-zh-rCN.xml", "res/values-az/values-az.xml", "res/values-sq/values-sq.xml", "res/values-hi/values-hi.xml", "res/values-in/values-in.xml", "res/values-uk/values-uk.xml", "res/drawable-v21/abc_edit_text_material.xml", "res/drawable-v21/abc_btn_colored_material.xml", "res/drawable-v21/abc_action_bar_item_background_material.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_v7_gridlayout_java.info b/build/secondary/third_party/android_tools/support/android_support_v7_gridlayout_java.info
deleted file mode 100644
index 28c8330..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_v7_gridlayout_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/values/values.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_v7_mediarouter_java.info b/build/secondary/third_party/android_tools/support/android_support_v7_mediarouter_java.info
deleted file mode 100644
index 6fc7871..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_v7_mediarouter_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/values-ur/values-ur.xml", "res/values-ru/values-ru.xml", "res/values-en-rGB/values-en-rGB.xml", "res/values-pt-rBR/values-pt-rBR.xml", "res/values-el/values-el.xml", "res/drawable-hdpi-v4/ic_dialog_close_light.png", "res/drawable-hdpi-v4/ic_media_pause_light.png", "res/drawable-hdpi-v4/ic_mr_button_disabled_light.png", "res/drawable-hdpi-v4/ic_mr_button_grey.png", "res/drawable-hdpi-v4/ic_media_play_light.png", "res/drawable-hdpi-v4/ic_dialog_close_dark.png", "res/drawable-hdpi-v4/ic_vol_type_tv_dark.png", "res/drawable-hdpi-v4/ic_media_pause_dark.png", "res/drawable-hdpi-v4/ic_vol_type_tv_light.png", "res/drawable-hdpi-v4/ic_media_stop_dark.png", "res/drawable-hdpi-v4/ic_vol_type_speaker_dark.png", "res/drawable-hdpi-v4/ic_mr_button_disabled_dark.png", "res/drawable-hdpi-v4/ic_audiotrack_light.png", "res/drawable-hdpi-v4/ic_vol_type_speaker_group_dark.png", "res/drawable-hdpi-v4/ic_media_stop_light.png", "res/drawable-hdpi-v4/ic_media_play_dark.png", "res/drawable-hdpi-v4/ic_audiotrack_dark.png", "res/drawable-hdpi-v4/ic_vol_type_speaker_light.png", "res/drawable-hdpi-v4/ic_mr_button_disconnected_dark.png", "res/drawable-hdpi-v4/ic_mr_button_disconnected_light.png", "res/drawable-hdpi-v4/ic_vol_type_speaker_group_light.png", "res/values-tr/values-tr.xml", "res/values-hu/values-hu.xml", "res/values-km/values-km.xml", "res/values-fr-rCA/values-fr-rCA.xml", "res/values-gu/values-gu.xml", "res/interpolator/mr_fast_out_slow_in.xml", "res/interpolator/mr_linear_out_slow_in.xml", "res/layout/mr_controller_material_dialog_b.xml", "res/layout/mr_volume_control.xml", "res/layout/mr_controller_volume_item.xml", "res/layout/mr_chooser_list_item.xml", "res/layout/mr_chooser_dialog.xml", "res/layout/mr_playback_control.xml", "res/values-ms/values-ms.xml", "res/values-ja/values-ja.xml", "res/values-eu/values-eu.xml", "res/values-sv/values-sv.xml", "res/values-mn/values-mn.xml", "res/values-ta/values-ta.xml", "res/values-pl/values-pl.xml", "res/drawable-xxhdpi-v4/ic_mr_button_connected_28_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_00_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_28_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_04_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_28_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_06_light.png", "res/drawable-xxhdpi-v4/ic_dialog_close_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_29_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_01_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_23_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_18_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_04_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_12_light.png", "res/drawable-xxhdpi-v4/ic_media_pause_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_disabled_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_grey.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_14_dark.png", "res/drawable-xxhdpi-v4/ic_media_play_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_12_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_14_light.png", "res/drawable-xxhdpi-v4/ic_dialog_close_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_30_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_27_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_07_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_25_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_18_dark.png", 
"res/drawable-xxhdpi-v4/ic_mr_button_connected_23_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_11_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_08_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_02_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_22_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_08_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_20_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_18_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_19_light.png", "res/drawable-xxhdpi-v4/ic_vol_type_tv_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_05_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_10_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_02_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_25_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_18_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_22_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_22_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_24_dark.png", "res/drawable-xxhdpi-v4/ic_media_pause_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_29_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_03_light.png", "res/drawable-xxhdpi-v4/ic_vol_type_tv_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_11_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_30_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_23_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_29_dark.png", "res/drawable-xxhdpi-v4/ic_media_stop_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_27_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_25_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_06_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_00_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_02_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_17_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_03_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_09_dark.png", "res/drawable-xxhdpi-v4/ic_vol_type_speaker_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_00_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_24_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_26_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_29_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_30_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_05_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_08_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_15_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_17_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_disabled_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_26_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_24_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_16_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_27_dark.png", "res/drawable-xxhdpi-v4/ic_audiotrack_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_13_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_16_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_04_light.png", "res/drawable-xxhdpi-v4/ic_vol_type_speaker_group_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_13_light.png", "res/drawable-xxhdpi-v4/ic_media_stop_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_17_light.png", 
"res/drawable-xxhdpi-v4/ic_mr_button_connected_16_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_16_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_01_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_07_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_13_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_19_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_00_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_14_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_03_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_15_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_20_light.png", "res/drawable-xxhdpi-v4/ic_media_play_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_03_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_27_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_21_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_28_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_02_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_10_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_23_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_15_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_21_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_05_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_19_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_20_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_06_dark.png", "res/drawable-xxhdpi-v4/ic_audiotrack_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_21_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_09_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_20_dark.png", "res/drawable-xxhdpi-v4/ic_vol_type_speaker_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_disconnected_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_19_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_22_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_25_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_09_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_11_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_12_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_10_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_13_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_24_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_01_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_26_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_26_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_07_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_04_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_10_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_17_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_05_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_07_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_15_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_08_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_11_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_09_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_disconnected_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_12_light.png", "res/drawable-xxhdpi-v4/ic_vol_type_speaker_group_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connecting_14_light.png", 
"res/drawable-xxhdpi-v4/ic_mr_button_connecting_21_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_30_dark.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_06_light.png", "res/drawable-xxhdpi-v4/ic_mr_button_connected_01_dark.png", "res/values-lt/values-lt.xml", "res/values-bg/values-bg.xml", "res/values-es-rUS/values-es-rUS.xml", "res/values-iw/values-iw.xml", "res/values-en-rXC/values-en-rXC.xml", "res/values-mr/values-mr.xml", "res/values-uz/values-uz.xml", "res/values-pa/values-pa.xml", "res/values-fi/values-fi.xml", "res/values-am/values-am.xml", "res/values-pt-rPT/values-pt-rPT.xml", "res/values-ar/values-ar.xml", "res/values-ky/values-ky.xml", "res/drawable-mdpi-v4/ic_dialog_close_light.png", "res/drawable-mdpi-v4/ic_media_pause_light.png", "res/drawable-mdpi-v4/ic_mr_button_disabled_light.png", "res/drawable-mdpi-v4/ic_mr_button_grey.png", "res/drawable-mdpi-v4/ic_media_play_light.png", "res/drawable-mdpi-v4/ic_dialog_close_dark.png", "res/drawable-mdpi-v4/ic_vol_type_tv_dark.png", "res/drawable-mdpi-v4/ic_media_pause_dark.png", "res/drawable-mdpi-v4/ic_vol_type_tv_light.png", "res/drawable-mdpi-v4/ic_media_stop_dark.png", "res/drawable-mdpi-v4/ic_vol_type_speaker_dark.png", "res/drawable-mdpi-v4/ic_mr_button_disabled_dark.png", "res/drawable-mdpi-v4/ic_audiotrack_light.png", "res/drawable-mdpi-v4/ic_vol_type_speaker_group_dark.png", "res/drawable-mdpi-v4/ic_media_stop_light.png", "res/drawable-mdpi-v4/ic_media_play_dark.png", "res/drawable-mdpi-v4/ic_audiotrack_dark.png", "res/drawable-mdpi-v4/ic_vol_type_speaker_light.png", "res/drawable-mdpi-v4/ic_mr_button_disconnected_dark.png", "res/drawable-mdpi-v4/ic_mr_button_disconnected_light.png", "res/drawable-mdpi-v4/ic_vol_type_speaker_group_light.png", "res/values-bs/values-bs.xml", "res/values/values.xml", "res/values-hr/values-hr.xml", "res/values-en-rAU/values-en-rAU.xml", "res/values-ro/values-ro.xml", "res/drawable-xhdpi-v4/ic_mr_button_connected_28_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_00_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_28_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_04_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_28_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_06_light.png", "res/drawable-xhdpi-v4/ic_dialog_close_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_29_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_01_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_23_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_18_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_04_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_12_light.png", "res/drawable-xhdpi-v4/ic_media_pause_light.png", "res/drawable-xhdpi-v4/ic_mr_button_disabled_light.png", "res/drawable-xhdpi-v4/ic_mr_button_grey.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_14_dark.png", "res/drawable-xhdpi-v4/ic_media_play_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_12_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_14_light.png", "res/drawable-xhdpi-v4/ic_dialog_close_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_30_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_27_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_07_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_25_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_18_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_23_light.png", 
"res/drawable-xhdpi-v4/ic_mr_button_connected_11_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_08_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_02_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_22_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_08_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_20_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_18_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_19_light.png", "res/drawable-xhdpi-v4/ic_vol_type_tv_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_05_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_10_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_02_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_25_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_18_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_22_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_22_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_24_dark.png", "res/drawable-xhdpi-v4/ic_media_pause_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_29_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_03_light.png", "res/drawable-xhdpi-v4/ic_vol_type_tv_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_11_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_30_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_23_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_29_dark.png", "res/drawable-xhdpi-v4/ic_media_stop_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_27_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_25_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_06_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_00_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_02_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_17_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_03_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_09_dark.png", "res/drawable-xhdpi-v4/ic_vol_type_speaker_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_00_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_24_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_26_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_29_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_30_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_05_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_08_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_15_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_17_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_disabled_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_26_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_24_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_16_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_27_dark.png", "res/drawable-xhdpi-v4/ic_audiotrack_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_13_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_16_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_04_light.png", "res/drawable-xhdpi-v4/ic_vol_type_speaker_group_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_13_light.png", "res/drawable-xhdpi-v4/ic_media_stop_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_17_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_16_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_16_dark.png", 
"res/drawable-xhdpi-v4/ic_mr_button_connecting_01_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_07_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_13_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_19_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_00_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_14_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_03_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_15_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_20_light.png", "res/drawable-xhdpi-v4/ic_media_play_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_03_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_27_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_21_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_28_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_02_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_10_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_23_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_15_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_21_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_05_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_19_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_20_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_06_dark.png", "res/drawable-xhdpi-v4/ic_audiotrack_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_21_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_09_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_20_dark.png", "res/drawable-xhdpi-v4/ic_vol_type_speaker_light.png", "res/drawable-xhdpi-v4/ic_mr_button_disconnected_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_19_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_22_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_25_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_09_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_11_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_12_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_10_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_13_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_24_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_01_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_26_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_26_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_07_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_04_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_10_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_17_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_05_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_07_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_15_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_08_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_11_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_09_light.png", "res/drawable-xhdpi-v4/ic_mr_button_disconnected_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_12_light.png", "res/drawable-xhdpi-v4/ic_vol_type_speaker_group_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_14_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connecting_21_light.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_30_dark.png", "res/drawable-xhdpi-v4/ic_mr_button_connected_06_light.png", 
"res/drawable-xhdpi-v4/ic_mr_button_connected_01_dark.png", "res/values-vi/values-vi.xml", "res/values-ko/values-ko.xml", "res/values-zh-rTW/values-zh-rTW.xml", "res/values-cs/values-cs.xml", "res/values-ml/values-ml.xml", "res/values-te/values-te.xml", "res/values-si/values-si.xml", "res/values-es/values-es.xml", "res/values-af/values-af.xml", "res/values-zu/values-zu.xml", "res/values-lo/values-lo.xml", "res/values-land/values-land.xml", "res/values-mk/values-mk.xml", "res/values-sl/values-sl.xml", "res/values-sw600dp-v13/values-sw600dp-v13.xml", "res/values-sw/values-sw.xml", "res/values-bn/values-bn.xml", "res/values-sk/values-sk.xml", "res/values-lv/values-lv.xml", "res/values-is/values-is.xml", "res/values-da/values-da.xml", "res/values-it/values-it.xml", "res/values-gl/values-gl.xml", "res/values-de/values-de.xml", "res/values-be/values-be.xml", "res/values-en-rCA/values-en-rCA.xml", "res/values-fa/values-fa.xml", "res/values-ca/values-ca.xml", "res/values-th/values-th.xml", "res/values-nl/values-nl.xml", "res/values-hy/values-hy.xml", "res/values-zh-rHK/values-zh-rHK.xml", "res/values-tl/values-tl.xml", "res/values-kk/values-kk.xml", "res/values-pt/values-pt.xml", "res/values-my/values-my.xml", "res/drawable-xxxhdpi-v4/ic_group_collapse_13.png", "res/drawable-xxxhdpi-v4/ic_mr_button_grey.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_09.png", "res/drawable-xxxhdpi-v4/ic_group_expand_06.png", "res/drawable-xxxhdpi-v4/ic_group_expand_03.png", "res/drawable-xxxhdpi-v4/ic_group_expand_09.png", "res/drawable-xxxhdpi-v4/ic_group_expand_07.png", "res/drawable-xxxhdpi-v4/ic_group_expand_08.png", "res/drawable-xxxhdpi-v4/ic_group_expand_14.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_04.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_14.png", "res/drawable-xxxhdpi-v4/ic_group_expand_04.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_05.png", "res/drawable-xxxhdpi-v4/ic_group_expand_11.png", "res/drawable-xxxhdpi-v4/ic_group_expand_10.png", "res/drawable-xxxhdpi-v4/ic_group_expand_05.png", "res/drawable-xxxhdpi-v4/ic_group_expand_13.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_06.png", "res/drawable-xxxhdpi-v4/ic_group_expand_15.png", "res/drawable-xxxhdpi-v4/ic_group_expand_02.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_10.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_15.png", "res/drawable-xxxhdpi-v4/ic_group_expand_00.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_02.png", "res/drawable-xxxhdpi-v4/ic_group_expand_12.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_03.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_08.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_01.png", "res/drawable-xxxhdpi-v4/ic_group_expand_01.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_00.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_12.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_07.png", "res/drawable-xxxhdpi-v4/ic_group_collapse_11.png", "res/values-et/values-et.xml", "res/values-en-rIN/values-en-rIN.xml", "res/values-sr/values-sr.xml", "res/values-fr/values-fr.xml", "res/values-kn/values-kn.xml", "res/values-nb/values-nb.xml", "res/values-sw720dp-v13/values-sw720dp-v13.xml", "res/values-ne/values-ne.xml", "res/drawable/mr_media_pause_dark.xml", "res/drawable/mr_dialog_close_dark.xml", "res/drawable/mr_media_stop_light.xml", "res/drawable/mr_button_dark.xml", "res/drawable/mr_button_connected_dark.xml", "res/drawable/mr_dialog_material_background_dark.xml", "res/drawable/mr_button_connecting_light.xml", "res/drawable/mr_media_play_light.xml", 
"res/drawable/mr_media_pause_light.xml", "res/drawable/mr_dialog_material_background_light.xml", "res/drawable/mr_button_connecting_dark.xml", "res/drawable/mr_media_stop_dark.xml", "res/drawable/mr_dialog_close_light.xml", "res/drawable/mr_group_expand.xml", "res/drawable/mr_vol_type_audiotrack_dark.xml", "res/drawable/mr_vol_type_audiotrack_light.xml", "res/drawable/mr_media_play_dark.xml", "res/drawable/mr_group_collapse.xml", "res/drawable/mr_button_light.xml", "res/drawable/mr_button_connected_light.xml", "res/values-b+sr+Latn/values-b+sr+Latn.xml", "res/values-ka/values-ka.xml", "res/values-zh-rCN/values-zh-rCN.xml", "res/values-az/values-az.xml", "res/values-sq/values-sq.xml", "res/values-hi/values-hi.xml", "res/values-in/values-in.xml", "res/values-uk/values-uk.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_v7_palette_java.info b/build/secondary/third_party/android_tools/support/android_support_v7_palette_java.info
deleted file mode 100644
index cd54060..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_v7_palette_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_v7_preference_java.info b/build/secondary/third_party/android_tools/support/android_support_v7_preference_java.info
deleted file mode 100644
index 25a4927..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_v7_preference_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = true
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/layout-v11/preference.xml", "res/layout-v11/preference_dropdown.xml", "res/layout/preference_recyclerview.xml", "res/layout/preference_widget_checkbox.xml", "res/layout/preference_widget_seekbar.xml", "res/layout/preference_widget_switch_compat.xml", "res/layout/preference_list_fragment.xml", "res/layout/preference_dialog_edittext.xml", "res/layout/preference_information.xml", "res/layout/preference_category.xml", "res/values/values.xml", "res/values-v17/values-v17.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_v7_recyclerview_java.info b/build/secondary/third_party/android_tools/support/android_support_v7_recyclerview_java.info
deleted file mode 100644
index a25d255..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_v7_recyclerview_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = true
-has_r_text_file = true
-is_manifest_empty = true
-resources = [ "res/values/values.xml" ]
-subjar_tuples = [  ]
-subjars = [  ]
diff --git a/build/secondary/third_party/android_tools/support/android_support_vector_drawable_java.info b/build/secondary/third_party/android_tools/support/android_support_vector_drawable_java.info
deleted file mode 100644
index cd54060..0000000
--- a/build/secondary/third_party/android_tools/support/android_support_vector_drawable_java.info
+++ /dev/null
@@ -1,13 +0,0 @@
-# Generated by //build/android/gyp/aar.py
-# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
-
-aidl = [  ]
-assets = [  ]
-has_classes_jar = true
-has_native_libraries = false
-has_proguard_flags = false
-has_r_text_file = true
-is_manifest_empty = true
-resources = [  ]
-subjar_tuples = [  ]
-subjars = [  ]
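These .info files are generated metadata rather than hand-maintained sources; per the header comment in each, regeneration is driven by a GN build argument. A minimal sketch of that flow, assuming a Chromium checkout and an Android output directory (the directory name and the target_os line are illustrative, not from this change):

  # out/Android/args.gn
  target_os = "android"                  # assumption: needed for the android_tools targets
  update_android_aar_prebuilts = true    # asks aar.py to rewrite the .info files
  # then re-run: gn gen out/Android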
diff --git a/build/secondary/third_party/catapult/devil/BUILD.gn b/build/secondary/third_party/catapult/devil/BUILD.gn
deleted file mode 100644
index 5e9707e..0000000
--- a/build/secondary/third_party/catapult/devil/BUILD.gn
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/symlink.gni")
-import("//build/secondary/third_party/catapult/devil/devil_arm.gni")
-import("//testing/android/empty_apk/empty_apk.gni")
-
-empty_apk("empty_system_webview_apk") {
-  package_name = "com.android.webview"
-  apk_name = "EmptySystemWebView"
-}
-
-group("devil") {
-  testonly = true
-  deps = [
-    ":empty_system_webview_apk",
-    "//buildtools/third_party/libc++($host_toolchain)",
-    "//tools/android/forwarder2",
-    "//tools/android/md5sum",
-  ]
-
-  if (build_devil_arm_deps) {
-    deps += [
-      ":host_forwarder_arm",
-      ":libcxx_arm",
-      ":md5sum_bin_host_arm",
-    ]
-  }
-}
-
-if (build_devil_arm_deps) {
-  binary_symlink("host_forwarder_arm") {
-    binary_label = "//tools/android/forwarder2:host_forwarder(//build/toolchain/linux:clang_arm)"
-    output_name = "host_forwarder_arm"
-  }
-
-  binary_symlink("md5sum_bin_host_arm") {
-    binary_label =
-        "//tools/android/md5sum:md5sum_bin(//build/toolchain/linux:clang_arm)"
-    output_name = "md5sum_bin_host_arm"
-  }
-
-  binary_symlink("libcxx_arm") {
-    binary_label =
-        "//buildtools/third_party/libc++(//build/toolchain/linux:clang_arm)"
-    binary_output_name = "libc++.so"
-    output_name = "libc++_arm"
-  }
-}
diff --git a/build/secondary/third_party/catapult/devil/devil_arm.gni b/build/secondary/third_party/catapult/devil/devil_arm.gni
deleted file mode 100644
index 04c5e40..0000000
--- a/build/secondary/third_party/catapult/devil/devil_arm.gni
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-declare_args() {
-  # Whether to build Devil host dependencies for Linux arm, e.g. for use on
-  # Raspberry Pis.
-  build_devil_arm_deps = false
-}
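The only consumer of this argument was the devil group above, so turning on the extra arm host binaries was a one-line build argument. A sketch, with the output directory name purely illustrative:

  # out/Release/args.gn
  build_devil_arm_deps = true   # adds :host_forwarder_arm, :libcxx_arm and :md5sum_bin_host_arm
                                # to the //third_party/catapult/devil:devil group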
diff --git a/build/secondary/third_party/libjpeg_turbo/BUILD.gn b/build/secondary/third_party/libjpeg_turbo/BUILD.gn
deleted file mode 100644
index 265d30b..0000000
--- a/build/secondary/third_party/libjpeg_turbo/BUILD.gn
+++ /dev/null
@@ -1,221 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Do not use the targets in this file unless you need a certain libjpeg
-# implementation. Use the meta target //third_party:jpeg instead.
-
-import("//build/config/sanitizers/sanitizers.gni")
-if (current_cpu == "arm") {
-  import("//build/config/arm.gni")
-}
-
-assert(!is_ios, "This is not used on iOS, don't drag it in unintentionally")
-
-if (current_cpu == "x86" || current_cpu == "x64") {
-  import("//third_party/yasm/yasm_assemble.gni")
-
-  yasm_assemble("simd_asm") {
-    defines = []
-
-    if (current_cpu == "x86") {
-      sources = [
-        "simd/jccolor-mmx.asm",
-        "simd/jccolor-sse2.asm",
-        "simd/jcgray-mmx.asm",
-        "simd/jcgray-sse2.asm",
-        "simd/jchuff-sse2.asm",
-        "simd/jcsample-mmx.asm",
-        "simd/jcsample-sse2.asm",
-        "simd/jdcolor-mmx.asm",
-        "simd/jdcolor-sse2.asm",
-        "simd/jdmerge-mmx.asm",
-        "simd/jdmerge-sse2.asm",
-        "simd/jdsample-mmx.asm",
-        "simd/jdsample-sse2.asm",
-        "simd/jfdctflt-3dn.asm",
-        "simd/jfdctflt-sse.asm",
-        "simd/jfdctfst-mmx.asm",
-        "simd/jfdctfst-sse2.asm",
-        "simd/jfdctint-mmx.asm",
-        "simd/jfdctint-sse2.asm",
-        "simd/jidctflt-3dn.asm",
-        "simd/jidctflt-sse.asm",
-        "simd/jidctflt-sse2.asm",
-        "simd/jidctfst-mmx.asm",
-        "simd/jidctfst-sse2.asm",
-        "simd/jidctint-mmx.asm",
-        "simd/jidctint-sse2.asm",
-        "simd/jidctred-mmx.asm",
-        "simd/jidctred-sse2.asm",
-        "simd/jquant-3dn.asm",
-        "simd/jquant-mmx.asm",
-        "simd/jquant-sse.asm",
-        "simd/jquantf-sse2.asm",
-        "simd/jquanti-sse2.asm",
-        "simd/jsimdcpu.asm",
-      ]
-      defines += [
-        "__x86__",
-        "PIC",
-      ]
-    } else if (current_cpu == "x64") {
-      sources = [
-        "simd/jccolor-sse2-64.asm",
-        "simd/jcgray-sse2-64.asm",
-        "simd/jchuff-sse2-64.asm",
-        "simd/jcsample-sse2-64.asm",
-        "simd/jdcolor-sse2-64.asm",
-        "simd/jdmerge-sse2-64.asm",
-        "simd/jdsample-sse2-64.asm",
-        "simd/jfdctflt-sse-64.asm",
-        "simd/jfdctfst-sse2-64.asm",
-        "simd/jfdctint-sse2-64.asm",
-        "simd/jidctflt-sse2-64.asm",
-        "simd/jidctfst-sse2-64.asm",
-        "simd/jidctint-sse2-64.asm",
-        "simd/jidctred-sse2-64.asm",
-        "simd/jquantf-sse2-64.asm",
-        "simd/jquanti-sse2-64.asm",
-      ]
-      defines += [
-        "__x86_64__",
-        "PIC",
-      ]
-    }
-
-    if (is_win) {
-      defines += [ "MSVC" ]
-      if (current_cpu == "x86") {
-        defines += [ "WIN32" ]
-      } else {
-        defines += [ "WIN64" ]
-      }
-    } else if (is_mac || is_ios) {
-      defines += [ "MACHO" ]
-    } else if (is_linux || is_android || is_fuchsia) {
-      defines += [ "ELF" ]
-    }
-  }
-}
-
-static_library("simd") {
-  if (current_cpu == "x86") {
-    deps = [
-      ":simd_asm",
-    ]
-    sources = [
-      "simd/jsimd_i386.c",
-    ]
-  } else if (current_cpu == "x64") {
-    deps = [
-      ":simd_asm",
-    ]
-    sources = [
-      "simd/jsimd_x86_64.c",
-    ]
-  } else if (current_cpu == "arm" && arm_version >= 7 &&
-             (arm_use_neon || arm_optionally_use_neon)) {
-    sources = [
-      "simd/jsimd_arm.c",
-      "simd/jsimd_arm_neon.S",
-    ]
-  } else if (current_cpu == "arm64") {
-    sources = [
-      "simd/jsimd_arm64.c",
-      "simd/jsimd_arm64_neon.S",
-    ]
-  } else {
-    sources = [
-      "jsimd_none.c",
-    ]
-  }
-
-  if (is_win) {
-    cflags = [ "/wd4245" ]
-  }
-}
-
-config("libjpeg_config") {
-  include_dirs = [ "." ]
-}
-
-static_library("libjpeg") {
-  sources = [
-    "jcapimin.c",
-    "jcapistd.c",
-    "jccoefct.c",
-    "jccolor.c",
-    "jcdctmgr.c",
-    "jchuff.c",
-    "jchuff.h",
-    "jcinit.c",
-    "jcmainct.c",
-    "jcmarker.c",
-    "jcmaster.c",
-    "jcomapi.c",
-    "jconfig.h",
-    "jcparam.c",
-    "jcphuff.c",
-    "jcprepct.c",
-    "jcsample.c",
-    "jdapimin.c",
-    "jdapistd.c",
-    "jdatadst.c",
-    "jdatasrc.c",
-    "jdcoefct.c",
-    "jdcolor.c",
-    "jdct.h",
-    "jddctmgr.c",
-    "jdhuff.c",
-    "jdhuff.h",
-    "jdinput.c",
-    "jdmainct.c",
-    "jdmarker.c",
-    "jdmaster.c",
-    "jdmerge.c",
-    "jdphuff.c",
-    "jdpostct.c",
-    "jdsample.c",
-    "jerror.c",
-    "jerror.h",
-    "jfdctflt.c",
-    "jfdctfst.c",
-    "jfdctint.c",
-    "jidctflt.c",
-    "jidctfst.c",
-    "jidctint.c",
-    "jidctred.c",
-    "jinclude.h",
-    "jmemmgr.c",
-    "jmemnobs.c",
-    "jmemsys.h",
-    "jmorecfg.h",
-    "jpegint.h",
-    "jpeglib.h",
-    "jpeglibmangler.h",
-    "jquant1.c",
-    "jquant2.c",
-    "jutils.c",
-    "jversion.h",
-  ]
-
-  defines = [
-    "WITH_SIMD",
-    "NO_GETENV",
-  ]
-
-  configs += [ ":libjpeg_config" ]
-
-  public_configs = [ ":libjpeg_config" ]
-
-  # MemorySanitizer doesn't support assembly code, so keep it disabled in
-  # MSan builds for now.
-  if (is_msan) {
-    sources += [ "jsimd_none.c" ]
-  } else {
-    deps = [
-      ":simd",
-    ]
-  }
-}
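As the header comment warns, most targets are not meant to reach into this file directly; the supported entry point is the //third_party:jpeg meta target. A hedged consumer-side sketch (target and file names hypothetical):

  source_set("image_decoder") {
    sources = [ "jpeg_decoder.cc" ]
    deps = [ "//third_party:jpeg" ]   # resolves to this libjpeg_turbo build or another implementation
  }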
diff --git a/build/secondary/third_party/nss/BUILD.gn b/build/secondary/third_party/nss/BUILD.gn
deleted file mode 100644
index 5788f3e..0000000
--- a/build/secondary/third_party/nss/BUILD.gn
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/linux/pkg_config.gni")
-
-if (is_linux) {
-  # This is a dependency on NSS with no libssl. On Linux we use a built-in SSL
-  # library but the system NSS libraries. Non-Linux platforms using NSS use the
-  # hermetic one in //third_party/nss.
-  #
-  # Generally you should depend on //crypto:platform instead of using this
-  # config since that will properly pick up NSS or OpenSSL depending on
-  # platform and build config.
-  pkg_config("system_nss_no_ssl_config") {
-    packages = [ "nss" ]
-    extra_args = [
-      "-v",
-      "-lssl3",
-    ]
-  }
-}
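The comment above steers most code away from using this config directly; the usual pattern is to depend on the crypto meta target instead. A hedged sketch (target and file names hypothetical):

  source_set("cert_store") {
    sources = [ "cert_store.cc" ]
    deps = [ "//crypto:platform" ]   # picks NSS or OpenSSL per platform and build config
  }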
diff --git a/build/shim_headers.gni b/build/shim_headers.gni
deleted file mode 100644
index 5659148..0000000
--- a/build/shim_headers.gni
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-template("shim_headers") {
-  action_name = "gen_${target_name}"
-  config_name = "${target_name}_config"
-  shim_headers_path = "${root_gen_dir}/shim_headers/${target_name}"
-
-  config(config_name) {
-    include_dirs = [ shim_headers_path ]
-  }
-
-  action(action_name) {
-    script = "//tools/generate_shim_headers/generate_shim_headers.py"
-    args = [
-      "--generate",
-      "--headers-root",
-      rebase_path(invoker.root_path),
-      "--output-directory",
-      rebase_path(shim_headers_path),
-    ]
-    if (defined(invoker.prefix)) {
-      args += [
-        "--prefix",
-        invoker.prefix,
-      ]
-    }
-    args += invoker.headers
-
-    outputs = process_file_template(invoker.headers,
-                                    "${shim_headers_path}/{{source_file_part}}")
-  }
-
-  group(target_name) {
-    deps = [
-      ":${action_name}",
-    ]
-    all_dependent_configs = [ ":${config_name}" ]
-  }
-}
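A minimal invocation sketch of the template above; the target name, root_path, prefix and header list are illustrative, not taken from any real BUILD.gn:

  shim_headers("libfoo_shim") {
    root_path = "include"                 # forwarded as --headers-root
    prefix = "foo/"                       # optional, forwarded as --prefix
    headers = [ "foo.h", "foo_types.h" ]
  }
  # Dependents of ":libfoo_shim" pick up the generated include dir through
  # the all_dependent_configs set on the wrapping group.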
diff --git a/build/split_static_library.gni b/build/split_static_library.gni
deleted file mode 100644
index ea378e3..0000000
--- a/build/split_static_library.gni
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-template("split_static_library") {
-  assert(defined(invoker.split_count),
-         "Must define split_count for split_static_library")
-
-  # In many conditions the number of inputs will be 1 (because the count will
-  # be conditional on platform or configuration) so optimize that.
-  if (invoker.split_count == 1) {
-    static_library(target_name) {
-      forward_variables_from(invoker, "*")
-    }
-  } else {
-    group_name = target_name
-
-    generated_static_libraries = []
-    current_library_index = 0
-    foreach(current_sources, split_list(invoker.sources, invoker.split_count)) {
-      current_name = "${target_name}_$current_library_index"
-      assert(
-          current_sources != [],
-          "Your values for splitting a static library generate one that has no sources.")
-      generated_static_libraries += [ ":$current_name" ]
-
-      static_library(current_name) {
-        # Generated static library shard gets everything but sources (which
-        # we're redefining) and visibility (which is set to be the group
-        # below).
-        forward_variables_from(invoker,
-                               "*",
-                               [
-                                 "check_includes",
-                                 "sources",
-                                 "visibility",
-                               ])
-        sources = current_sources
-        visibility = [ ":$group_name" ]
-
-        # When splitting a target's sources up into a series of static
-        # libraries, those targets will naturally include headers from each
-        # other arbitrarily. We could theoretically generate a web of
-        # dependencies and allow_circular_includes_from between all pairs of
-        # targets, but that's very cumbersome. Typical usage in Chrome is that
-        # only official Windows builds use split static libraries due to the
-        # Visual Studio size limits, and this means we'll still get header
-        # checking coverage for the other configurations.
-        check_includes = false
-
-        # Uniquify the output name if one is specified.
-        if (defined(invoker.output_name)) {
-          output_name = "${invoker.output_name}_$current_library_index"
-        }
-      }
-
-      current_library_index = current_library_index + 1
-    }
-
-    group(group_name) {
-      public_deps = generated_static_libraries
-      forward_variables_from(invoker,
-                             [
-                               "testonly",
-                               "visibility",
-                             ])
-    }
-  }
-}
-
-set_defaults("split_static_library") {
-  configs = default_compiler_configs
-}
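A usage sketch with a hypothetical target name and sources; split_count must be defined, and split_list() divides the sources into that many shards:

  split_static_library("browser") {
    split_count = 2                        # typically 1 except on official Windows builds
    sources = [ "a.cc", "b.cc", "c.cc", "d.cc" ]
  }
  # Yields static libraries browser_0 and browser_1, re-exported through group("browser").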
diff --git a/build/swarming_xcode_install.py b/build/swarming_xcode_install.py
deleted file mode 100755
index 7764aa5..0000000
--- a/build/swarming_xcode_install.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Script used to install Xcode on the swarming bots.
-"""
-
-import os
-import shutil
-import subprocess
-import sys
-import tarfile
-import tempfile
-
-import mac_toolchain
-
-VERSION = '9A235'
-URL = 'gs://chrome-mac-sdk/ios-toolchain-9A235-1.tgz'
-REMOVE_DIR = '/Applications/Xcode9.0-Beta4.app/'
-OUTPUT_DIR = '/Applications/Xcode9.0.app/'
-
-def main():
-  # Check if it's already installed.
-  if os.path.exists(OUTPUT_DIR):
-    env = os.environ.copy()
-    env['DEVELOPER_DIR'] = OUTPUT_DIR
-    cmd = ['xcodebuild', '-version']
-    found_version = \
-        subprocess.Popen(cmd, env=env, stdout=subprocess.PIPE).communicate()[0]
-    if VERSION in found_version:
-      print "Xcode %s already installed" % VERSION
-      sys.exit(0)
-
-  # Confirm old dir is there first.
-  if not os.path.exists(REMOVE_DIR):
-    print "Failing early since %s isn't there." % REMOVE_DIR
-    sys.exit(1)
-
-  # Download Xcode.
-  with tempfile.NamedTemporaryFile() as temp:
-    env = os.environ.copy()
-    env['PATH'] += ":/b/depot_tools"
-    subprocess.check_call(['gsutil.py', 'cp', URL, temp.name], env=env)
-    if os.path.exists(OUTPUT_DIR):
-      shutil.rmtree(OUTPUT_DIR)
-    if not os.path.exists(OUTPUT_DIR):
-      os.makedirs(OUTPUT_DIR)
-    tarfile.open(mode='r:gz', name=temp.name).extractall(path=OUTPUT_DIR)
-
-  # Accept license, call runFirstLaunch.
-  mac_toolchain.FinalizeUnpack(OUTPUT_DIR, 'ios')
-
-  # Set new Xcode as default.
-  subprocess.check_call(['sudo', '/usr/bin/xcode-select', '-s', OUTPUT_DIR])
-
-  if os.path.exists(REMOVE_DIR):
-    shutil.rmtree(REMOVE_DIR)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
-
diff --git a/build/symlink.gni b/build/symlink.gni
deleted file mode 100644
index 4da5a57..0000000
--- a/build/symlink.gni
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Creates a symlink.
-# Args:
-#   source: Path to link to.
-#   output: Where to create the symlink.
-template("symlink") {
-  action(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "data_deps",
-                             "deps",
-                             "testonly",
-                             "visibility",
-                           ])
-    outputs = [
-      invoker.output,
-    ]
-    script = "//build/symlink.py"
-    args = [
-      "-f",
-      rebase_path(invoker.source, get_path_info(invoker.output, "dir")),
-      rebase_path(invoker.output, root_build_dir),
-    ]
-  }
-}
-
-# Creates a symlink from root_build_dir/target_name to |binary_label|. This rule
-# is meant to be used within if (current_toolchain == default_toolchain) blocks
-# and point to targets in the non-default toolchain.
-# Note that for executables, using a copy (as opposed to a symlink) does not
-# work when is_component_build=true, since dependent libraries are found via
-# relative location.
-#
-# Args:
-#   binary_label: Target that builds the file to symlink to. e.g.:
-#       ":$target_name($host_toolchain)".
-#   binary_output_name: The output_name set by the binary_label target
-#       (if applicable).
-#   output_name: Where to create the symlink
-#       (default="$root_out_dir/$binary_output_name").
-#
-# Example:
-#   if (current_toolchain == host_toolchain) {
-#     executable("foo") { ... }
-#   } else if (current_toolchain == default_toolchain) {
-#     binary_symlink("foo") {
-#       binary_label = ":foo($host_toolchain)"
-#     }
-#   }
-template("binary_symlink") {
-  symlink(target_name) {
-    forward_variables_from(invoker,
-                           [
-                             "output",
-                             "testonly",
-                             "visibility",
-                           ])
-    deps = [
-      invoker.binary_label,
-    ]
-    data_deps = [
-      invoker.binary_label,
-    ]
-    if (defined(invoker.data_deps)) {
-      data_deps += invoker.data_deps
-    }
-
-    _out_dir = get_label_info(invoker.binary_label, "root_out_dir")
-    if (defined(invoker.binary_output_name)) {
-      _name = invoker.binary_output_name
-    } else {
-      _name = get_label_info(invoker.binary_label, "name")
-    }
-    source = "$_out_dir/$_name"
-
-    _output_name = _name
-    if (defined(invoker.output_name)) {
-      _output_name = invoker.output_name
-    }
-    output = "$root_out_dir/$_output_name"
-  }
-}
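The plain symlink() template takes explicit paths; a small sketch with illustrative values:

  symlink("tool_link") {
    source = "$root_out_dir/tool"          # path the link points to
    output = "$root_out_dir/bin/tool"      # where //build/symlink.py creates the link
  }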
diff --git a/build/symlink.py b/build/symlink.py
deleted file mode 100755
index 5a261dc..0000000
--- a/build/symlink.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Make a symlink and optionally touch a file (to handle dependencies).
-
-Usage:
-  symlink.py [options] sources... target
-
-A symlink to source is created at target. If multiple sources are specified,
-then target is assumed to be a directory, and will contain all the links to
-the sources (basenames identical to their source).
-"""
-
-import errno
-import optparse
-import os.path
-import shutil
-import sys
-
-
-def Main(argv):
-  parser = optparse.OptionParser()
-  parser.add_option('-f', '--force', action='store_true')
-  parser.add_option('--touch')
-
-  options, args = parser.parse_args(argv[1:])
-  if len(args) < 2:
-    parser.error('at least two arguments required.')
-
-  target = args[-1]
-  sources = args[:-1]
-  for s in sources:
-    t = os.path.join(target, os.path.basename(s))
-    if len(sources) == 1 and not os.path.isdir(target):
-      t = target
-    t = os.path.expanduser(t)
-    if os.path.realpath(t) == s:
-      continue
-    try:
-      os.symlink(s, t)
-    except OSError, e:
-      if e.errno == errno.EEXIST and options.force:
-        if os.path.isdir(t):
-          shutil.rmtree(t, ignore_errors=True)
-        else:
-          os.remove(t)
-        os.symlink(s, t)
-      else:
-        raise
-
-
-  if options.touch:
-    with open(options.touch, 'w') as f:
-      pass
-
-
-if __name__ == '__main__':
-  sys.exit(Main(sys.argv))
diff --git a/build/toolchain/BUILD.gn b/build/toolchain/BUILD.gn
deleted file mode 100644
index 75701de..0000000
--- a/build/toolchain/BUILD.gn
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/toolchain/concurrent_links.gni")
-import("//build/toolchain/goma.gni")
-
-declare_args() {
-  # Pool for non goma tasks.
-  action_pool_depth = -1
-}
-
-if (action_pool_depth == -1 || use_goma) {
-  action_pool_depth = exec_script("get_cpu_count.py", [], "value")
-}
-
-if (current_toolchain == default_toolchain) {
-  pool("link_pool") {
-    depth = concurrent_links
-  }
-
-  pool("action_pool") {
-    depth = action_pool_depth
-  }
-}
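These pools only throttle tools that reference them; a hedged sketch of the referencing side, as it would appear in a toolchain definition elsewhere (toolchain name, command, and output pattern abbreviated):

  toolchain("example_toolchain") {
    tool("link") {
      command = "..."                                             # real link command elided
      outputs = [ "{{output_dir}}/{{target_output_name}}" ]
      pool = "//build/toolchain:link_pool($default_toolchain)"    # limits concurrent links
    }
  }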
diff --git a/build/toolchain/OWNERS b/build/toolchain/OWNERS
deleted file mode 100644
index 85d8d31..0000000
--- a/build/toolchain/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-dpranke@chromium.org
-scottmg@chromium.org
diff --git a/build/toolchain/aix/BUILD.gn b/build/toolchain/aix/BUILD.gn
deleted file mode 100644
index 202e59e..0000000
--- a/build/toolchain/aix/BUILD.gn
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/toolchain/gcc_toolchain.gni")
-
-gcc_toolchain("ppc64") {
-  cc = "gcc"
-  cxx = "g++"
-
-  readelf = "readelf"
-  nm = "nm"
-  ar = "ar"
-  ld = cxx
-
-  toolchain_args = {
-    current_cpu = "ppc64"
-    current_os = "aix"
-    is_clang = false
-  }
-}
diff --git a/build/toolchain/android/BUILD.gn b/build/toolchain/android/BUILD.gn
deleted file mode 100644
index 407bc3a..0000000
--- a/build/toolchain/android/BUILD.gn
+++ /dev/null
@@ -1,121 +0,0 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/clang/clang.gni")
-import("//build/config/sysroot.gni")  # Imports android/config.gni.
-import("//build/toolchain/gcc_toolchain.gni")
-
-# The Android clang toolchains share most of the same parameters, so we have this
-# wrapper around gcc_toolchain to avoid duplication of logic.
-#
-# Parameters:
-#  - toolchain_root
-#      Path to cpu-specific toolchain within the ndk.
-#  - sysroot
-#      Sysroot for this architecture.
-#  - lib_dir
-#      Subdirectory inside of sysroot where libs go.
-#  - binary_prefix
-#      Prefix of compiler executables.
-template("android_clang_toolchain") {
-  gcc_toolchain(target_name) {
-    assert(defined(invoker.toolchain_args),
-           "toolchain_args must be defined for android_clang_toolchain()")
-    toolchain_args = invoker.toolchain_args
-    toolchain_args.current_os = "android"
-
-    # Output linker map files for binary size analysis.
-    enable_linker_map = true
-
-    # Make our manually injected libs relative to the build dir.
-    _ndk_lib =
-        rebase_path(invoker.sysroot + "/" + invoker.lib_dir, root_build_dir)
-
-    libs_section_prefix = "$_ndk_lib/crtbegin_dynamic.o"
-    libs_section_postfix = "$_ndk_lib/crtend_android.o"
-
-    solink_libs_section_prefix = "$_ndk_lib/crtbegin_so.o"
-    solink_libs_section_postfix = "$_ndk_lib/crtend_so.o"
-
-    _android_tool_prefix =
-        "${invoker.toolchain_root}/bin/${invoker.binary_prefix}-"
-
-    # The tools should be run relative to the build dir.
-    _tool_prefix = rebase_path("$_android_tool_prefix", root_build_dir)
-
-    _prefix = rebase_path("$clang_base_path/bin", root_build_dir)
-    cc = "$_prefix/clang"
-    cxx = "$_prefix/clang++"
-    ar = "$_prefix/llvm-ar"
-    ld = cxx
-    readelf = _tool_prefix + "readelf"
-    nm = _tool_prefix + "nm"
-    strip = rebase_path("//third_party/eu-strip/bin/eu-strip", root_build_dir)
-    use_unstripped_as_runtime_outputs = true
-
-    # Don't use .cr.so for loadable_modules since they are always loaded via
-    # absolute path.
-    loadable_module_extension = ".so"
-  }
-}
-
-android_clang_toolchain("android_clang_x86") {
-  toolchain_root = x86_android_toolchain_root
-  sysroot = "$android_ndk_root/$x86_android_sysroot_subdir"
-  lib_dir = "usr/lib"
-  binary_prefix = "i686-linux-android"
-  toolchain_args = {
-    current_cpu = "x86"
-  }
-}
-
-android_clang_toolchain("android_clang_arm") {
-  toolchain_root = arm_android_toolchain_root
-  sysroot = "$android_ndk_root/$arm_android_sysroot_subdir"
-  lib_dir = "usr/lib"
-  binary_prefix = "arm-linux-androideabi"
-  toolchain_args = {
-    current_cpu = "arm"
-  }
-}
-
-android_clang_toolchain("android_clang_mipsel") {
-  toolchain_root = mips_android_toolchain_root
-  sysroot = "$android_ndk_root/$mips_android_sysroot_subdir"
-  lib_dir = "usr/lib"
-  binary_prefix = "mipsel-linux-android"
-  toolchain_args = {
-    current_cpu = "mipsel"
-  }
-}
-
-android_clang_toolchain("android_clang_x64") {
-  toolchain_root = x86_64_android_toolchain_root
-  sysroot = "$android_ndk_root/$x86_64_android_sysroot_subdir"
-  lib_dir = "usr/lib64"
-  binary_prefix = "x86_64-linux-android"
-  toolchain_args = {
-    current_cpu = "x64"
-  }
-}
-
-android_clang_toolchain("android_clang_arm64") {
-  toolchain_root = arm64_android_toolchain_root
-  sysroot = "$android_ndk_root/$arm64_android_sysroot_subdir"
-  lib_dir = "usr/lib"
-  binary_prefix = "aarch64-linux-android"
-  toolchain_args = {
-    current_cpu = "arm64"
-  }
-}
-
-android_clang_toolchain("android_clang_mips64el") {
-  toolchain_root = mips64_android_toolchain_root
-  sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
-  lib_dir = "usr/lib64"
-  binary_prefix = "mips64el-linux-android"
-  toolchain_args = {
-    current_cpu = "mips64el"
-  }
-}
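
The android_clang_toolchain template leans on rebase_path() to express the NDK crt*.o
objects and the binutils prefix relative to the build directory, since that is where
Ninja runs the tools from. A rough Python analogue of that rebasing, with purely
hypothetical paths:

  import os

  def rebase(path, new_base):
      # Approximates GN's rebase_path(path, new_base): express |path|
      # relative to |new_base|.
      return os.path.relpath(path, new_base)

  root_build_dir = "/work/src/out/Default"                        # hypothetical
  ndk_lib = "/work/src/third_party/android_ndk/sysroot/usr/lib"   # hypothetical
  print(rebase(ndk_lib, root_build_dir) + "/crtbegin_dynamic.o")
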
diff --git a/build/toolchain/cc_wrapper.gni b/build/toolchain/cc_wrapper.gni
deleted file mode 100644
index 0a03dde..0000000
--- a/build/toolchain/cc_wrapper.gni
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/toolchain/goma.gni")
-
-# Defines the configuration of cc wrapper
-# ccache: a c/c++ compiler cache which can greatly reduce recompilation times.
-# icecc, distcc: it takes compile jobs from a build and distributes them among
-#                remote machines allowing a parallel build.
-#
-# TIPS
-#
-# 1) ccache
-# Set clang_use_chrome_plugins=false if using ccache 3.1.9 or earlier, since
-# these versions don't support -Xclang.  (3.1.10 and later will silently
-# ignore -Xclang, so it doesn't matter if you disable clang_use_chrome_plugins
-# or not).
-#
-# Use ccache 3.2 or later to avoid clang unused argument warnings:
-# https://bugzilla.samba.org/show_bug.cgi?id=8118
-#
-# To avoid -Wparentheses-equality clang warnings, at some cost in terms of
-# speed, you can do:
-# export CCACHE_CPP2=yes
-#
-# 2) icecc
-# Set clang_use_chrome_plugins=false because icecc cannot distribute custom
-# clang libraries.
-#
-# To use icecc and ccache together, set cc_wrapper = "ccache" with
-# export CCACHE_PREFIX=icecc
-
-declare_args() {
-  # Set to "ccache", "icecc" or "distcc".  Probably doesn't work on windows.
-  cc_wrapper = ""
-}
-
-assert(!use_goma || cc_wrapper == "",
-       "use_goma and cc_wrapper can not be used together.")
diff --git a/build/toolchain/clang_static_analyzer.gni b/build/toolchain/clang_static_analyzer.gni
deleted file mode 100644
index 9cb7975..0000000
--- a/build/toolchain/clang_static_analyzer.gni
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright (c) 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Defines the configuration of Clang static analysis tools.
-# See docs/clang_static_analyzer.md for more information.
-
-declare_args() {
-  # Uses the Clang static analysis tools during compilation.
-  use_clang_static_analyzer = false
-}
diff --git a/build/toolchain/clang_static_analyzer_wrapper.py b/build/toolchain/clang_static_analyzer_wrapper.py
deleted file mode 100755
index 1c54d72..0000000
--- a/build/toolchain/clang_static_analyzer_wrapper.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Adds an analysis build step to invocations of the Clang C/C++ compiler.
-
-Usage: clang_static_analyzer_wrapper.py <compiler> [args...]
-"""
-
-import argparse
-import fnmatch
-import itertools
-import os
-import sys
-import wrapper_utils
-
-# Flags used to enable analysis for Clang invocations.
-analyzer_enable_flags = [
-    '--analyze',
-]
-
-# Flags used to configure the analyzer's behavior.
-analyzer_option_flags = [
-    '-fdiagnostics-show-option',
-    '-analyzer-checker=cplusplus',
-    '-analyzer-opt-analyze-nested-blocks',
-    '-analyzer-eagerly-assume',
-    '-analyzer-output=text',
-    '-analyzer-config',
-    'suppress-c++-stdlib=true',
-
-# List of checkers to execute.
-# The full list of checkers can be found at
-# https://clang-analyzer.llvm.org/available_checks.html.
-    '-analyzer-checker=core',
-    '-analyzer-checker=unix',
-    '-analyzer-checker=deadcode',
-]
-
-
-# Prepends every element of a list |args| with |token|.
-# e.g. ['-analyzer-foo', '-analyzer-bar'] => ['-Xanalyzer', '-analyzer-foo',
-#                                             '-Xanalyzer', '-analyzer-bar']
-def interleave_args(args, token):
-  return list(sum(zip([token] * len(args), args), ()))
-
-
-def main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--mode',
-                      choices=['clang', 'cl'],
-                      required=True,
-                      help='Specifies the compiler argument convention to use.')
-  parser.add_argument('args', nargs=argparse.REMAINDER)
-  parsed_args = parser.parse_args()
-
-  prefix = '-Xclang' if parsed_args.mode == 'cl' else '-Xanalyzer'
-  cmd = parsed_args.args + analyzer_enable_flags + \
-        interleave_args(analyzer_option_flags, prefix)
-  returncode, stderr = wrapper_utils.CaptureCommandStderr(
-      wrapper_utils.CommandToRun(cmd))
-  sys.stderr.write(stderr)
-
-  returncode, stderr = wrapper_utils.CaptureCommandStderr(
-    wrapper_utils.CommandToRun(parsed_args.args))
-  sys.stderr.write(stderr)
-
-  return returncode
-
-if __name__ == '__main__':
-  sys.exit(main())
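
The interleave_args() helper above is the key trick: every analyzer flag has to be
prefixed with -Xanalyzer (or -Xclang) so the driver forwards it instead of rejecting
it. A quick demonstration of what that one-liner produces, using flags from the list
above:

  def interleave_args(args, token):
      # ['-a', '-b'] with token '-X' becomes ['-X', '-a', '-X', '-b'].
      return list(sum(zip([token] * len(args), args), ()))

  print(interleave_args(['-analyzer-output=text', '-analyzer-checker=core'],
                        '-Xanalyzer'))
  # ['-Xanalyzer', '-analyzer-output=text', '-Xanalyzer', '-analyzer-checker=core']
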
diff --git a/build/toolchain/concurrent_links.gni b/build/toolchain/concurrent_links.gni
deleted file mode 100644
index e68384f..0000000
--- a/build/toolchain/concurrent_links.gni
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file should only be imported from files that define toolchains.
-# There's no way to enforce this exactly, but all toolchains are processed
-# in the context of the default_toolchain, so we can at least check for that.
-assert(current_toolchain == default_toolchain)
-
-import("//build/config/compiler/compiler.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/toolchain/toolchain.gni")
-
-declare_args() {
-  # Limit the number of concurrent links; we often want to run fewer
-  # links at once than we do compiles, because linking is memory-intensive.
-  # The default to use varies by platform and by the amount of memory
-  # available, so we call out to a script to get the right value.
-  concurrent_links = -1
-}
-
-if (concurrent_links == -1) {
-  if (use_thin_lto) {
-    _args = [
-      "--mem_per_link_gb=10",
-      "--reserve_mem_gb=10",
-    ]
-  } else if (use_sanitizer_coverage || use_fuzzing_engine) {
-    # Sanitizer coverage instrumentation increases linker memory consumption
-    # significantly.
-    _args = [ "--mem_per_link_gb=16" ]
-  } else if (is_win && symbol_level == 1 && !is_debug) {
-    _args = [ "--mem_per_link_gb=3" ]
-  } else if (is_win) {
-    _args = [ "--mem_per_link_gb=5" ]
-  } else if (is_mac) {
-    _args = [ "--mem_per_link_gb=4" ]
-  } else if (is_android && !is_component_build && symbol_level == 2) {
-    # Full debug symbols require large memory for link.
-    _args = [ "--mem_per_link_gb=25" ]
-  } else if (is_android && !is_debug && !using_sanitizer && symbol_level == 0) {
-    # Increase the number of concurrent links for release bots. Debug builds
-    # make heavier use of ProGuard, and so should not be raised. Sanitizers also
-    # increase the memory overhead.
-    _args = [ "--mem_per_link_gb=4" ]
-  } else if (is_linux && !is_chromeos && symbol_level == 0) {
-    # Memory consumption on link without debug symbols is low on linux.
-    _args = [ "--mem_per_link_gb=3" ]
-  } else {
-    _args = []
-  }
-
-  # TODO(crbug.com/617429) Pass more build configuration info to the script
-  # so that we can compute better values.
-  concurrent_links = exec_script("get_concurrent_links.py", _args, "value")
-}
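
The .gni above only chooses a per-configuration memory estimate; the actual pool depth
comes from get_concurrent_links.py, whose deletion is truncated at the end of this diff.
The visible logic boils down to "usable RAM divided by memory per link, capped by the
CPU count"; a condensed sketch of that formula:

  import multiprocessing

  def concurrent_links(total_mem_bytes, mem_per_link_gb=8, reserve_mem_gb=0):
      # Reserve some memory, see how many links fit in what is left, always
      # allow at least one, and never exceed the CPU count.
      usable = max(0, total_mem_bytes - reserve_mem_gb * 2**30)
      by_memory = max(1, usable // (mem_per_link_gb * 2**30))
      return min(by_memory, multiprocessing.cpu_count())

  # e.g. a 32 GB machine with the thin-LTO settings above:
  print(concurrent_links(32 * 2**30, mem_per_link_gb=10, reserve_mem_gb=10))
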
diff --git a/build/toolchain/cros/BUILD.gn b/build/toolchain/cros/BUILD.gn
deleted file mode 100644
index bf139dc..0000000
--- a/build/toolchain/cros/BUILD.gn
+++ /dev/null
@@ -1,136 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/compiler/compiler.gni")
-import("//build/config/sysroot.gni")
-import("//build/toolchain/gcc_toolchain.gni")
-import("//build/toolchain/cros_toolchain.gni")
-
-# This is the normal toolchain for most targets.
-gcc_toolchain("target") {
-  ar = cros_target_ar
-  cc = cros_target_cc
-  cxx = cros_target_cxx
-  ld = cxx
-  if (cros_target_ld != "") {
-    ld = cros_target_ld
-  }
-  if (cros_target_nm != "") {
-    nm = cros_target_nm
-  }
-  if (cros_target_readelf != "") {
-    readelf = cros_target_readelf
-  }
-  extra_cflags = cros_target_extra_cflags
-  extra_cppflags = cros_target_extra_cppflags
-  extra_cxxflags = cros_target_extra_cxxflags
-  extra_ldflags = cros_target_extra_ldflags
-
-  toolchain_args = {
-    cc_wrapper = ""
-    current_cpu = target_cpu
-    current_os = "chromeos"
-    is_clang = is_clang
-    use_debug_fission = use_debug_fission
-    use_gold = use_gold
-    use_sysroot = use_sysroot
-  }
-}
-
-# This is a special toolchain needed just for the nacl_bootstrap target in
-# //native_client/src/trusted/service_runtime/linux. It is identical
-# to ":target" except that it forces use_debug_fission, use_gold, and
-# use_sysroot off, and allows the user to set different sets of extra flags.
-gcc_toolchain("nacl_bootstrap") {
-  ar = cros_target_ar
-  cc = cros_target_cc
-  cxx = cros_target_cxx
-  ld = cxx
-  if (cros_target_ld != "") {
-    ld = cros_target_ld
-  }
-  if (cros_target_nm != "") {
-    nm = cros_target_nm
-  }
-  if (cros_target_readelf != "") {
-    readelf = cros_target_readelf
-  }
-  extra_cflags = cros_nacl_bootstrap_extra_cflags
-  extra_cppflags = cros_nacl_bootstrap_extra_cppflags
-  extra_cxxflags = cros_nacl_bootstrap_extra_cxxflags
-  extra_ldflags = cros_nacl_bootstrap_extra_ldflags
-
-  toolchain_args = {
-    cc_wrapper = ""
-    current_cpu = target_cpu
-    current_os = "chromeos"
-    is_clang = is_clang
-    use_debug_fission = false
-    use_gold = false
-    use_sysroot = false
-  }
-}
-
-gcc_toolchain("host") {
-  # These are args for the template.
-  ar = cros_host_ar
-  cc = cros_host_cc
-  cxx = cros_host_cxx
-  ld = cxx
-  if (cros_host_ld != "") {
-    ld = cros_host_ld
-  }
-  if (cros_host_nm != "") {
-    nm = cros_host_nm
-  }
-  if (cros_host_readelf != "") {
-    readelf = cros_host_readelf
-  }
-  extra_cflags = cros_host_extra_cflags
-  extra_cppflags = cros_host_extra_cppflags
-  extra_cxxflags = cros_host_extra_cxxflags
-  extra_ldflags = cros_host_extra_ldflags
-
-  toolchain_args = {
-    cc_wrapper = ""
-    is_clang = cros_host_is_clang
-    current_cpu = host_cpu
-    current_os = "linux"
-    use_sysroot = false
-  }
-}
-
-gcc_toolchain("v8_snapshot") {
-  # These are args for the template.
-  ar = cros_v8_snapshot_ar
-  cc = cros_v8_snapshot_cc
-  cxx = cros_v8_snapshot_cxx
-  ld = cxx
-  if (cros_v8_snapshot_ld != "") {
-    ld = cros_v8_snapshot_ld
-  }
-  if (cros_v8_snapshot_nm != "") {
-    nm = cros_v8_snapshot_nm
-  }
-  if (cros_v8_snapshot_readelf != "") {
-    readelf = cros_v8_snapshot_readelf
-  }
-  extra_cflags = cros_v8_snapshot_extra_cflags
-  extra_cppflags = cros_v8_snapshot_extra_cppflags
-  extra_cxxflags = cros_v8_snapshot_extra_cxxflags
-  extra_ldflags = cros_v8_snapshot_extra_ldflags
-
-  toolchain_args = {
-    cc_wrapper = ""
-    is_clang = cros_v8_snapshot_is_clang
-    if (target_cpu == "x86" || target_cpu == "arm" || target_cpu == "mipsel") {
-      current_cpu = "x86"
-    } else {
-      current_cpu = "x64"
-    }
-    v8_current_cpu = v8_target_cpu
-    current_os = "linux"
-    use_sysroot = false
-  }
-}
diff --git a/build/toolchain/cros_toolchain.gni b/build/toolchain/cros_toolchain.gni
deleted file mode 100644
index fdfdb07..0000000
--- a/build/toolchain/cros_toolchain.gni
+++ /dev/null
@@ -1,81 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# CrOS builds must cross-compile on a Linux host for the actual CrOS
-# device target. There are many different CrOS devices so the build
-# system provides configuration variables that permit a CrOS build to
-# control the cross-compilation tool chain. However, requiring such
-# fine-grained specification is tedious for build-bots and developers.
-# Consequently, the CrOS build system defaults to a convenience
-# compilation mode where the compilation host is also the build target.
-#
-# Chrome can be compiled in this way with the gn variable:
-#
-# target_os = "chromeos"
-#
-# To perform a board-specific build, first obtain the correct system
-# root (http://goo.gl/aFB4XH) for the board. Then configure GN to use it
-# by setting appropriate cross-compilation variables.
-#
-# For example, to compile a Chrome source tree in /g/src for an
-# auron_paine CrOS device with the system root cached in /g/.cros_cache,
-# the following GN arguments must be provided to configure
-# cross-compilation with Goma acceleration. (NB: additional variables
-# will be necessary to successfully compile a working CrOS Chrome. See
-# the definition of GYP_DEFINES inside a sysroot shell.)
-#
-# goma_dir = "/g/.cros_cache/common/goma+2"
-# target_sysroot = "/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7644.0.0+sysroot_chromeos-base_chromeos-chrome.tar.xz"
-# cros_target_cc = "x86_64-cros-linux-gnu-gcc -B/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7657.0.0+target_toolchain/usr/x86_64-pc-linux-gnu/x86_64-cros-linux-gnu/binutils-bin/2.25.51-gold"
-# cros_target_cxx = "x86_64-cros-linux-gnu-g++ -B/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7657.0.0+target_toolchain/usr/x86_64-pc-linux-gnu/x86_64-cros-linux-gnu/binutils-bin/2.25.51-gold"
-# cros_target_ar = "x86_64-cros-linux-gnu-gcc-ar"
-# target_cpu = "x64"
-
-declare_args() {
-  # These must be specified for a board-specific build.
-  cros_target_ar = "ar"
-  cros_target_cc = "gcc"
-  cros_target_cxx = "g++"
-  cros_target_ld = ""
-  cros_target_nm = ""
-  cros_target_readelf = ""
-
-  # These can be optionally set. The "_cppflags"  will be applied to *both*
-  # C and C++ files; use "_cxxflags" for C++-only flags.
-  cros_target_extra_cflags = ""
-  cros_target_extra_cppflags = ""
-  cros_target_extra_cxxflags = ""
-  cros_target_extra_ldflags = ""
-
-  # is_clang is used instead of cros_target_is_clang
-
-  cros_host_ar = "ar"
-  cros_host_cc = "gcc"
-  cros_host_cxx = "g++"
-  cros_host_ld = ""
-  cros_host_nm = ""
-  cros_host_readelf = ""
-  cros_host_extra_cflags = ""
-  cros_host_extra_cppflags = ""
-  cros_host_extra_cxxflags = ""
-  cros_host_extra_ldflags = ""
-  cros_host_is_clang = false
-
-  cros_v8_snapshot_ar = "ar"
-  cros_v8_snapshot_cc = "gcc"
-  cros_v8_snapshot_cxx = "g++"
-  cros_v8_snapshot_ld = ""
-  cros_v8_snapshot_nm = ""
-  cros_v8_snapshot_readelf = ""
-  cros_v8_snapshot_extra_cflags = ""
-  cros_v8_snapshot_extra_cppflags = ""
-  cros_v8_snapshot_extra_cxxflags = ""
-  cros_v8_snapshot_extra_ldflags = ""
-  cros_v8_snapshot_is_clang = false
-
-  cros_nacl_bootstrap_extra_cflags = ""
-  cros_nacl_bootstrap_extra_cppflags = ""
-  cros_nacl_bootstrap_extra_cxxflags = ""
-  cros_nacl_bootstrap_extra_ldflags = ""
-}
diff --git a/build/toolchain/fuchsia/BUILD.gn b/build/toolchain/fuchsia/BUILD.gn
deleted file mode 100644
index a5151e3..0000000
--- a/build/toolchain/fuchsia/BUILD.gn
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/toolchain/gcc_toolchain.gni")
-import("//build/config/fuchsia/config.gni")
-
-# Fuchsia builds using the Clang toolchain, with most parameters common across
-# the different target architectures.
-template("fuchsia_clang_toolchain") {
-  clang_toolchain(target_name) {
-    assert(host_os == "linux")
-    assert(defined(invoker.toolchain_args),
-           "toolchain_args must be defined for fuchsia_clang_toolchain()")
-
-    # We want to build and strip binaries, but retain the unstripped binaries
-    # in runtime_deps to make them available for isolates.
-    strip = rebase_path("//third_party/eu-strip/bin/eu-strip", root_build_dir)
-    use_unstripped_as_runtime_outputs = true
-
-    toolchain_args = invoker.toolchain_args
-    toolchain_args.current_os = "fuchsia"
-  }
-}
-
-fuchsia_clang_toolchain("x64") {
-  toolchain_args = {
-    current_cpu = "x64"
-  }
-}
-
-fuchsia_clang_toolchain("arm64") {
-  toolchain_args = {
-    current_cpu = "arm64"
-  }
-}
diff --git a/build/toolchain/fuchsia/OWNERS b/build/toolchain/fuchsia/OWNERS
deleted file mode 100644
index 3f809e8..0000000
--- a/build/toolchain/fuchsia/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-scottmg@chromium.org
diff --git a/build/toolchain/gcc_ar_wrapper.py b/build/toolchain/gcc_ar_wrapper.py
deleted file mode 100755
index 5977f44..0000000
--- a/build/toolchain/gcc_ar_wrapper.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Runs the 'ar' command after removing its output file first.
-
-This script is invoked like:
-  python gcc_ar_wrapper.py --ar=$AR --output=$OUT $OP $INPUTS
-to do the equivalent of:
-  rm -f $OUT && $AR $OP $OUT $INPUTS
-"""
-
-import argparse
-import os
-import subprocess
-import sys
-
-import wrapper_utils
-
-
-def main():
-  parser = argparse.ArgumentParser(description=__doc__)
-  parser.add_argument('--ar',
-                      required=True,
-                      help='The ar binary to run',
-                      metavar='PATH')
-  parser.add_argument('--output',
-                      required=True,
-                      help='Output archive file',
-                      metavar='ARCHIVE')
-  parser.add_argument('--plugin',
-                      help='Load plugin')
-  parser.add_argument('--resource-whitelist',
-                      help='Merge all resource whitelists into a single file.',
-                      metavar='PATH')
-  parser.add_argument('operation',
-                      help='Operation on the archive')
-  parser.add_argument('inputs', nargs='+',
-                      help='Input files')
-  args = parser.parse_args()
-
-  # Specifies the type of object file ar should examine.
-  # The ar on linux ignores this option.
-  object_mode = []
-  if sys.platform.startswith('aix'):
-    # The @file feature is not available on ar for AIX.
-    # For linux (and other posix like systems), the @file_name
-    # option reads the contents of file_name as command line arguments.
-    # For AIX we must parse these (rsp files) manually.
-    # Read rspfile.
-    args.inputs  = wrapper_utils.ResolveRspLinks(args.inputs)
-    object_mode = ['-X64']
-  else:
-    if args.resource_whitelist:
-      whitelist_candidates = wrapper_utils.ResolveRspLinks(args.inputs)
-      wrapper_utils.CombineResourceWhitelists(
-          whitelist_candidates, args.resource_whitelist)
-
-  command = [args.ar] + object_mode + args.operation.split()
-  if args.plugin is not None:
-    command += ['--plugin', args.plugin]
-  command.append(args.output)
-  command += args.inputs
-
-  # Remove the output file first.
-  try:
-    os.remove(args.output)
-  except OSError as e:
-    if e.errno != os.errno.ENOENT:
-      raise
-
-  # Now just run the ar command.
-  return subprocess.call(wrapper_utils.CommandToRun(command))
-
-
-if __name__ == "__main__":
-  sys.exit(main())
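
Stripped of the AIX -X64 mode, plugin, and whitelist handling, the wrapper above is a
shell-free "rm -f $OUT && $AR $OP $OUT $INPUTS". A condensed sketch of that core:

  import errno
  import os
  import subprocess

  def run_ar(ar, operation, output, inputs):
      # ar appends to an existing archive, so delete any stale output first.
      try:
          os.remove(output)
      except OSError as e:
          if e.errno != errno.ENOENT:
              raise
      return subprocess.call([ar] + operation.split() + [output] + list(inputs))
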
diff --git a/build/toolchain/gcc_compile_wrapper.py b/build/toolchain/gcc_compile_wrapper.py
deleted file mode 100755
index 33cac37..0000000
--- a/build/toolchain/gcc_compile_wrapper.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Runs a compilation command.
-
-This script exists to avoid using complex shell commands in
-gcc_toolchain.gni's tool("cxx") and tool("cc") in case the host running the
-compiler does not have a POSIX-like shell (e.g. Windows).
-"""
-
-import argparse
-import sys
-
-import wrapper_utils
-
-
-def main():
-  parser = argparse.ArgumentParser(description=__doc__)
-  parser.add_argument('--resource-whitelist',
-                      help='Generate a resource whitelist for this target.',
-                      metavar='PATH')
-  parser.add_argument('command', nargs=argparse.REMAINDER,
-                      help='Compilation command')
-  args = parser.parse_args()
-
-  returncode, stderr = wrapper_utils.CaptureCommandStderr(
-      wrapper_utils.CommandToRun(args.command))
-
-  used_resources = wrapper_utils.ExtractResourceIdsFromPragmaWarnings(stderr)
-  sys.stderr.write(stderr)
-
-  if args.resource_whitelist:
-    with open(args.resource_whitelist, 'w') as f:
-      if used_resources:
-        f.write('\n'.join(str(resource) for resource in used_resources))
-        f.write('\n')
-
-  return returncode
-
-if __name__ == "__main__":
-  sys.exit(main())
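
wrapper_utils.CaptureCommandStderr() is defined elsewhere, not in this hunk; the wrapper
above assumes it runs the compiler while buffering stderr, so the output can be scanned
for resource-id warnings and then replayed to the user. A sketch under that assumption:

  import subprocess

  def capture_command_stderr(command, env=None):
      # Run |command| and return (exit_code, stderr_text) so the caller can
      # inspect stderr before forwarding it to sys.stderr.
      proc = subprocess.Popen(command, env=env, stderr=subprocess.PIPE)
      _, stderr = proc.communicate()
      return proc.returncode, stderr.decode(errors="replace")
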
diff --git a/build/toolchain/gcc_link_wrapper.py b/build/toolchain/gcc_link_wrapper.py
deleted file mode 100755
index 8892f14..0000000
--- a/build/toolchain/gcc_link_wrapper.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Runs a linking command and optionally a strip command.
-
-This script exists to avoid using complex shell commands in
-gcc_toolchain.gni's tool("link"), in case the host running the compiler
-does not have a POSIX-like shell (e.g. Windows).
-"""
-
-import argparse
-import os
-import subprocess
-import sys
-
-import wrapper_utils
-
-
-# When running on a Windows host and using a toolchain whose tools are
-# actually wrapper scripts (i.e. .bat files on Windows) rather than binary
-# executables, the "command" to run has to be prefixed with this magic.
-# The GN toolchain definitions take care of that for when GN/Ninja is
-# running the tool directly.  When that command is passed in to this
-# script, it appears as a unitary string but needs to be split up so that
-# just 'cmd' is the actual command given to Python's subprocess module.
-BAT_PREFIX = 'cmd /c call '
-
-def CommandToRun(command):
-  if command[0].startswith(BAT_PREFIX):
-    command = command[0].split(None, 3) + command[1:]
-  return command
-
-
-def main():
-  parser = argparse.ArgumentParser(description=__doc__)
-  parser.add_argument('--strip',
-                      help='The strip binary to run',
-                      metavar='PATH')
-  parser.add_argument('--unstripped-file',
-                      help='Executable file produced by linking command',
-                      metavar='FILE')
-  parser.add_argument('--map-file',
-                      help=('Use --Wl,-Map to generate a map file. Will be '
-                            'gzipped if extension ends with .gz'),
-                      metavar='FILE')
-  parser.add_argument('--output',
-                      required=True,
-                      help='Final output executable file',
-                      metavar='FILE')
-  parser.add_argument('command', nargs='+',
-                      help='Linking command')
-  args = parser.parse_args()
-
-  # Work-around for gold being slow-by-default. http://crbug.com/632230
-  fast_env = dict(os.environ)
-  fast_env['LC_ALL'] = 'C'
-  result = wrapper_utils.RunLinkWithOptionalMapFile(args.command, env=fast_env,
-                                                    map_file=args.map_file)
-  if result != 0:
-    return result
-
-  # Finally, strip the linked executable (if desired).
-  if args.strip:
-    result = subprocess.call(CommandToRun([
-        args.strip, '-o', args.output, args.unstripped_file
-        ]))
-
-  return result
-
-
-if __name__ == "__main__":
-  sys.exit(main())
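
CommandToRun() only matters when the "linker" is really a Windows .bat wrapper: GN hands
the tool over as one string starting with "cmd /c call", which has to be split into real
argv entries before subprocess can run it. An illustration with a hypothetical path:

  BAT_PREFIX = 'cmd /c call '

  def CommandToRun(command):
      if command[0].startswith(BAT_PREFIX):
          command = command[0].split(None, 3) + command[1:]
      return command

  print(CommandToRun(['cmd /c call C:/nacl_toolchain/ld.bat', '-o', 'out.exe']))
  # ['cmd', '/c', 'call', 'C:/nacl_toolchain/ld.bat', '-o', 'out.exe']
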
diff --git a/build/toolchain/gcc_solink_wrapper.py b/build/toolchain/gcc_solink_wrapper.py
deleted file mode 100755
index 5159fce..0000000
--- a/build/toolchain/gcc_solink_wrapper.py
+++ /dev/null
@@ -1,131 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Runs 'ld -shared' and generates a .TOC file that's untouched when unchanged.
-
-This script exists to avoid using complex shell commands in
-gcc_toolchain.gni's tool("solink"), in case the host running the compiler
-does not have a POSIX-like shell (e.g. Windows).
-"""
-
-import argparse
-import os
-import subprocess
-import sys
-
-import wrapper_utils
-
-
-def CollectSONAME(args):
-  """Replaces: readelf -d $sofile | grep SONAME"""
-  toc = ''
-  readelf = subprocess.Popen(wrapper_utils.CommandToRun(
-      [args.readelf, '-d', args.sofile]), stdout=subprocess.PIPE, bufsize=-1)
-  for line in readelf.stdout:
-    if 'SONAME' in line:
-      toc += line
-  return readelf.wait(), toc
-
-
-def CollectDynSym(args):
-  """Replaces: nm --format=posix -g -D $sofile | cut -f1-2 -d' '"""
-  toc = ''
-  nm = subprocess.Popen(wrapper_utils.CommandToRun([
-      args.nm, '--format=posix', '-g', '-D', args.sofile]),
-                        stdout=subprocess.PIPE, bufsize=-1)
-  for line in nm.stdout:
-    toc += ' '.join(line.split(' ', 2)[:2]) + '\n'
-  return nm.wait(), toc
-
-
-def CollectTOC(args):
-  result, toc = CollectSONAME(args)
-  if result == 0:
-    result, dynsym = CollectDynSym(args)
-    toc += dynsym
-  return result, toc
-
-
-def UpdateTOC(tocfile, toc):
-  if os.path.exists(tocfile):
-    old_toc = open(tocfile, 'r').read()
-  else:
-    old_toc = None
-  if toc != old_toc:
-    open(tocfile, 'w').write(toc)
-
-
-def main():
-  parser = argparse.ArgumentParser(description=__doc__)
-  parser.add_argument('--readelf',
-                      required=True,
-                      help='The readelf binary to run',
-                      metavar='PATH')
-  parser.add_argument('--nm',
-                      required=True,
-                      help='The nm binary to run',
-                      metavar='PATH')
-  parser.add_argument('--strip',
-                      help='The strip binary to run',
-                      metavar='PATH')
-  parser.add_argument('--sofile',
-                      required=True,
-                      help='Shared object file produced by linking command',
-                      metavar='FILE')
-  parser.add_argument('--tocfile',
-                      required=True,
-                      help='Output table-of-contents file',
-                      metavar='FILE')
-  parser.add_argument('--map-file',
-                      help=('Use --Wl,-Map to generate a map file. Will be '
-                            'gzipped if extension ends with .gz'),
-                      metavar='FILE')
-  parser.add_argument('--output',
-                      required=True,
-                      help='Final output shared object file',
-                      metavar='FILE')
-  parser.add_argument('--resource-whitelist',
-                      help='Merge all resource whitelists into a single file.',
-                      metavar='PATH')
-  parser.add_argument('command', nargs='+',
-                      help='Linking command')
-  args = parser.parse_args()
-
-  # Work-around for gold being slow-by-default. http://crbug.com/632230
-  fast_env = dict(os.environ)
-  fast_env['LC_ALL'] = 'C'
-
-  if args.resource_whitelist:
-    whitelist_candidates = wrapper_utils.ResolveRspLinks(args.command)
-    wrapper_utils.CombineResourceWhitelists(
-        whitelist_candidates, args.resource_whitelist)
-
-  # First, run the actual link.
-  command = wrapper_utils.CommandToRun(args.command)
-  result = wrapper_utils.RunLinkWithOptionalMapFile(command, env=fast_env,
-                                                    map_file=args.map_file)
-
-  if result != 0:
-    return result
-
-  # Next, generate the contents of the TOC file.
-  result, toc = CollectTOC(args)
-  if result != 0:
-    return result
-
-  # If there is an existing TOC file with identical contents, leave it alone.
-  # Otherwise, write out the TOC file.
-  UpdateTOC(args.tocfile, toc)
-
-  # Finally, strip the linked shared object file (if desired).
-  if args.strip:
-    result = subprocess.call(wrapper_utils.CommandToRun(
-        [args.strip, '-o', args.output, args.sofile]))
-
-  return result
-
-
-if __name__ == "__main__":
-  sys.exit(main())
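
The .TOC file is what lets Ninja skip relinking dependents when a shared library's
interface (SONAME plus exported dynamic symbols) has not changed: the file is rewritten
only when its contents differ, which pairs with restat = true in the solink tool later
in this diff. A minimal write-if-changed sketch of the idea behind UpdateTOC:

  import os

  def update_toc(tocfile, toc):
      # Leave the file untouched (same mtime) when the interface is unchanged,
      # so Ninja's restat check prunes downstream relinks.
      if os.path.exists(tocfile):
          with open(tocfile) as f:
              if f.read() == toc:
                  return
      with open(tocfile, 'w') as f:
          f.write(toc)
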
diff --git a/build/toolchain/gcc_toolchain.gni b/build/toolchain/gcc_toolchain.gni
deleted file mode 100644
index b6f63d7..0000000
--- a/build/toolchain/gcc_toolchain.gni
+++ /dev/null
@@ -1,630 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/android/config.gni")
-import("//build/config/clang/clang.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/config/v8_target_cpu.gni")
-import("//build/toolchain/cc_wrapper.gni")
-import("//build/toolchain/clang_static_analyzer.gni")
-import("//build/toolchain/goma.gni")
-import("//build/toolchain/toolchain.gni")
-
-if (is_nacl) {
-  # To keep NaCl variables out of builds that don't include NaCl, all
-  # variables defined in nacl/config.gni referenced here should be protected by
-  # is_nacl conditions.
-  import("//build/config/nacl/config.gni")
-}
-
-# Path to the Clang static analysis wrapper script.
-# REVIEWERS: can you suggest a better location for this?
-# GN is really picky about dead stores of variables except at the global scope.
-analyzer_wrapper =
-    rebase_path("//build/toolchain/clang_static_analyzer_wrapper.py",
-                root_build_dir) + " --mode=clang"
-
-# This template defines a toolchain for something that works like gcc
-# (including clang).
-#
-# It requires the following variables specifying the executables to run:
-#  - ar
-#  - cc
-#  - cxx
-#  - ld
-#
-# Optional parameters that control the tools:
-#
-#  - extra_cflags
-#      Extra flags to be appended when compiling C files (but not C++ files).
-#  - extra_cppflags
-#      Extra flags to be appended when compiling both C and C++ files. "CPP"
-#      stands for "C PreProcessor" in this context, although it can be
-#      used for non-preprocessor flags as well. Not to be confused with
-#      "CXX" (which follows).
-#  - extra_cxxflags
-#      Extra flags to be appended when compiling C++ files (but not C files).
-#  - extra_asmflags
-#      Extra flags to be appended when compiling assembly.
-#  - extra_ldflags
-#      Extra flags to be appended when linking
-#
-#  - libs_section_prefix
-#  - libs_section_postfix
-#      The contents of these strings, if specified, will be placed around
-#      the libs section of the linker line. It allows one to inject libraries
-#      at the beginning and end for all targets in a toolchain.
-#  - solink_libs_section_prefix
-#  - solink_libs_section_postfix
-#      Same as libs_section_{pre,post}fix except used for solink instead of link.
-#  - link_outputs
-#      The content of this array, if specified, will be added to the list of
-#      outputs from the link command. This can be useful in conjunction with
-#      the post_link parameter.
-#  - use_unstripped_as_runtime_outputs
-#      When |strip| is set, mark unstripped executables as runtime deps rather
-#      than stripped ones.
-#  - post_link
-#      The content of this string, if specified, will be run as a separate
-#      command following the link command.
-#  - deps
-#      Just forwarded to the toolchain definition.
-#  - executable_extension
-#      If this string is specified it will be used for the file extension
-#      for an executable, rather than using no extension; targets will
-#      still be able to override the extension using the output_extension
-#      variable.
-#  - rebuild_define
-#      The contents of this string, if specified, will be passed as a #define
-#      to the toolchain. It can be used to force recompiles whenever a
-#      toolchain is updated.
-#  - shlib_extension
-#      If this string is specified it will be used for the file extension
-#      for a shared library, rather than default value specified in
-#      toolchain.gni
-#  - strip
-#      Location of the strip executable. When specified, strip will be run on
-#      all shared libraries and executables as they are built. The pre-stripped
-#      artifacts will be put in lib.unstripped/ and exe.unstripped/.
-template("gcc_toolchain") {
-  toolchain(target_name) {
-    assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
-    assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
-    assert(defined(invoker.cxx), "gcc_toolchain() must specify a \"cxx\" value")
-    assert(defined(invoker.ld), "gcc_toolchain() must specify a \"ld\" value")
-
-    # This define changes when the toolchain changes, forcing a rebuild.
-    # Nothing should ever use this define.
-    if (defined(invoker.rebuild_define)) {
-      rebuild_string = "-D" + invoker.rebuild_define + " "
-    } else {
-      rebuild_string = ""
-    }
-
-    # GN's syntax can't handle more than one scope dereference at once, like
-    # "invoker.toolchain_args.foo", so make a temporary to hold the toolchain
-    # args so we can do "invoker_toolchain_args.foo".
-    assert(defined(invoker.toolchain_args),
-           "Toolchains must specify toolchain_args")
-    invoker_toolchain_args = invoker.toolchain_args
-    assert(defined(invoker_toolchain_args.current_cpu),
-           "toolchain_args must specify a current_cpu")
-    assert(defined(invoker_toolchain_args.current_os),
-           "toolchain_args must specify a current_os")
-
-    # When invoking this toolchain not as the default one, these args will be
-    # passed to the build. They are ignored when this is the default toolchain.
-    toolchain_args = {
-      # Populate toolchain args from the invoker.
-      forward_variables_from(invoker_toolchain_args, "*")
-
-      # The host toolchain value computed by the default toolchain's setup
-      # needs to be passed through unchanged to all secondary toolchains to
-      # ensure that it's always the same, regardless of the values that may be
-      # set on those toolchains.
-      host_toolchain = host_toolchain
-
-      if (!defined(invoker_toolchain_args.v8_current_cpu)) {
-        v8_current_cpu = invoker_toolchain_args.current_cpu
-      }
-    }
-
-    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
-    # toolchain args, use those values, otherwise default to the global one.
-    # This works because the only reasonable override that toolchains might
-    # supply for these values are to force-disable them.
-    if (defined(toolchain_args.use_goma)) {
-      toolchain_uses_goma = toolchain_args.use_goma
-    } else {
-      toolchain_uses_goma = use_goma
-    }
-    if (defined(toolchain_args.cc_wrapper)) {
-      toolchain_cc_wrapper = toolchain_args.cc_wrapper
-    } else {
-      toolchain_cc_wrapper = cc_wrapper
-    }
-    assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma),
-           "Goma and cc_wrapper can't be used together.")
-
-    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
-    # toolchain args, use those values, otherwise default to the global one.
-    # This works because the only reasonable override that toolchains might
-    # supply for these values are to force-disable them.
-    if (toolchain_uses_goma) {
-      goma_path = "$goma_dir/gomacc"
-
-      # Use the static analysis script if static analysis is turned on
-      # AND the tool has not opted out by setting
-      # 'is_clang_static_analysis_supported' to false.
-      if (is_clang && use_clang_static_analyzer &&
-          (!defined(invoker.is_clang_analysis_supported) ||
-           invoker.is_clang_analysis_supported)) {
-        compiler_prefix = "${analyzer_wrapper} ${goma_path} "
-
-        # Create a distinct variable for "asm", since analysis runs pass
-        # a bunch of flags to clang/clang++ that are nonsensical on assembler
-        # runs.
-        asm = "${goma_path} ${invoker.cc}"
-      } else {
-        compiler_prefix = "${goma_path} "
-      }
-    } else {
-      if (is_clang && use_clang_static_analyzer &&
-          (!defined(invoker.is_clang_analysis_supported) ||
-           invoker.is_clang_analysis_supported)) {
-        compiler_prefix = "${analyzer_wrapper} "
-        asm = invoker.cc
-      } else {
-        compiler_prefix = "${toolchain_cc_wrapper} "
-      }
-    }
-
-    cc = compiler_prefix + invoker.cc
-    cxx = compiler_prefix + invoker.cxx
-    ar = invoker.ar
-    ld = invoker.ld
-    if (!defined(asm)) {
-      asm = cc
-    }
-    if (defined(invoker.readelf)) {
-      readelf = invoker.readelf
-    } else {
-      readelf = "readelf"
-    }
-    if (defined(invoker.nm)) {
-      nm = invoker.nm
-    } else {
-      nm = "nm"
-    }
-
-    if (defined(invoker.shlib_extension)) {
-      default_shlib_extension = invoker.shlib_extension
-    } else {
-      default_shlib_extension = shlib_extension
-    }
-
-    if (defined(invoker.executable_extension)) {
-      default_executable_extension = invoker.executable_extension
-    } else {
-      default_executable_extension = ""
-    }
-
-    # Bring these into our scope for string interpolation with default values.
-    if (defined(invoker.libs_section_prefix)) {
-      libs_section_prefix = invoker.libs_section_prefix
-    } else {
-      libs_section_prefix = ""
-    }
-
-    if (defined(invoker.libs_section_postfix)) {
-      libs_section_postfix = invoker.libs_section_postfix
-    } else {
-      libs_section_postfix = ""
-    }
-
-    if (defined(invoker.solink_libs_section_prefix)) {
-      solink_libs_section_prefix = invoker.solink_libs_section_prefix
-    } else {
-      solink_libs_section_prefix = ""
-    }
-
-    if (defined(invoker.solink_libs_section_postfix)) {
-      solink_libs_section_postfix = invoker.solink_libs_section_postfix
-    } else {
-      solink_libs_section_postfix = ""
-    }
-
-    if (defined(invoker.extra_cflags) && invoker.extra_cflags != "") {
-      extra_cflags = " " + invoker.extra_cflags
-    } else {
-      extra_cflags = ""
-    }
-
-    if (defined(invoker.extra_cppflags) && invoker.extra_cppflags != "") {
-      extra_cppflags = " " + invoker.extra_cppflags
-    } else {
-      extra_cppflags = ""
-    }
-
-    if (defined(invoker.extra_cxxflags) && invoker.extra_cxxflags != "") {
-      extra_cxxflags = " " + invoker.extra_cxxflags
-    } else {
-      extra_cxxflags = ""
-    }
-
-    if (defined(invoker.extra_asmflags) && invoker.extra_asmflags != "") {
-      extra_asmflags = " " + invoker.extra_asmflags
-    } else {
-      extra_asmflags = ""
-    }
-
-    if (defined(invoker.extra_ldflags) && invoker.extra_ldflags != "") {
-      extra_ldflags = " " + invoker.extra_ldflags
-    } else {
-      extra_ldflags = ""
-    }
-
-    enable_linker_map = defined(invoker.enable_linker_map) &&
-                        invoker.enable_linker_map && generate_linker_map
-
-    # These library switches can apply to all tools below.
-    lib_switch = "-l"
-    lib_dir_switch = "-L"
-
-    # Object files go in this directory.
-    object_subdir = "{{target_out_dir}}/{{label_name}}"
-
-    tool("cc") {
-      depfile = "{{output}}.d"
-      command = "$cc -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cppflags}${extra_cflags} -c {{source}} -o {{output}}"
-      depsformat = "gcc"
-      description = "CC {{output}}"
-      outputs = [
-        # The whitelist file is also an output, but ninja does not
-        # currently support multiple outputs for tool("cc").
-        "$object_subdir/{{source_name_part}}.o",
-      ]
-      if (enable_resource_whitelist_generation) {
-        compile_wrapper =
-            rebase_path("//build/toolchain/gcc_compile_wrapper.py",
-                        root_build_dir)
-        command = "$python_path \"$compile_wrapper\" --resource-whitelist=\"{{output}}.whitelist\" $command"
-      }
-    }
-
-    tool("cxx") {
-      depfile = "{{output}}.d"
-      command = "$cxx -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}"
-      depsformat = "gcc"
-      description = "CXX {{output}}"
-      outputs = [
-        # The whitelist file is also an output, but ninja does not
-        # currently support multiple outputs for tool("cxx").
-        "$object_subdir/{{source_name_part}}.o",
-      ]
-      if (enable_resource_whitelist_generation) {
-        compile_wrapper =
-            rebase_path("//build/toolchain/gcc_compile_wrapper.py",
-                        root_build_dir)
-        command = "$python_path \"$compile_wrapper\" --resource-whitelist=\"{{output}}.whitelist\" $command"
-      }
-    }
-
-    tool("asm") {
-      # For GCC we can just use the C compiler to compile assembly.
-      depfile = "{{output}}.d"
-      command = "$asm -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}}${extra_asmflags} -c {{source}} -o {{output}}"
-      depsformat = "gcc"
-      description = "ASM {{output}}"
-      outputs = [
-        "$object_subdir/{{source_name_part}}.o",
-      ]
-    }
-
-    tool("alink") {
-      rspfile = "{{output}}.rsp"
-      whitelist_flag = " "
-      if (enable_resource_whitelist_generation) {
-        whitelist_flag = " --resource-whitelist=\"{{output}}.whitelist\""
-      }
-
-      # This needs a Python script to avoid using simple sh features in this
-      # command, in case the host does not use a POSIX shell (e.g. compiling
-      # POSIX-like toolchains such as NaCl on Windows).
-      ar_wrapper =
-          rebase_path("//build/toolchain/gcc_ar_wrapper.py", root_build_dir)
-
-      if (current_os == "aix") {
-        # We use slightly different arflags for AIX.
-        extra_arflags = "-r -c -s"
-      } else {
-        extra_arflags = "-r -c -s -D"
-      }
-
-      # Almost all targets build with //build/config/compiler:thin_archive which
-      # adds -T to arflags.
-      command = "$python_path \"$ar_wrapper\"$whitelist_flag --output={{output}} --ar=\"$ar\" \"{{arflags}} $extra_arflags\" @\"$rspfile\""
-      description = "AR {{output}}"
-      rspfile_content = "{{inputs}}"
-      outputs = [
-        "{{output_dir}}/{{target_output_name}}{{output_extension}}",
-      ]
-
-      # Shared libraries go in the target out directory by default so we can
-      # generate different targets with the same name and not have them collide.
-      default_output_dir = "{{target_out_dir}}"
-      default_output_extension = ".a"
-      output_prefix = "lib"
-    }
-
-    tool("solink") {
-      soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
-      sofile = "{{output_dir}}/$soname"  # Possibly including toolchain dir.
-      rspfile = sofile + ".rsp"
-      pool = "//build/toolchain:link_pool($default_toolchain)"
-      whitelist_flag = " "
-      if (enable_resource_whitelist_generation) {
-        whitelist_file = "$sofile.whitelist"
-        whitelist_flag = " --resource-whitelist=\"$whitelist_file\""
-      }
-
-      if (defined(invoker.strip)) {
-        unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
-      } else {
-        unstripped_sofile = sofile
-      }
-
-      # These variables are not built into GN but are helpers that
-      # implement (1) linking to produce a .so, (2) extracting the symbols
-      # from that file (3) if the extracted list differs from the existing
-      # .TOC file, overwrite it, otherwise, don't change it.
-      tocfile = sofile + ".TOC"
-
-      link_command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
-
-      # Generate a map file to be used for binary size analysis.
-      # Map file adds ~10% to the link time on a z620.
-      # With target_os="android", libchrome.so.map.gz is ~20MB.
-      map_switch = ""
-      if (enable_linker_map && is_official_build) {
-        map_file = "$unstripped_sofile.map.gz"
-        map_switch = " --map-file \"$map_file\""
-      }
-
-      assert(defined(readelf), "to solink you must have a readelf")
-      assert(defined(nm), "to solink you must have an nm")
-      strip_switch = ""
-      if (defined(invoker.strip)) {
-        strip_switch = "--strip=${invoker.strip} "
-      }
-
-      # This needs a Python script to avoid using a complex shell command
-      # requiring sh control structures, pipelines, and POSIX utilities.
-      # The host might not have a POSIX shell and utilities (e.g. Windows).
-      solink_wrapper =
-          rebase_path("//build/toolchain/gcc_solink_wrapper.py", root_build_dir)
-      command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch--sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\"$whitelist_flag -- $link_command"
-
-      if (target_cpu == "mipsel" && is_component_build && is_android) {
-        rspfile_content = "-Wl,--start-group -Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix -Wl,--end-group"
-      } else {
-        rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
-      }
-
-      description = "SOLINK $sofile"
-
-      # Use this for {{output_extension}} expansions unless a target manually
-      # overrides it (in which case {{output_extension}} will be what the target
-      # specifies).
-      default_output_extension = default_shlib_extension
-
-      default_output_dir = "{{root_out_dir}}"
-      if (shlib_subdir != ".") {
-        default_output_dir += "/$shlib_subdir"
-      }
-
-      output_prefix = "lib"
-
-      # Since the above commands only updates the .TOC file when it changes, ask
-      # Ninja to check if the timestamp actually changed to know if downstream
-      # dependencies should be recompiled.
-      restat = true
-
-      # Tell GN about the output files. It will link to the sofile but use the
-      # tocfile for dependency management.
-      outputs = [
-        sofile,
-        tocfile,
-      ]
-      if (enable_resource_whitelist_generation) {
-        outputs += [ whitelist_file ]
-      }
-      if (sofile != unstripped_sofile) {
-        outputs += [ unstripped_sofile ]
-        if (defined(invoker.use_unstripped_as_runtime_outputs) &&
-            invoker.use_unstripped_as_runtime_outputs) {
-          runtime_outputs = [ unstripped_sofile ]
-        }
-      }
-      if (defined(map_file)) {
-        outputs += [ map_file ]
-      }
-      link_output = sofile
-      depend_output = tocfile
-    }
-
-    tool("solink_module") {
-      soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
-      sofile = "{{output_dir}}/$soname"
-      rspfile = sofile + ".rsp"
-      pool = "//build/toolchain:link_pool($default_toolchain)"
-
-      if (defined(invoker.strip)) {
-        unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
-      } else {
-        unstripped_sofile = sofile
-      }
-
-      command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
-
-      if (defined(invoker.strip)) {
-        strip_command = "${invoker.strip} -o \"$sofile\" \"$unstripped_sofile\""
-        command += " && " + strip_command
-      }
-      rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
-
-      description = "SOLINK_MODULE $sofile"
-
-      # Use this for {{output_extension}} expansions unless a target manually
-      # overrides it (in which case {{output_extension}} will be what the target
-      # specifies).
-      if (defined(invoker.loadable_module_extension)) {
-        default_output_extension = invoker.loadable_module_extension
-      } else {
-        default_output_extension = default_shlib_extension
-      }
-
-      default_output_dir = "{{root_out_dir}}"
-      if (shlib_subdir != ".") {
-        default_output_dir += "/$shlib_subdir"
-      }
-
-      output_prefix = "lib"
-
-      outputs = [
-        sofile,
-      ]
-      if (sofile != unstripped_sofile) {
-        outputs += [ unstripped_sofile ]
-        if (defined(invoker.use_unstripped_as_runtime_outputs) &&
-            invoker.use_unstripped_as_runtime_outputs) {
-          runtime_outputs = [ unstripped_sofile ]
-        }
-      }
-    }
-
-    tool("link") {
-      exename = "{{target_output_name}}{{output_extension}}"
-      outfile = "{{output_dir}}/$exename"
-      rspfile = "$outfile.rsp"
-      unstripped_outfile = outfile
-      pool = "//build/toolchain:link_pool($default_toolchain)"
-
-      # Use this for {{output_extension}} expansions unless a target manually
-      # overrides it (in which case {{output_extension}} will be what the target
-      # specifies).
-      default_output_extension = default_executable_extension
-
-      default_output_dir = "{{root_out_dir}}"
-
-      if (defined(invoker.strip)) {
-        unstripped_outfile = "{{root_out_dir}}/exe.unstripped/$exename"
-      }
-
-      # Generate a map file to be used for binary size analysis.
-      # Map file adds ~10% to the link time on a z620.
-      # With target_os="android", libchrome.so.map.gz is ~20MB.
-      map_switch = ""
-      if (enable_linker_map && is_official_build) {
-        map_file = "$unstripped_outfile.map.gz"
-        map_switch = " --map-file \"$map_file\""
-      }
-
-      start_group_flag = ""
-      end_group_flag = ""
-      if (current_os != "aix") {
-        # the "--start-group .. --end-group" feature isn't available on the aix ld.
-        start_group_flag = "-Wl,--start-group"
-        end_group_flag = "-Wl,--end-group "
-      }
-      link_command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" $start_group_flag @\"$rspfile\" {{solibs}} $end_group_flag $libs_section_prefix {{libs}} $libs_section_postfix"
-
-      strip_switch = ""
-
-      if (defined(invoker.strip)) {
-        strip_switch = " --strip=\"${invoker.strip}\" --unstripped-file=\"$unstripped_outfile\""
-      }
-
-      link_wrapper =
-          rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir)
-      command = "$python_path \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch -- $link_command"
-      description = "LINK $outfile"
-      rspfile_content = "{{inputs}}"
-      outputs = [
-        outfile,
-      ]
-      if (outfile != unstripped_outfile) {
-        outputs += [ unstripped_outfile ]
-        if (defined(invoker.use_unstripped_as_runtime_outputs) &&
-            invoker.use_unstripped_as_runtime_outputs) {
-          runtime_outputs = [ unstripped_outfile ]
-        }
-      }
-      if (defined(invoker.link_outputs)) {
-        outputs += invoker.link_outputs
-      }
-      if (defined(map_file)) {
-        outputs += [ map_file ]
-      }
-    }
-
-    # These two are really entirely generic, but have to be repeated in
-    # each toolchain because GN doesn't allow a template to be used here.
-    # See //build/toolchain/toolchain.gni for details.
-    tool("stamp") {
-      command = stamp_command
-      description = stamp_description
-    }
-    tool("copy") {
-      command = copy_command
-      description = copy_description
-    }
-
-    tool("action") {
-      pool = "//build/toolchain:action_pool($default_toolchain)"
-    }
-
-    forward_variables_from(invoker, [ "deps" ])
-  }
-}
-
-# This is a shorthand for gcc_toolchain instances based on the Chromium-built
-# version of Clang. Only the toolchain_cpu and toolchain_os variables need to
-# be specified by the invoker, and optionally toolprefix if it's a
-# cross-compile case. Note that for a cross-compile case this toolchain
-# requires a config to pass the appropriate -target option, or else it will
-# actually just be doing a native compile. The invoker can optionally override
-# use_gold too.
-template("clang_toolchain") {
-  if (defined(invoker.toolprefix)) {
-    toolprefix = invoker.toolprefix
-  } else {
-    toolprefix = ""
-  }
-
-  gcc_toolchain(target_name) {
-    prefix = rebase_path("$clang_base_path/bin", root_build_dir)
-    cc = "$prefix/clang"
-    cxx = "$prefix/clang++"
-    ld = cxx
-    readelf = "${toolprefix}readelf"
-    ar = "${prefix}/llvm-ar"
-    nm = "${toolprefix}nm"
-
-    forward_variables_from(invoker,
-                           [
-                             "strip",
-                             "is_clang_analysis_supported",
-                             "enable_linker_map",
-                             "use_unstripped_as_runtime_outputs",
-                           ])
-
-    toolchain_args = {
-      if (defined(invoker.toolchain_args)) {
-        forward_variables_from(invoker.toolchain_args, "*")
-      }
-      is_clang = true
-    }
-  }
-}
diff --git a/build/toolchain/get_concurrent_links.py b/build/toolchain/get_concurrent_links.py
deleted file mode 100644
index 45a932c..0000000
--- a/build/toolchain/get_concurrent_links.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This script computes the number of concurrent links we want to run in the build
-# as a function of machine spec. It's based on GetDefaultConcurrentLinks in GYP.
-
-import multiprocessing
-import optparse
-import os
-import re
-import subprocess
-import sys
-
-def _GetTotalMemoryInBytes():
-  if sys.platform in ('win32', 'cygwin'):
-    import ctypes
-
-    class MEMORYSTATUSEX(ctypes.Structure):
-      _fields_ = [
-        ("dwLength", ctypes.c_ulong),
-        ("dwMemoryLoad", ctypes.c_ulong),
-        ("ullTotalPhys", ctypes.c_ulonglong),
-        ("ullAvailPhys", ctypes.c_ulonglong),
-        ("ullTotalPageFile", ctypes.c_ulonglong),
-        ("ullAvailPageFile", ctypes.c_ulonglong),
-        ("ullTotalVirtual", ctypes.c_ulonglong),
-        ("ullAvailVirtual", ctypes.c_ulonglong),
-        ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
-      ]
-
-    stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX))
-    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
-    return stat.ullTotalPhys
-  elif sys.platform.startswith('linux'):
-    if os.path.exists("/proc/meminfo"):
-      with open("/proc/meminfo") as meminfo:
-        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
-        for line in meminfo:
-          match = memtotal_re.match(line)
-          if not match:
-            continue
-          return float(match.group(1)) * 2**10
-  elif sys.platform == 'darwin':
-    try:
-      return int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
-    except Exception:
-      return 0
-  # TODO(scottmg): Implement this for other platforms.
-  return 0
-
-
-def _GetDefaultConcurrentLinks(mem_per_link_gb, reserve_mem_gb):
-  # Inherit the legacy environment variable for people that have set it in GYP.
-  pool_size = int(os.getenv('GYP_LINK_CONCURRENCY', 0))
-  if pool_size:
-    return pool_size
-
-  mem_total_bytes = _GetTotalMemoryInBytes()
-  mem_total_bytes = max(0, mem_total_bytes - reserve_mem_gb * 2**30)
-  num_concurrent_links = int(max(1, mem_total_bytes / mem_per_link_gb / 2**30))
-  hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
-
-  try:
-    cpu_cap = multiprocessing.cpu_count()
-  except:
-    cpu_cap = 1
-
-  return min(num_concurrent_links, hard_cap, cpu_cap)
-
-
-def main():
-  parser = optparse.OptionParser()
-  parser.add_option('--mem_per_link_gb', action="store", type="int", default=8)
-  parser.add_option('--reserve_mem_gb', action="store", type="int", default=0)
-  parser.disable_interspersed_args()
-  options, _ = parser.parse_args()
-
-  print _GetDefaultConcurrentLinks(options.mem_per_link_gb,
-                                   options.reserve_mem_gb)
-  return 0
-
-if __name__ == '__main__':
-  sys.exit(main())
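
For reference, a minimal sketch of how _GetDefaultConcurrentLinks() plays out on a
hypothetical machine; the numbers are illustrative and the GYP_LINK_CONCURRENCY
environment overrides are omitted:

  # Illustrative only: a made-up 32 GB, 8-core machine with the script defaults.
  mem_per_link_gb = 8            # script default
  reserve_mem_gb = 0             # script default
  mem_total_bytes = 32 * 2**30   # hypothetical total RAM
  cpu_count = 8                  # hypothetical core count

  mem_total_bytes = max(0, mem_total_bytes - reserve_mem_gb * 2**30)
  num_links = int(max(1, mem_total_bytes / mem_per_link_gb / 2**30))
  print(min(num_links, cpu_count))  # -> 4 concurrent links
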
diff --git a/build/toolchain/get_cpu_count.py b/build/toolchain/get_cpu_count.py
deleted file mode 100644
index 1609ce6..0000000
--- a/build/toolchain/get_cpu_count.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This script prints the CPU count, used to set the capacity of the action pool.
-
-import multiprocessing
-import sys
-
-def main():
-  try:
-    cpu_count = multiprocessing.cpu_count()
-  except:
-    cpu_count = 1
-
-  print cpu_count
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/toolchain/goma.gni b/build/toolchain/goma.gni
deleted file mode 100644
index 29be588..0000000
--- a/build/toolchain/goma.gni
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Defines the configuration of Goma.
-
-declare_args() {
-  # Set to true to enable distributed compilation using Goma.
-  use_goma = false
-
-  # Set the default value based on the platform.
-  if (host_os == "win") {
-    # Absolute directory containing the gomacc.exe binary.
-    goma_dir = "C:\src\goma\goma-win64"
-  } else {
-    if (getenv("GOMA_DIR") != "") {
-      # Absolute directory containing the gomacc binary.
-      goma_dir = getenv("GOMA_DIR")
-    } else {
-      # Absolute directory containing the gomacc binary.
-      goma_dir = getenv("HOME") + "/goma"
-    }
-  }
-}
-
-assert(!(is_win && host_os != "win") || !use_goma,
-       "goma does not yet work in win cross builds, b/64390790")
diff --git a/build/toolchain/linux/BUILD.gn b/build/toolchain/linux/BUILD.gn
deleted file mode 100644
index 4e9ff44..0000000
--- a/build/toolchain/linux/BUILD.gn
+++ /dev/null
@@ -1,284 +0,0 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/sysroot.gni")
-import("//build/toolchain/gcc_toolchain.gni")
-
-clang_toolchain("clang_arm") {
-  toolprefix = "arm-linux-gnueabihf-"
-  toolchain_args = {
-    current_cpu = "arm"
-    current_os = "linux"
-  }
-}
-
-clang_toolchain("clang_arm64") {
-  toolprefix = "aarch64-linux-gnu-"
-  toolchain_args = {
-    current_cpu = "arm64"
-    current_os = "linux"
-  }
-}
-
-gcc_toolchain("arm64") {
-  toolprefix = "aarch64-linux-gnu-"
-
-  cc = "${toolprefix}gcc"
-  cxx = "${toolprefix}g++"
-
-  ar = "${toolprefix}ar"
-  ld = cxx
-  readelf = "${toolprefix}readelf"
-  nm = "${toolprefix}nm"
-
-  toolchain_args = {
-    current_cpu = "arm64"
-    current_os = "linux"
-    is_clang = false
-  }
-}
-
-gcc_toolchain("arm") {
-  toolprefix = "arm-linux-gnueabihf-"
-
-  cc = "${toolprefix}gcc"
-  cxx = "${toolprefix}g++"
-
-  ar = "${toolprefix}ar"
-  ld = cxx
-  readelf = "${toolprefix}readelf"
-  nm = "${toolprefix}nm"
-
-  toolchain_args = {
-    current_cpu = "arm"
-    current_os = "linux"
-    is_clang = false
-  }
-}
-
-clang_toolchain("clang_x86") {
-  # Output linker map files for binary size analysis.
-  enable_linker_map = true
-
-  toolchain_args = {
-    current_cpu = "x86"
-    current_os = "linux"
-  }
-}
-
-clang_toolchain("clang_x86_v8_arm") {
-  toolchain_args = {
-    current_cpu = "x86"
-    v8_current_cpu = "arm"
-    current_os = "linux"
-  }
-}
-
-clang_toolchain("clang_x86_v8_mipsel") {
-  toolchain_args = {
-    current_cpu = "x86"
-    v8_current_cpu = "mipsel"
-    current_os = "linux"
-  }
-}
-
-clang_toolchain("clang_x86_v8_mips") {
-  toolchain_args = {
-    current_cpu = "x86"
-    v8_current_cpu = "mips"
-    current_os = "linux"
-  }
-}
-
-gcc_toolchain("x86") {
-  cc = "gcc"
-  cxx = "g++"
-
-  readelf = "readelf"
-  nm = "nm"
-  ar = "ar"
-  ld = cxx
-
-  # Output linker map files for binary size analysis.
-  enable_linker_map = true
-
-  toolchain_args = {
-    current_cpu = "x86"
-    current_os = "linux"
-    is_clang = false
-  }
-}
-
-clang_toolchain("clang_x64") {
-  # Output linker map files for binary size analysis.
-  enable_linker_map = true
-
-  toolchain_args = {
-    current_cpu = "x64"
-    current_os = "linux"
-  }
-}
-
-clang_toolchain("clang_x64_v8_arm64") {
-  toolchain_args = {
-    current_cpu = "x64"
-    v8_current_cpu = "arm64"
-    current_os = "linux"
-  }
-}
-
-clang_toolchain("clang_x64_v8_mips64el") {
-  toolchain_args = {
-    current_cpu = "x64"
-    v8_current_cpu = "mips64el"
-    current_os = "linux"
-  }
-}
-
-clang_toolchain("clang_x64_v8_mips64") {
-  toolchain_args = {
-    current_cpu = "x64"
-    v8_current_cpu = "mips64"
-    current_os = "linux"
-  }
-}
-
-gcc_toolchain("x64") {
-  cc = "gcc"
-  cxx = "g++"
-
-  readelf = "readelf"
-  nm = "nm"
-  ar = "ar"
-  ld = cxx
-
-  # Output linker map files for binary size analysis.
-  enable_linker_map = true
-
-  toolchain_args = {
-    current_cpu = "x64"
-    current_os = "linux"
-    is_clang = false
-  }
-}
-
-clang_toolchain("clang_mipsel") {
-  toolchain_args = {
-    current_cpu = "mipsel"
-    current_os = "linux"
-  }
-}
-
-clang_toolchain("clang_mips64el") {
-  toolchain_args = {
-    current_cpu = "mips64el"
-    current_os = "linux"
-  }
-}
-
-gcc_toolchain("mipsel") {
-  toolprefix = "mipsel-linux-gnu-"
-
-  cc = "${toolprefix}gcc"
-  cxx = " ${toolprefix}g++"
-  ar = "${toolprefix}ar"
-  ld = cxx
-  readelf = "${toolprefix}readelf"
-  nm = "${toolprefix}nm"
-
-  toolchain_args = {
-    cc_wrapper = ""
-    current_cpu = "mipsel"
-    current_os = "linux"
-    is_clang = false
-    use_goma = false
-  }
-}
-
-gcc_toolchain("mips64el") {
-  toolprefix = "mips64el-linux-gnuabi64-"
-
-  cc = "${toolprefix}gcc"
-  cxx = "${toolprefix}g++"
-  ar = "${toolprefix}ar"
-  ld = cxx
-  readelf = "${toolprefix}readelf"
-  nm = "${toolprefix}nm"
-
-  toolchain_args = {
-    cc_wrapper = ""
-    current_cpu = "mips64el"
-    current_os = "linux"
-    is_clang = false
-    use_goma = false
-  }
-}
-
-gcc_toolchain("s390x") {
-  cc = "gcc"
-  cxx = "g++"
-
-  readelf = "readelf"
-  nm = "nm"
-  ar = "ar"
-  ld = cxx
-
-  toolchain_args = {
-    current_cpu = "s390x"
-    current_os = "linux"
-    is_clang = false
-  }
-}
-
-gcc_toolchain("ppc64") {
-  cc = "gcc"
-  cxx = "g++"
-
-  readelf = "readelf"
-  nm = "nm"
-  ar = "ar"
-  ld = cxx
-
-  toolchain_args = {
-    current_cpu = "ppc64"
-    current_os = "linux"
-    is_clang = false
-  }
-}
-
-gcc_toolchain("mips") {
-  toolprefix = "mips-linux-gnu-"
-
-  cc = "${toolprefix}gcc"
-  cxx = "${toolprefix}g++"
-
-  readelf = "${toolprefix}readelf"
-  nm = "${toolprefix}nm"
-  ar = "${toolprefix}ar"
-  ld = cxx
-
-  toolchain_args = {
-    current_cpu = "mips"
-    current_os = "linux"
-    is_clang = false
-  }
-}
-
-gcc_toolchain("mips64") {
-  toolprefix = "mips64-linux-gnuabi64-"
-
-  cc = "${toolprefix}gcc"
-  cxx = "${toolprefix}g++"
-
-  readelf = "${toolprefix}readelf"
-  nm = "${toolprefix}nm"
-  ar = "${toolprefix}ar"
-  ld = cxx
-
-  toolchain_args = {
-    current_cpu = "mips64"
-    current_os = "linux"
-    is_clang = false
-  }
-}
diff --git a/build/toolchain/linux/unbundle/BUILD.gn b/build/toolchain/linux/unbundle/BUILD.gn
deleted file mode 100644
index 4719d54..0000000
--- a/build/toolchain/linux/unbundle/BUILD.gn
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/toolchain/gcc_toolchain.gni")
-
-gcc_toolchain("default") {
-  cc = getenv("CC")
-  cxx = getenv("CXX")
-  ar = getenv("AR")
-  nm = getenv("NM")
-  ld = cxx
-
-  extra_cflags = getenv("CFLAGS")
-  extra_cppflags = getenv("CPPFLAGS")
-  extra_cxxflags = getenv("CXXFLAGS")
-  extra_ldflags = getenv("LDFLAGS")
-
-  toolchain_args = {
-    current_cpu = current_cpu
-    current_os = current_os
-  }
-}
-
-gcc_toolchain("host") {
-  cc = getenv("BUILD_CC")
-  cxx = getenv("BUILD_CXX")
-  ar = getenv("BUILD_AR")
-  nm = getenv("BUILD_NM")
-  ld = cxx
-
-  extra_cflags = getenv("BUILD_CFLAGS")
-  extra_cppflags = getenv("BUILD_CPPFLAGS")
-  extra_cxxflags = getenv("BUILD_CXXFLAGS")
-  extra_ldflags = getenv("BUILD_LDFLAGS")
-
-  toolchain_args = {
-    current_cpu = current_cpu
-    current_os = current_os
-  }
-}
diff --git a/build/toolchain/linux/unbundle/README.md b/build/toolchain/linux/unbundle/README.md
deleted file mode 100644
index ac5808a..0000000
--- a/build/toolchain/linux/unbundle/README.md
+++ /dev/null
@@ -1,41 +0,0 @@
-# Overview
-
-This directory contains files that make it possible for Linux
-distributions to build Chromium using the system toolchain.
-
-For more info on the established way such builds are configured,
-please read the following:
-
- - https://www.gnu.org/software/make/manual/html_node/Implicit-Variables.html
-
-Why do distros want CFLAGS, LDFLAGS, etc? Please read the following
-for some examples. This is not an exhaustive list.
-
- - https://wiki.debian.org/Hardening
- - https://wiki.ubuntu.com/DistCompilerFlags
- - https://fedoraproject.org/wiki/Changes/Harden_All_Packages
- - https://fedoraproject.org/wiki/Changes/Modernise_GCC_Flags
- - https://fedoraproject.org/wiki/Packaging:Guidelines#Compiler_flags
- - https://blog.flameeyes.eu/2010/09/are-we-done-with-ldflags/
- - https://blog.flameeyes.eu/2008/08/flags-and-flags/
-
-# Usage
-
-Add the following to GN args:
-
-```
-custom_toolchain="//build/toolchain/linux/unbundle:default"
-host_toolchain="//build/toolchain/linux/unbundle:default"
-```
-
-See [more docs on GN](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/quick_start.md).
-
-To cross-compile (not fully tested), add the following:
-
-```
-host_toolchain="//build/toolchain/linux/unbundle:host"
-v8_snapshot_toolchain="//build/toolchain/linux/unbundle:host"
-```
-
-Note: when cross-compiling for a 32-bit target, a matching 32-bit toolchain
-may be needed.
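
To make the setup concrete, here is a hedged sketch of driving such a build from
Python; the compiler names, flags, and output directory are placeholders, and the
GN args are the ones listed above:

  import os
  import subprocess

  # Placeholder toolchain and flags; unbundle/BUILD.gn reads these via getenv().
  env = dict(os.environ, CC='gcc', CXX='g++', AR='ar', NM='nm',
             CFLAGS='-O2', CPPFLAGS='', CXXFLAGS='-O2', LDFLAGS='')
  args = ('custom_toolchain="//build/toolchain/linux/unbundle:default" '
          'host_toolchain="//build/toolchain/linux/unbundle:default"')
  subprocess.check_call(['gn', 'gen', 'out/Release', '--args=' + args], env=env)
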
diff --git a/build/toolchain/mac/BUILD.gn b/build/toolchain/mac/BUILD.gn
deleted file mode 100644
index 441ff0f..0000000
--- a/build/toolchain/mac/BUILD.gn
+++ /dev/null
@@ -1,545 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# TODO(brettw) Use "gcc_toolchain.gni" like the Linux toolchains. This requires
-# some enhancements since the commands on Mac are slightly different than on
-# Linux.
-
-import("../goma.gni")
-import("//build/config/clang/clang.gni")
-if (is_ios) {
-  import("//build/config/ios/ios_sdk.gni")
-}
-import("//build/config/mac/mac_sdk.gni")
-import("//build/config/mac/symbols.gni")
-
-assert(host_os == "mac")
-
-import("//build/toolchain/cc_wrapper.gni")
-import("//build/toolchain/clang_static_analyzer.gni")
-import("//build/toolchain/goma.gni")
-import("//build/toolchain/toolchain.gni")
-import("//build/toolchain/concurrent_links.gni")
-
-declare_args() {
-  # Reduce the number of tasks using the copy_bundle_data and compile_xcassets
-  # tools as they can cause lots of I/O contention when invoking ninja with a
-  # large number of parallel jobs (e.g. when using a distributed build like goma).
-  bundle_pool_depth = -1
-}
-
-if (current_toolchain == default_toolchain) {
-  pool("bundle_pool") {
-    if (bundle_pool_depth == -1) {
-      depth = concurrent_links
-    } else {
-      depth = bundle_pool_depth
-    }
-  }
-}
-
-# When implementing tools using Python scripts, a TOOL_VERSION=N env
-# variable is placed in front of the command. The N should be incremented
-# whenever the script is changed, so that the build system rebuilds all
-# edges that utilize the script. Ideally this should be changed to use
-# proper input-dirty checking, but that could be expensive. Instead, use a
-# script to get the tool scripts' modification time to use as the version.
-# This won't cause a re-generation of GN files when the tool script changes
-# but it will cause edges to be marked as dirty if the ninja files are
-# regenerated. See https://crbug.com/619083 for details. A proper fix
-# would be to have inputs to tools (https://crbug.com/621119).
-tool_versions =
-    exec_script("get_tool_mtime.py",
-                rebase_path([
-                              "//build/toolchain/mac/compile_xcassets.py",
-                              "//build/toolchain/mac/filter_libtool.py",
-                              "//build/toolchain/mac/linker_driver.py",
-                            ],
-                            root_build_dir),
-                "trim scope")
-
-# Shared toolchain definition. Invocations should set current_os to set the
-# build args in this definition.
-template("mac_toolchain") {
-  toolchain(target_name) {
-    if (use_system_xcode) {
-      env_wrapper = ""
-    } else {
-      env_wrapper = "export DEVELOPER_DIR=$hermetic_xcode_path; "
-    }
-
-    # When invoking this toolchain not as the default one, these args will be
-    # passed to the build. They are ignored when this is the default toolchain.
-    assert(defined(invoker.toolchain_args),
-           "Toolchains must declare toolchain_args")
-    toolchain_args = {
-      # Populate toolchain args from the invoker.
-      forward_variables_from(invoker.toolchain_args, "*")
-
-      # The host toolchain value computed by the default toolchain's setup
-      # needs to be passed through unchanged to all secondary toolchains to
-      # ensure that it's always the same, regardless of the values that may be
-      # set on those toolchains.
-      host_toolchain = host_toolchain
-    }
-
-    # Supports building with the version of clang shipped with Xcode when
-    # targeting iOS by not respecting clang_base_path.
-    if (toolchain_args.current_os == "ios" && use_xcode_clang) {
-      prefix = ""
-    } else {
-      prefix = rebase_path("$clang_base_path/bin/", root_build_dir)
-    }
-
-    _cc = "${prefix}clang"
-    _cxx = "${prefix}clang++"
-
-    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
-    # toolchain args, use those values, otherwise default to the global one.
-    # This works because the only reasonable override that toolchains might
-    # supply for these values is to force-disable them.
-    if (defined(toolchain_args.use_goma)) {
-      toolchain_uses_goma = toolchain_args.use_goma
-    } else {
-      toolchain_uses_goma = use_goma
-    }
-    if (defined(toolchain_args.cc_wrapper)) {
-      toolchain_cc_wrapper = toolchain_args.cc_wrapper
-    } else {
-      toolchain_cc_wrapper = cc_wrapper
-    }
-
-    # Compute the compiler prefix.
-    if (toolchain_uses_goma) {
-      assert(toolchain_cc_wrapper == "",
-             "Goma and cc_wrapper can't be used together.")
-      compiler_prefix = "$goma_dir/gomacc "
-    } else if (toolchain_cc_wrapper != "") {
-      compiler_prefix = toolchain_cc_wrapper + " "
-    } else {
-      compiler_prefix = ""
-    }
-
-    cc = compiler_prefix + _cc
-    cxx = compiler_prefix + _cxx
-    ld = _cxx
-
-    if (use_clang_static_analyzer) {
-      analyzer_wrapper =
-          rebase_path("//build/toolchain/clang_static_analyzer_wrapper.py",
-                      root_build_dir) + "  --mode=clang"
-      cc = analyzer_wrapper + " ${cc}"
-      cxx = analyzer_wrapper + " ${cxx}"
-
-      ld = cxx
-    }
-
-    linker_driver =
-        "TOOL_VERSION=${tool_versions.linker_driver} " +
-        rebase_path("//build/toolchain/mac/linker_driver.py", root_build_dir)
-
-    # On iOS, the final applications are assembled using lipo (to support fat
-    # builds). The correct flags are passed to the linker_driver.py script
-    # directly during the lipo call.
-    if (toolchain_args.current_os != "ios") {
-      _enable_dsyms = enable_dsyms
-      _save_unstripped_output = save_unstripped_output
-    } else {
-      _enable_dsyms = false
-      _save_unstripped_output = false
-    }
-
-    # Make these apply to all tools below.
-    lib_switch = "-l"
-    lib_dir_switch = "-L"
-
-    # Object files go in this directory. Use label_name instead of
-    # target_output_name since labels will generally have no spaces and will be
-    # unique in the directory.
-    object_subdir = "{{target_out_dir}}/{{label_name}}"
-
-    # If dSYMs are enabled, this flag will be added to the link tools.
-    if (_enable_dsyms) {
-      dsym_switch = " -Wcrl,dsym,{{root_out_dir}} "
-      dsym_output_dir =
-          "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.dSYM"
-      dsym_output = [
-        "$dsym_output_dir/",
-        "$dsym_output_dir/Contents/Info.plist",
-        "$dsym_output_dir/Contents/Resources/DWARF/" +
-            "{{target_output_name}}{{output_extension}}",
-      ]
-    } else {
-      dsym_switch = ""
-    }
-
-    if (_save_unstripped_output) {
-      _unstripped_output = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.unstripped"
-    }
-
-    tool("cc") {
-      depfile = "{{output}}.d"
-      precompiled_header_type = "gcc"
-      command = "$env_wrapper $cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
-      depsformat = "gcc"
-      description = "CC {{output}}"
-      outputs = [
-        "$object_subdir/{{source_name_part}}.o",
-      ]
-    }
-
-    tool("cxx") {
-      depfile = "{{output}}.d"
-      precompiled_header_type = "gcc"
-      command = "$env_wrapper $cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
-      depsformat = "gcc"
-      description = "CXX {{output}}"
-      outputs = [
-        "$object_subdir/{{source_name_part}}.o",
-      ]
-    }
-
-    tool("asm") {
-      # For GCC we can just use the C compiler to compile assembly.
-      depfile = "{{output}}.d"
-      command = "$env_wrapper $cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}"
-      depsformat = "gcc"
-      description = "ASM {{output}}"
-      outputs = [
-        "$object_subdir/{{source_name_part}}.o",
-      ]
-    }
-
-    tool("objc") {
-      depfile = "{{output}}.d"
-      precompiled_header_type = "gcc"
-      command = "$env_wrapper $cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objc}} -c {{source}} -o {{output}}"
-      depsformat = "gcc"
-      description = "OBJC {{output}}"
-      outputs = [
-        "$object_subdir/{{source_name_part}}.o",
-      ]
-    }
-
-    tool("objcxx") {
-      depfile = "{{output}}.d"
-      precompiled_header_type = "gcc"
-      command = "$env_wrapper $cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objcc}} -c {{source}} -o {{output}}"
-      depsformat = "gcc"
-      description = "OBJCXX {{output}}"
-      outputs = [
-        "$object_subdir/{{source_name_part}}.o",
-      ]
-    }
-
-    tool("alink") {
-      script =
-          rebase_path("//build/toolchain/mac/filter_libtool.py", root_build_dir)
-
-      # Note about -filelist: Apple's linker reads the file list file and
-      # interprets each newline-separated chunk of text as a file name. It
-      # doesn't do the things one would expect from the shell like unescaping
-      # or handling quotes. In contrast, when Ninja finds a file name with
-      # spaces, it single-quotes them in $inputs_newline as it would normally
-      # do for command-line arguments. Thus any source names with spaces, or
-      # label names with spaces (which GN bases the output paths on) will be
-      # corrupted by this process. Don't use spaces for source files or labels.
-      rspfile = "{{output}}.rsp"
-      rspfile_content = "{{inputs_newline}}"
-      command = "$env_wrapper rm -f {{output}} && TOOL_VERSION=${tool_versions.filter_libtool} python $script libtool -static {{arflags}} -o {{output}} -filelist $rspfile"
-      description = "LIBTOOL-STATIC {{output}}"
-      outputs = [
-        "{{output_dir}}/{{target_output_name}}{{output_extension}}",
-      ]
-      default_output_dir = "{{target_out_dir}}"
-      default_output_extension = ".a"
-      output_prefix = "lib"
-    }
-
-    tool("solink") {
-      dylib = "{{output_dir}}/{{target_output_name}}{{output_extension}}"  # eg "./libfoo.dylib"
-      rspfile = dylib + ".rsp"
-      pool = "//build/toolchain:link_pool($default_toolchain)"
-
-      # These variables are not built into GN but are helpers that implement
-      # (1) linking to produce a .dylib, (2) extracting the symbols from that
-      # file to a temporary file, (3) if the temporary file has differences from
-      # the existing .TOC file, overwrite it, otherwise, don't change it.
-      #
-      # As a special case, if the library reexports symbols from other dynamic
-      # libraries, we always update the .TOC and skip the temporary file and
-      # diffing steps, since that library always needs to be re-linked.
-      tocname = dylib + ".TOC"
-      temporary_tocname = dylib + ".tmp"
-
-      does_reexport_command = "[ ! -e \"$dylib\" -o ! -e \"$tocname\" ] || otool -l \"$dylib\" | grep -q LC_REEXPORT_DYLIB"
-
-      link_command = "$linker_driver $ld -shared "
-      if (is_component_build) {
-        link_command += " -Wl,-install_name,@rpath/\"{{target_output_name}}{{output_extension}}\" "
-      }
-      link_command += dsym_switch
-      link_command += "{{ldflags}} -o \"$dylib\" -Wl,-filelist,\"$rspfile\" {{libs}} {{solibs}}"
-
-      replace_command = "if ! cmp -s \"$temporary_tocname\" \"$tocname\"; then mv \"$temporary_tocname\" \"$tocname\""
-      extract_toc_command = "{ otool -l \"$dylib\" | grep LC_ID_DYLIB -A 5; nm -gP \"$dylib\" | cut -f1-2 -d' ' | grep -v U\$\$; true; }"
-
-      command = "$env_wrapper if $does_reexport_command ; then $link_command && $extract_toc_command > \"$tocname\"; else $link_command && $extract_toc_command > \"$temporary_tocname\" && $replace_command ; fi; fi"
-
-      rspfile_content = "{{inputs_newline}}"
-
-      description = "SOLINK {{output}}"
-
-      # Use this for {{output_extension}} expansions unless a target manually
-      # overrides it (in which case {{output_extension}} will be what the target
-      # specifies).
-      default_output_dir = "{{root_out_dir}}"
-      default_output_extension = ".dylib"
-
-      output_prefix = "lib"
-
-      # Since the above commands only update the .TOC file when it changes, ask
-      # Ninja to check if the timestamp actually changed to know if downstream
-      # dependencies should be recompiled.
-      restat = true
-
-      # Tell GN about the output files. It will link to the dylib but use the
-      # tocname for dependency management.
-      outputs = [
-        dylib,
-        tocname,
-      ]
-      link_output = dylib
-      depend_output = tocname
-
-      if (_enable_dsyms) {
-        outputs += dsym_output
-      }
-      if (_save_unstripped_output) {
-        outputs += [ _unstripped_output ]
-      }
-    }
-
-    tool("solink_module") {
-      sofile = "{{output_dir}}/{{target_output_name}}{{output_extension}}"  # eg "./libfoo.so"
-      rspfile = sofile + ".rsp"
-      pool = "//build/toolchain:link_pool($default_toolchain)"
-
-      link_command = "$env_wrapper $linker_driver $ld -bundle {{ldflags}} -o \"$sofile\" -Wl,-filelist,\"$rspfile\""
-      if (is_component_build) {
-        link_command += " -Wl,-install_name,@rpath/{{target_output_name}}{{output_extension}}"
-      }
-      link_command += dsym_switch
-      link_command += " {{solibs}} {{libs}}"
-      command = link_command
-
-      rspfile_content = "{{inputs_newline}}"
-
-      description = "SOLINK_MODULE {{output}}"
-
-      # Use this for {{output_extension}} expansions unless a target manually
-      # overrides it (in which case {{output_extension}} will be what the target
-      # specifies).
-      default_output_dir = "{{root_out_dir}}"
-      default_output_extension = ".so"
-
-      outputs = [
-        sofile,
-      ]
-
-      if (_enable_dsyms) {
-        outputs += dsym_output
-      }
-      if (_save_unstripped_output) {
-        outputs += [ _unstripped_output ]
-      }
-    }
-
-    tool("link") {
-      outfile = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
-      rspfile = "$outfile.rsp"
-      pool = "//build/toolchain:link_pool($default_toolchain)"
-
-      # Note about -filelist: Apple's linker reads the file list file and
-      # interprets each newline-separated chunk of text as a file name. It
-      # doesn't do the things one would expect from the shell like unescaping
-      # or handling quotes. In contrast, when Ninja finds a file name with
-      # spaces, it single-quotes them in $inputs_newline as it would normally
-      # do for command-line arguments. Thus any source names with spaces, or
-      # label names with spaces (which GN bases the output paths on) will be
-      # corrupted by this process. Don't use spaces for source files or labels.
-      command = "$env_wrapper $linker_driver $ld $dsym_switch {{ldflags}} -o \"$outfile\" -Wl,-filelist,\"$rspfile\" {{solibs}} {{libs}}"
-      description = "LINK $outfile"
-      rspfile_content = "{{inputs_newline}}"
-      outputs = [
-        outfile,
-      ]
-
-      if (_enable_dsyms) {
-        outputs += dsym_output
-      }
-      if (_save_unstripped_output) {
-        outputs += [ _unstripped_output ]
-      }
-
-      default_output_dir = "{{root_out_dir}}"
-    }
-
-    # These two are really entirely generic, but have to be repeated in
-    # each toolchain because GN doesn't allow a template to be used here.
-    # See //build/toolchain/toolchain.gni for details.
-    tool("stamp") {
-      command = stamp_command
-      description = stamp_description
-    }
-    tool("copy") {
-      command = copy_command
-      description = copy_description
-    }
-
-    tool("copy_bundle_data") {
-      # copy_command uses hardlinks if possible but this does not work with
-      # directories. If the source is a directory, instead use "pax" to create
-      # the same tree structure using hardlinks to individual files (this
-      # preserves symbolic links too) as recommended in the replies to the
-      # question at http://serverfault.com/q/209888/43689 ("cp -al" isn't
-      # available on macOS).
-      #
-      # According to the man page for pax, the commands to use to clone
-      # olddir to newdir using pax are the following:
-      #
-      #   $ mkdir newdir
-      #   $ cd olddir
-      #   $ pax -rwl . ../newdir
-      #
-      # The _copydir command does exactly that but uses an absolute path
-      # constructed using the shell variable $OLDPWD (automatically set when
-      # cd is used), as computing the relative path is a bit complex and
-      # using pwd would require a sub-shell to be created.
-      _copydir = "mkdir -p {{output}} && cd {{source}} && " +
-                 "pax -rwl . \"\$OLDPWD\"/{{output}}"
-      command = "rm -rf {{output}} && if [[ -d {{source}} ]]; then " +
-                _copydir + "; else " + copy_command + "; fi"
-
-      description = "COPY_BUNDLE_DATA {{source}} {{output}}"
-      pool = ":bundle_pool($default_toolchain)"
-    }
-    tool("compile_xcassets") {
-      _tool = rebase_path("//build/toolchain/mac/compile_xcassets.py",
-                          root_build_dir)
-      if (is_ios) {
-        _sdk_name = ios_sdk_name
-        _min_deployment_target = ios_deployment_target
-        _compress_pngs = ""
-      } else {
-        _sdk_name = mac_sdk_name
-        _min_deployment_target = mac_deployment_target
-        _compress_pngs = " -c "
-      }
-      command =
-          "$env_wrapper rm -f \"{{output}}\" && " +
-          "TOOL_VERSION=${tool_versions.compile_xcassets} " +
-          "python $_tool$_compress_pngs -p \"$_sdk_name\" " +
-          "-t \"$_min_deployment_target\" -T \"{{bundle_product_type}}\" " +
-          "-P \"{{bundle_partial_info_plist}}\" -o {{output}} {{inputs}}"
-
-      description = "COMPILE_XCASSETS {{output}}"
-      pool = ":bundle_pool($default_toolchain)"
-    }
-
-    tool("action") {
-      pool = "//build/toolchain:action_pool($default_toolchain)"
-    }
-  }
-}
-
-mac_toolchain("clang_arm") {
-  toolchain_args = {
-    current_cpu = "arm"
-    current_os = "mac"
-  }
-}
-
-mac_toolchain("clang_x64") {
-  toolchain_args = {
-    current_cpu = "x64"
-    current_os = "mac"
-
-    if (target_os == "ios") {
-      # TODO(crbug.com/753445): the use_sanitizer_coverage arg is currently
-      # not supported by the Chromium mac_clang_x64 toolchain on iOS
-      # distribution.
-      use_sanitizer_coverage = false
-    }
-  }
-}
-
-mac_toolchain("clang_x86") {
-  toolchain_args = {
-    current_cpu = "x86"
-    current_os = "mac"
-  }
-}
-
-mac_toolchain("clang_x86_v8_arm") {
-  toolchain_args = {
-    current_cpu = "x86"
-    v8_current_cpu = "arm"
-    current_os = "mac"
-  }
-}
-
-mac_toolchain("clang_x86_v8_mipsel") {
-  toolchain_args = {
-    current_cpu = "x86"
-    v8_current_cpu = "mipsel"
-    current_os = "mac"
-  }
-}
-
-mac_toolchain("clang_x64_v8_arm64") {
-  toolchain_args = {
-    current_cpu = "x64"
-    v8_current_cpu = "arm64"
-    current_os = "mac"
-  }
-}
-
-mac_toolchain("clang_x64_v8_mips64el") {
-  toolchain_args = {
-    current_cpu = "x64"
-    v8_current_cpu = "mips64el"
-    current_os = "mac"
-  }
-}
-
-if (is_ios) {
-  mac_toolchain("ios_clang_arm") {
-    toolchain_args = {
-      current_cpu = "arm"
-      current_os = "ios"
-    }
-  }
-
-  mac_toolchain("ios_clang_arm64") {
-    toolchain_args = {
-      current_cpu = "arm64"
-      current_os = "ios"
-    }
-  }
-
-  mac_toolchain("ios_clang_x86") {
-    toolchain_args = {
-      current_cpu = "x86"
-      current_os = "ios"
-    }
-  }
-
-  mac_toolchain("ios_clang_x64") {
-    toolchain_args = {
-      current_cpu = "x64"
-      current_os = "ios"
-    }
-  }
-}
diff --git a/build/toolchain/mac/OWNERS b/build/toolchain/mac/OWNERS
deleted file mode 100644
index 0ed2e15..0000000
--- a/build/toolchain/mac/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-rsesek@chromium.org
-sdefresne@chromium.org
diff --git a/build/toolchain/mac/compile_xcassets.py b/build/toolchain/mac/compile_xcassets.py
deleted file mode 100644
index c1f4680..0000000
--- a/build/toolchain/mac/compile_xcassets.py
+++ /dev/null
@@ -1,251 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import os
-import re
-import subprocess
-import sys
-import tempfile
-
-"""Wrapper around actool to compile assets catalog.
-
-The script compile_xcassets.py is a wrapper around actool to compile
-assets catalog to Assets.car that turns warning into errors. It also
-fixes some quirks of actool to make it work from ninja (mostly that
-actool seems to require absolute path but gn generates command-line
-with relative paths).
-
-The wrapper filter out any message that is not a section header and
-not a warning or error message, and fails if filtered output is not
-empty. This should to treat all warnings as error until actool has
-an option to fail with non-zero error code when there are warnings.
-"""
-
-# Pattern matching a section header in the output of actool.
-SECTION_HEADER = re.compile('^/\\* ([^ ]*) \\*/$')
-
-# Name of the section containing informational messages that can be ignored.
-NOTICE_SECTION = 'com.apple.actool.compilation-results'
-
-# Regular expressions matching spurious messages from actool that should be
-# ignored (as they are bogus). Generally a bug should be filed with Apple
-# when adding a pattern here.
-SPURIOUS_PATTERNS = map(re.compile, [
-    # crbug.com/770634, likely a bug in Xcode 9.1 beta, remove once build
-    # requires a version of Xcode with a fix.
-    r'\[\]\[ipad\]\[76x76\]\[\]\[\]\[1x\]\[\]\[\]: notice: \(null\)',
-
-    # crbug.com/770634, likely a bug in Xcode 9.2 beta, remove once build
-    # requires a version of Xcode with a fix.
-    r'\[\]\[ipad\]\[76x76\]\[\]\[\]\[1x\]\[\]\[\]: notice: 76x76@1x app icons'
-    ' only apply to iPad apps targeting releases of iOS prior to 10.0.',
-])
-
-# Map special types of asset catalog to the corresponding command-line
-# parameter that needs to be passed to actool.
-ACTOOL_FLAG_FOR_ASSET_TYPE = {
-  '.appiconset': '--app-icon',
-  '.launchimage': '--launch-image',
-}
-
-
-def IsSpuriousMessage(line):
-  """Returns whether line contains a spurious message that should be ignored."""
-  for pattern in SPURIOUS_PATTERNS:
-    match = pattern.search(line)
-    if match is not None:
-      return True
-  return False
-
-
-def FilterCompilerOutput(compiler_output, relative_paths):
-  """Filers actool compilation output.
-
-  The compiler output is composed of multiple sections for each different
-  level of output (error, warning, notices, ...). Each section starts with
-  the section name on a single line, followed by all the messages from the
-  section.
-
-  The function filters out any lines that are not in the
-  com.apple.actool.errors or com.apple.actool.document.warnings sections
-  (as spurious messages come before any section of the output).
-
-  See crbug.com/730054, crbug.com/739163 and crbug.com/770634 for some example
-  messages that pollute the output of actool and cause flaky builds.
-
-  Args:
-    compiler_output: string containing the output generated by the
-      compiler (contains both stdout and stderr)
-    relative_paths: mapping from absolute to relative paths used to
-      convert paths in the warning and error messages (unknown paths
-      will be left unaltered)
-
-  Returns:
-    The filtered output of the compiler. If the compilation was a
-    success, then the output will be empty, otherwise it will use
-    relative paths and omit any irrelevant output.
-  """
-
-  filtered_output = []
-  current_section = None
-  data_in_section = False
-  for line in compiler_output.splitlines():
-    match = SECTION_HEADER.search(line)
-    if match is not None:
-      data_in_section = False
-      current_section = match.group(1)
-      continue
-    if current_section and current_section != NOTICE_SECTION:
-      if IsSpuriousMessage(line):
-        continue
-      absolute_path = line.split(':')[0]
-      relative_path = relative_paths.get(absolute_path, absolute_path)
-      if absolute_path != relative_path:
-        line = relative_path + line[len(absolute_path):]
-      if not data_in_section:
-        data_in_section = True
-        filtered_output.append('/* %s */\n' % current_section)
-      filtered_output.append(line + '\n')
-
-  return ''.join(filtered_output)
-
-
-def CompileAssetCatalog(output, platform, product_type, min_deployment_target,
-    inputs, compress_pngs, partial_info_plist):
-  """Compile the .xcassets bundles to an asset catalog using actool.
-
-  Args:
-    output: absolute path to the containing bundle
-    platform: the targeted platform
-    product_type: the bundle type
-    min_deployment_target: minimum deployment target
-    inputs: list of absolute paths to .xcassets bundles
-    compress_pngs: whether to enable compression of pngs
-    partial_info_plist: path to partial Info.plist to generate
-  """
-  command = [
-      'xcrun', 'actool', '--output-format=human-readable-text',
-      '--notices', '--warnings', '--errors', '--platform', platform,
-      '--minimum-deployment-target', min_deployment_target,
-  ]
-
-  if compress_pngs:
-    command.extend(['--compress-pngs'])
-
-  if product_type != '':
-    command.extend(['--product-type', product_type])
-
-  if platform == 'macosx':
-    command.extend(['--target-device', 'mac'])
-  else:
-    command.extend(['--target-device', 'iphone', '--target-device', 'ipad'])
-
-  # Scan the input directories for the presence of asset catalog types that
-  # require special treatment, and if so, add them to the actool command-line.
-  for relative_path in inputs:
-
-    if not os.path.isdir(relative_path):
-      continue
-
-    for file_or_dir_name in os.listdir(relative_path):
-      if not os.path.isdir(os.path.join(relative_path, file_or_dir_name)):
-        continue
-
-      asset_name, asset_type = os.path.splitext(file_or_dir_name)
-      if asset_type not in ACTOOL_FLAG_FOR_ASSET_TYPE:
-        continue
-
-      command.extend([ACTOOL_FLAG_FOR_ASSET_TYPE[asset_type], asset_name])
-
-  # Always ask actool to generate a partial Info.plist file. If no path
-  # has been given by the caller, use a temporary file name.
-  temporary_file = None
-  if not partial_info_plist:
-    temporary_file = tempfile.NamedTemporaryFile(suffix='.plist')
-    partial_info_plist = temporary_file.name
-
-  command.extend(['--output-partial-info-plist', partial_info_plist])
-
-  # Dictionary used to convert absolute paths back to their relative form
-  # in the output of actool.
-  relative_paths = {}
-
-  # actool crashes if paths are relative, so convert input and output paths
-  # to absolute paths, and record the relative paths to fix them back when
-  # filtering the output.
-  absolute_output = os.path.abspath(output)
-  relative_paths[output] = absolute_output
-  relative_paths[os.path.dirname(output)] = os.path.dirname(absolute_output)
-  command.extend(['--compile', os.path.dirname(os.path.abspath(output))])
-
-  for relative_path in inputs:
-    absolute_path = os.path.abspath(relative_path)
-    relative_paths[absolute_path] = relative_path
-    command.append(absolute_path)
-
-  try:
-    # Run actool and redirect stdout and stderr to the same pipe (as actool
-    # is confused about what should go to stderr/stdout).
-    process = subprocess.Popen(
-        command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    stdout, _ = process.communicate()
-
-    # Filter the output to remove all garbage and to fix the paths.
-    stdout = FilterCompilerOutput(stdout, relative_paths)
-
-    if process.returncode or stdout:
-      sys.stderr.write(stdout)
-      sys.exit(1)
-
-  finally:
-    if temporary_file:
-      temporary_file.close()
-
-
-def Main():
-  parser = argparse.ArgumentParser(
-      description='compile assets catalog for a bundle')
-  parser.add_argument(
-      '--platform', '-p', required=True,
-      choices=('macosx', 'iphoneos', 'iphonesimulator'),
-      help='target platform for the compiled assets catalog')
-  parser.add_argument(
-      '--minimum-deployment-target', '-t', required=True,
-      help='minimum deployment target for the compiled assets catalog')
-  parser.add_argument(
-      '--output', '-o', required=True,
-      help='path to the compiled assets catalog')
-  parser.add_argument(
-      '--compress-pngs', '-c', action='store_true', default=False,
-      help='recompress PNGs while compiling assets catalog')
-  parser.add_argument(
-      '--product-type', '-T',
-      help='type of the containing bundle')
-  parser.add_argument(
-      '--partial-info-plist', '-P',
-      help='path to partial info plist to create')
-  parser.add_argument(
-      'inputs', nargs='+',
-      help='path to input assets catalog sources')
-  args = parser.parse_args()
-
-  if os.path.basename(args.output) != 'Assets.car':
-    sys.stderr.write(
-        'output should be path to compiled asset catalog, not '
-        'to the containing bundle: %s\n' % (args.output,))
-    sys.exit(1)
-
-  CompileAssetCatalog(
-      args.output,
-      args.platform,
-      args.product_type,
-      args.minimum_deployment_target,
-      args.inputs,
-      args.compress_pngs,
-      args.partial_info_plist)
-
-
-if __name__ == '__main__':
-  sys.exit(Main())
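
The flags here mirror what tool("compile_xcassets") in mac/BUILD.gn passes. A
hedged sketch of an equivalent standalone invocation; the paths and deployment
target are hypothetical:

  import subprocess

  # Hypothetical paths; the flags match the argparse definitions above.
  subprocess.check_call([
      'python', 'compile_xcassets.py',
      '-p', 'iphoneos',                    # --platform
      '-t', '10.0',                        # --minimum-deployment-target
      '-T', 'com.apple.product-type.application',  # --product-type
      '-P', 'obj/partial_info.plist',      # --partial-info-plist
      '-o', 'Chromium.app/Assets.car',     # --output must end in Assets.car
      'Chromium.xcassets',                 # one or more input catalogs
  ])
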
diff --git a/build/toolchain/mac/compile_xcassets_unittests.py b/build/toolchain/mac/compile_xcassets_unittests.py
deleted file mode 100644
index 7655df8..0000000
--- a/build/toolchain/mac/compile_xcassets_unittests.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import unittest
-import compile_xcassets
-
-
-class TestFilterCompilerOutput(unittest.TestCase):
-
-  relative_paths = {
-    '/Users/janedoe/chromium/src/Chromium.xcassets':
-        '../../Chromium.xcassets',
-    '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car':
-        'Chromium.app/Assets.car',
-  }
-
-  def testNoError(self):
-    self.assertEquals(
-        '',
-        compile_xcassets.FilterCompilerOutput(
-            '/* com.apple.actool.compilation-results */\n'
-            '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n',
-            self.relative_paths))
-
-  def testNoErrorRandomMessages(self):
-    self.assertEquals(
-        '',
-        compile_xcassets.FilterCompilerOutput(
-            '2017-07-04 04:59:19.460 ibtoold[23487:41214] CoreSimulator is att'
-                'empting to unload a stale CoreSimulatorService job.  Existing'
-                ' job (com.apple.CoreSimulator.CoreSimulatorService.179.1.E8tt'
-                'yeDeVgWK) is from an older version and is being removed to pr'
-                'event problems.\n'
-            '/* com.apple.actool.compilation-results */\n'
-            '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n',
-            self.relative_paths))
-
-  def testWarning(self):
-    self.assertEquals(
-        '/* com.apple.actool.document.warnings */\n'
-        '../../Chromium.xcassets:./image1.imageset/[universal][][][1x][][][]['
-            '][][]: warning: The file "image1.png" for the image set "image1"'
-            ' does not exist.\n',
-        compile_xcassets.FilterCompilerOutput(
-            '/* com.apple.actool.document.warnings */\n'
-            '/Users/janedoe/chromium/src/Chromium.xcassets:./image1.imageset/['
-                'universal][][][1x][][][][][][]: warning: The file "image1.png'
-                '" for the image set "image1" does not exist.\n'
-            '/* com.apple.actool.compilation-results */\n'
-            '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n',
-            self.relative_paths))
-
-  def testError(self):
-    self.assertEquals(
-        '/* com.apple.actool.errors */\n'
-        '../../Chromium.xcassets: error: The output directory "/Users/janedoe/'
-            'chromium/src/out/Default/Chromium.app" does not exist.\n',
-        compile_xcassets.FilterCompilerOutput(
-            '/* com.apple.actool.errors */\n'
-            '/Users/janedoe/chromium/src/Chromium.xcassets: error: The output '
-                'directory "/Users/janedoe/chromium/src/out/Default/Chromium.a'
-                'pp" does not exist.\n'
-            '/* com.apple.actool.compilation-results */\n',
-            self.relative_paths))
-
-  def testSpurious(self):
-    self.assertEquals(
-        '/* com.apple.actool.document.warnings */\n'
-        '../../Chromium.xcassets:./AppIcon.appiconset: warning: A 1024x1024 ap'
-            'p store icon is required for iOS apps\n',
-        compile_xcassets.FilterCompilerOutput(
-            '/* com.apple.actool.document.warnings */\n'
-            '/Users/janedoe/chromium/src/Chromium.xcassets:./AppIcon.appiconse'
-                't: warning: A 1024x1024 app store icon is required for iOS ap'
-                'ps\n'
-            '/* com.apple.actool.document.notices */\n'
-            '/Users/janedoe/chromium/src/Chromium.xcassets:./AppIcon.appiconse'
-                't/[][ipad][76x76][][][1x][][]: notice: (null)\n',
-            self.relative_paths))
-
-  def testComplexError(self):
-    self.assertEquals(
-        '/* com.apple.actool.errors */\n'
-        ': error: Failed to find a suitable device for the type SimDeviceType '
-            ': com.apple.dt.Xcode.IBSimDeviceType.iPad-2x with runtime SimRunt'
-            'ime : 10.3.1 (14E8301) - com.apple.CoreSimulator.SimRuntime.iOS-1'
-            '0-3\n'
-        '    Failure Reason: Failed to create SimDeviceSet at path /Users/jane'
-            'doe/Library/Developer/Xcode/UserData/IB Support/Simulator Devices'
-            '. You\'ll want to check the logs in ~/Library/Logs/CoreSimulator '
-            'to see why creating the SimDeviceSet failed.\n'
-        '    Underlying Errors:\n'
-        '        Description: Failed to initialize simulator device set.\n'
-        '        Failure Reason: Failed to subscribe to notifications from Cor'
-            'eSimulatorService.\n'
-        '        Underlying Errors:\n'
-        '            Description: Error returned in reply to notification requ'
-            'est: Connection invalid\n'
-        '            Failure Reason: Software caused connection abort\n',
-        compile_xcassets.FilterCompilerOutput(
-            '2017-07-07 10:37:27.367 ibtoold[88538:12553239] CoreSimulator det'
-                'ected Xcode.app relocation or CoreSimulatorService version ch'
-                'ange.  Framework path (/Applications/Xcode.app/Contents/Devel'
-                'oper/Library/PrivateFrameworks/CoreSimulator.framework) and v'
-                'ersion (375.21) does not match existing job path (/Library/De'
-                'veloper/PrivateFrameworks/CoreSimulator.framework/Versions/A/'
-                'XPCServices/com.apple.CoreSimulator.CoreSimulatorService.xpc)'
-                ' and version (459.13).  Attempting to remove the stale servic'
-                'e in order to add the expected version.\n'
-            '2017-07-07 10:37:27.625 ibtoold[88538:12553256] CoreSimulatorServ'
-                'ice connection interrupted.  Resubscribing to notifications.\n'
-            '2017-07-07 10:37:27.632 ibtoold[88538:12553264] CoreSimulatorServ'
-                'ice connection became invalid.  Simulator services will no lo'
-                'nger be available.\n'
-            '2017-07-07 10:37:27.642 ibtoold[88538:12553274] CoreSimulatorServ'
-                'ice connection became invalid.  Simulator services will no lo'
-                'nger be available.\n'
-            '/* com.apple.actool.errors */\n'
-            ': error: Failed to find a suitable device for the type SimDeviceT'
-                'ype : com.apple.dt.Xcode.IBSimDeviceType.iPad-2x with runtime'
-                ' SimRuntime : 10.3.1 (14E8301) - com.apple.CoreSimulator.SimR'
-                'untime.iOS-10-3\n'
-            '    Failure Reason: Failed to create SimDeviceSet at path /Users/'
-                'janedoe/Library/Developer/Xcode/UserData/IB Support/Simulator'
-                ' Devices. You\'ll want to check the logs in ~/Library/Logs/Co'
-                'reSimulator to see why creating the SimDeviceSet failed.\n'
-            '    Underlying Errors:\n'
-            '        Description: Failed to initialize simulator device set.\n'
-            '        Failure Reason: Failed to subscribe to notifications from'
-                ' CoreSimulatorService.\n'
-            '        Underlying Errors:\n'
-            '            Description: Error returned in reply to notification '
-                'request: Connection invalid\n'
-            '            Failure Reason: Software caused connection abort\n'
-            '/* com.apple.actool.compilation-results */\n',
-            self.relative_paths))
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/build/toolchain/mac/filter_libtool.py b/build/toolchain/mac/filter_libtool.py
deleted file mode 100644
index 3b16151..0000000
--- a/build/toolchain/mac/filter_libtool.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import re
-import subprocess
-import sys
-
-# This script executes libtool and filters out logspam lines like:
-#    '/path/to/libtool: file: foo.o has no symbols'
-
-BLACKLIST_PATTERNS = map(re.compile, [
-    r'^.*libtool: (?:for architecture: \S* )?file: .* has no symbols$',
-    r'^.*libtool: warning for library: .* the table of contents is empty '
-        r'\(no object file members in the library define global symbols\)$',
-    r'^.*libtool: warning same member name \(\S*\) in output file used for '
-        r'input files: \S* and: \S* \(due to use of basename, truncation, '
-        r'blank padding or duplicate input files\)$',
-])
-
-
-def IsBlacklistedLine(line):
-  """Returns whether the line should be filtered out."""
-  for pattern in BLACKLIST_PATTERNS:
-    if pattern.match(line):
-      return True
-  return False
-
-
-def Main(cmd_list):
-  env = os.environ.copy()
-  # Ref:
-  # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
-  # The problem with this flag is that it resets the file mtime on the file to
-  # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
-  env['ZERO_AR_DATE'] = '1'
-  libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
-  _, err = libtoolout.communicate()
-  for line in err.splitlines():
-    if not IsBlacklistedLine(line):
-      print >>sys.stderr, line
-  # Unconditionally touch the output .a file on the command line if present
-  # and the command succeeded. A bit hacky.
-  if not libtoolout.returncode:
-    for i in range(len(cmd_list) - 1):
-      if cmd_list[i] == '-o' and cmd_list[i+1].endswith('.a'):
-        os.utime(cmd_list[i+1], None)
-        break
-  return libtoolout.returncode
-
-
-if __name__ == '__main__':
-  sys.exit(Main(sys.argv[1:]))
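
A minimal sketch of the filtering this enables, reusing the first blacklist
pattern above; the sample libtool line is made up:

  import re

  no_symbols = re.compile(
      r'^.*libtool: (?:for architecture: \S* )?file: .* has no symbols$')

  sample = '/usr/bin/libtool: file: obj/foo/bar.o has no symbols'
  print(bool(no_symbols.match(sample)))  # True -> the line is dropped from stderr
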
diff --git a/build/toolchain/mac/get_tool_mtime.py b/build/toolchain/mac/get_tool_mtime.py
deleted file mode 100644
index 4106344..0000000
--- a/build/toolchain/mac/get_tool_mtime.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-# Usage: python get_tool_mtime.py path/to/file1.py path/to/file2.py
-#
-# Prints a GN scope with the variable name being the basename sans-extension
-# and the value being the file modification time. A variable is emitted for
-# each file argument on the command line.
-
-if __name__ == '__main__':
-  for f in sys.argv[1:]:
-    variable = os.path.splitext(os.path.basename(f))[0]
-    print '%s = %d' % (variable, os.path.getmtime(f))
diff --git a/build/toolchain/mac/linker_driver.py b/build/toolchain/mac/linker_driver.py
deleted file mode 100755
index 35de9d1..0000000
--- a/build/toolchain/mac/linker_driver.py
+++ /dev/null
@@ -1,230 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import os.path
-import shutil
-import subprocess
-import sys
-
-# The linker_driver.py is responsible for forwarding a linker invocation to
-# the compiler driver, while processing special arguments itself.
-#
-# Usage: linker_driver.py clang++ main.o -L. -llib -o prog -Wcrl,dsym,out
-#
-# On Mac, the logical step of linking is handled by three discrete tools to
-# perform the image link, debug info link, and strip. The linker_driver.py
-# combines these three steps into a single tool.
-#
-# The command passed to the linker_driver.py should be the compiler driver
-# invocation for the linker. It is first invoked unaltered (except for the
-# removal of the special driver arguments, described below). Then the driver
-# performs additional actions, based on these arguments:
-#
-#   -Wcrl,dsym,<dsym_path_prefix>
-#       After invoking the linker, this will run `dsymutil` on the linker's
-#       output, producing a dSYM bundle, stored at dsym_path_prefix. As an
-#       example, if the linker driver were invoked with:
-#         "... -o out/gn/obj/foo/libbar.dylib ... -Wcrl,dsym,out/gn ..."
-#       The resulting dSYM would be out/gn/libbar.dylib.dSYM/.
-#
-#   -Wcrl,unstripped,<unstripped_path_prefix>
-#       After invoking the linker, and before strip, this will save a copy of
-#       the unstripped linker output in the directory unstripped_path_prefix.
-#
-#   -Wcrl,strip,<strip_arguments>
-#       After invoking the linker, and optionally dsymutil, this will run
-#       the strip command on the linker's output. strip_arguments are
-#       comma-separated arguments to be passed to the strip command.
-
-def Main(args):
-  """Main function for the linker driver. Separates out the arguments for
-  the main compiler driver and the linker driver, then invokes all the
-  required tools.
-
-  Args:
-    args: list of string, Arguments to the script.
-  """
-
-  if len(args) < 2:
-    raise RuntimeError("Usage: linker_driver.py [linker-invocation]")
-
-  for i in xrange(len(args)):
-    if args[i] != '--developer_dir':
-      continue
-    os.environ['DEVELOPER_DIR'] = args[i + 1]
-    del args[i:i+2]
-    break
-
-  # Collect arguments to the linker driver (this script) and remove them from
-  # the arguments being passed to the compiler driver.
-  linker_driver_actions = {}
-  compiler_driver_args = []
-  for arg in args[1:]:
-    if arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
-      # Convert driver actions into a map of name => lambda to invoke.
-      driver_action = ProcessLinkerDriverArg(arg)
-      assert driver_action[0] not in linker_driver_actions
-      linker_driver_actions[driver_action[0]] = driver_action[1]
-    else:
-      compiler_driver_args.append(arg)
-
-  linker_driver_outputs = [_FindLinkerOutput(compiler_driver_args)]
-
-  try:
-    # Run the linker by invoking the compiler driver.
-    subprocess.check_call(compiler_driver_args)
-
-    # Run the linker driver actions, in the order specified by the actions list.
-    for action in _LINKER_DRIVER_ACTIONS:
-      name = action[0]
-      if name in linker_driver_actions:
-        linker_driver_outputs += linker_driver_actions[name](args)
-  except:
-    # If a linker driver action failed, remove all the outputs to make the
-    # build step atomic.
-    map(_RemovePath, linker_driver_outputs)
-
-    # Re-report the original failure.
-    raise
-
-
-def ProcessLinkerDriverArg(arg):
-  """Processes a linker driver argument and returns a tuple containing the
-  name and unary lambda to invoke for that linker driver action.
-
-  Args:
-    arg: string, The linker driver argument.
-
-  Returns:
-    A 2-tuple:
-      0: The driver action name, as in _LINKER_DRIVER_ACTIONS.
-      1: A unary lambda that takes the full list of arguments passed to
-         Main(). The lambda should call the linker driver action that
-         corresponds to the argument and return a list of outputs from the
-         action.
-  """
-  if not arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
-    raise ValueError('%s is not a linker driver argument' % (arg,))
-
-  sub_arg = arg[len(_LINKER_DRIVER_ARG_PREFIX):]
-
-  for driver_action in _LINKER_DRIVER_ACTIONS:
-    (name, action) = driver_action
-    if sub_arg.startswith(name):
-      return (name,
-          lambda full_args: action(sub_arg[len(name):], full_args))
-
-  raise ValueError('Unknown linker driver argument: %s' % (arg,))
-
-
-def RunDsymUtil(dsym_path_prefix, full_args):
-  """Linker driver action for -Wcrl,dsym,<dsym-path-prefix>. Invokes dsymutil
-  on the linker's output and produces a dsym file at |dsym_file| path.
-
-  Args:
-    dsym_path_prefix: string, The path at which the dsymutil output should be
-        located.
-    full_args: list of string, Full argument list for the linker driver.
-
-  Returns:
-      list of string, Build step outputs.
-  """
-  if not len(dsym_path_prefix):
-    raise ValueError('Unspecified dSYM output file')
-
-  linker_out = _FindLinkerOutput(full_args)
-  base = os.path.basename(linker_out)
-  dsym_out = os.path.join(dsym_path_prefix, base + '.dSYM')
-
-  # Remove old dSYMs before invoking dsymutil.
-  _RemovePath(dsym_out)
-  subprocess.check_call(['xcrun', 'dsymutil', '-o', dsym_out, linker_out])
-  return [dsym_out]
-
-
-def RunSaveUnstripped(unstripped_path_prefix, full_args):
-  """Linker driver action for -Wcrl,unstripped,<unstripped_path_prefix>. Copies
-  the linker output to |unstripped_path_prefix| before stripping.
-
-  Args:
-    unstripped_path_prefix: string, The path at which the unstripped output
-        should be located.
-    full_args: list of string, Full argument list for the linker driver.
-
-  Returns:
-    list of string, Build step outputs.
-  """
-  if not len(unstripped_path_prefix):
-    raise ValueError('Unspecified unstripped output file')
-
-  linker_out = _FindLinkerOutput(full_args)
-  base = os.path.basename(linker_out)
-  unstripped_out = os.path.join(unstripped_path_prefix, base + '.unstripped')
-
-  shutil.copyfile(linker_out, unstripped_out)
-  return [unstripped_out]
-
-
-def RunStrip(strip_args_string, full_args):
-  """Linker driver action for -Wcrl,strip,<strip_arguments>.
-
-  Args:
-      strip_args_string: string, Comma-separated arguments for `strip`.
-      full_args: list of string, Full arguments for the linker driver.
-
-  Returns:
-      list of string, Build step outputs.
-  """
-  strip_command = ['xcrun', 'strip']
-  if len(strip_args_string) > 0:
-    strip_command += strip_args_string.split(',')
-  strip_command.append(_FindLinkerOutput(full_args))
-  subprocess.check_call(strip_command)
-  return []
-
-
-def _FindLinkerOutput(full_args):
-  """Finds the output of the linker by looking for the output flag in its
-  argument list. As this is a required linker argument, raises an error if it
-  cannot be found.
-  """
-  # The linker_driver.py script may be used to wrap either the compiler linker
-  # (uses -o to configure the output) or lipo (uses -output to configure the
-  # output). Since wrapping the compiler linker is the most likely possibility,
-  # use try/except and fall back to checking for -output if -o is not found.
-  try:
-    output_flag_index = full_args.index('-o')
-  except ValueError:
-    output_flag_index = full_args.index('-output')
-  return full_args[output_flag_index + 1]
-
-
-def _RemovePath(path):
-  """Removes the file or directory at |path| if it exists."""
-  if os.path.exists(path):
-    if os.path.isdir(path):
-      shutil.rmtree(path)
-    else:
-      os.unlink(path)
-
-
-_LINKER_DRIVER_ARG_PREFIX = '-Wcrl,'
-
-"""List of linker driver actions. The sort order of this list affects the
-order in which the actions are invoked. The first item in the tuple is the
-argument's -Wcrl,<sub_argument> and the second is the function to invoke.
-"""
-_LINKER_DRIVER_ACTIONS = [
-    ('dsym,', RunDsymUtil),
-    ('unstripped,', RunSaveUnstripped),
-    ('strip,', RunStrip),
-]
-
-
-if __name__ == '__main__':
-  Main(sys.argv)
-  sys.exit(0)
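
For orientation, a small Python sketch of the -Wcrl contract described in the header comment of the removed linker_driver.py; the paths below are hypothetical:

  # The real script strips the -Wcrl,* arguments before invoking the compiler
  # driver, then runs the extra tools described above.
  args = ['clang++', 'main.o', '-o', 'out/gn/obj/foo/libbar.dylib',
          '-Wcrl,dsym,out/gn', '-Wcrl,strip,-x,-S']

  link_cmd = [a for a in args if not a.startswith('-Wcrl,')]
  linker_out = link_cmd[link_cmd.index('-o') + 1]   # 'out/gn/obj/foo/libbar.dylib'

  # -Wcrl,dsym,out/gn  -> dsymutil after the link, output <prefix>/<basename>.dSYM
  dsym_cmd = ['xcrun', 'dsymutil', '-o', 'out/gn/libbar.dylib.dSYM', linker_out]
  # -Wcrl,strip,-x,-S  -> strip runs last, comma-separated flags split into arguments
  strip_cmd = ['xcrun', 'strip', '-x', '-S', linker_out]
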
diff --git a/build/toolchain/nacl/BUILD.gn b/build/toolchain/nacl/BUILD.gn
deleted file mode 100644
index 2083de8..0000000
--- a/build/toolchain/nacl/BUILD.gn
+++ /dev/null
@@ -1,270 +0,0 @@
-# Copyright (c) 2014 The Native Client Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/sysroot.gni")
-import("//build/config/nacl/config.gni")
-import("//build/toolchain/nacl_toolchain.gni")
-
-# Add the toolchain revision as a preprocessor define so that sources are
-# rebuilt when a toolchain is updated.
-# Ideally we could use the toolchain deps feature, but currently that feature
-# is buggy and does not trigger a rebuild.
-# https://code.google.com/p/chromium/issues/detail?id=431880
-# Calls to get the toolchain revision are relatively slow, so do them all in a
-# single batch to amortize python startup, etc.
-revisions = exec_script("//native_client/build/get_toolchain_revision.py",
-                        [
-                          "nacl_x86_glibc",
-                          "nacl_arm_glibc",
-                          "pnacl_newlib",
-                        ],
-                        "trim list lines")
-nacl_x86_glibc_rev = revisions[0]
-nacl_arm_glibc_rev = revisions[1]
-
-pnacl_newlib_rev = revisions[2]
-
-if (host_os == "win") {
-  toolsuffix = ".exe"
-} else {
-  toolsuffix = ""
-}
-
-# The PNaCl toolchain tools are all wrapper scripts rather than binary
-# executables.  On POSIX systems, nobody cares what kind of executable
-# file you are.  But on Windows, scripts (.bat files) cannot be run
-# directly and need the Windows shell (cmd.exe) specified explicitly.
-if (host_os == "win") {
-  # NOTE!  The //build/toolchain/gcc_*_wrapper.py scripts recognize
-  # this exact prefix string, so they must be updated if this string
-  # is changed in any way.
-  scriptprefix = "cmd /c call "
-  scriptsuffix = ".bat"
-} else {
-  scriptprefix = ""
-  scriptsuffix = ""
-}
-
-# When the compilers are run via goma or ccache rather than directly by
-# GN/Ninja, the goma/ccache wrapper handles .bat files but gets confused
-# by being given the scriptprefix.
-if (host_os == "win" && !use_goma && cc_wrapper == "") {
-  compiler_scriptprefix = scriptprefix
-} else {
-  compiler_scriptprefix = ""
-}
-
-template("pnacl_toolchain") {
-  assert(defined(invoker.executable_extension),
-         "Must define executable_extension")
-
-  nacl_toolchain(target_name) {
-    toolchain_package = "pnacl_newlib"
-    toolchain_revision = pnacl_newlib_rev
-    toolprefix =
-        rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/pnacl-",
-                    root_build_dir)
-
-    cc = compiler_scriptprefix + toolprefix + "clang" + scriptsuffix
-    cxx = compiler_scriptprefix + toolprefix + "clang++" + scriptsuffix
-    ar = scriptprefix + toolprefix + "ar" + scriptsuffix
-    readelf = scriptprefix + toolprefix + "readelf" + scriptsuffix
-    nm = scriptprefix + toolprefix + "nm" + scriptsuffix
-    if (defined(invoker.strip)) {
-      strip = scriptprefix + toolprefix + invoker.strip + scriptsuffix
-    }
-    forward_variables_from(invoker,
-                           [
-                             "executable_extension",
-                             "is_clang_analysis_supported",
-                           ])
-
-    # Note this is not the usual "ld = cxx" because "ld" uses are
-    # never run via goma, so this needs scriptprefix.
-    ld = scriptprefix + toolprefix + "clang++" + scriptsuffix
-
-    toolchain_args = {
-      is_clang = true
-      current_cpu = "pnacl"
-      use_lld = false
-    }
-  }
-}
-
-pnacl_toolchain("newlib_pnacl") {
-  executable_extension = ".pexe"
-
-  # The pnacl-finalize tool turns a .pexe.debug file into a .pexe file.
-  # It's very similar in purpose to the traditional "strip" utility: it
-  # turns what comes out of the linker into what you actually want to
-  # distribute and run.  PNaCl doesn't have a "strip"-like utility that
-  # you ever actually want to use other than pnacl-finalize, so just
-  # make pnacl-finalize the strip tool rather than adding an additional
-  # step like "postlink" to run pnacl-finalize.
-  strip = "finalize"
-}
-
-pnacl_toolchain("newlib_pnacl_nonsfi") {
-  executable_extension = ""
-  strip = "strip"
-
-  if (use_clang_static_analyzer) {
-    is_clang_analysis_supported = false
-  }
-}
-
-template("nacl_glibc_toolchain") {
-  toolchain_cpu = target_name
-  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
-  assert(defined(invoker.toolchain_package), "Must define toolchain_package")
-  assert(defined(invoker.toolchain_revision), "Must define toolchain_revision")
-  forward_variables_from(invoker,
-                         [
-                           "toolchain_package",
-                           "toolchain_revision",
-                         ])
-
-  toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
-                               invoker.toolchain_tuple + "-",
-                           root_build_dir)
-
-  nacl_toolchain("glibc_" + toolchain_cpu) {
-    cc = toolprefix + "gcc" + toolsuffix
-    cxx = toolprefix + "g++" + toolsuffix
-    ar = toolprefix + "ar" + toolsuffix
-    ld = cxx
-    readelf = toolprefix + "readelf" + toolsuffix
-    nm = toolprefix + "nm" + toolsuffix
-    strip = toolprefix + "strip" + toolsuffix
-
-    toolchain_args = {
-      current_cpu = toolchain_cpu
-      is_clang = false
-      is_nacl_glibc = true
-      use_lld = false
-    }
-  }
-}
-
-nacl_glibc_toolchain("x86") {
-  toolchain_package = "nacl_x86_glibc"
-  toolchain_revision = nacl_x86_glibc_rev
-
-  # Rely on the :compiler_cpu_abi config adding the -m32 flag here rather
-  # than using the i686-nacl binary directly.  This is because i686-nacl-gcc
-  # is a shell script wrapper around x86_64-nacl-gcc and goma has trouble with
-  # compiler executables that are shell scripts (so the i686 'compiler' is not
-  # currently in goma).
-  toolchain_tuple = "x86_64-nacl"
-}
-
-nacl_glibc_toolchain("x64") {
-  toolchain_package = "nacl_x86_glibc"
-  toolchain_revision = nacl_x86_glibc_rev
-  toolchain_tuple = "x86_64-nacl"
-}
-
-nacl_glibc_toolchain("arm") {
-  toolchain_package = "nacl_arm_glibc"
-  toolchain_revision = nacl_arm_glibc_rev
-  toolchain_tuple = "arm-nacl"
-}
-
-template("nacl_clang_toolchain") {
-  toolchain_cpu = target_name
-  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
-
-  toolchain_package = "pnacl_newlib"
-  toolchain_revision = pnacl_newlib_rev
-  toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
-                               invoker.toolchain_tuple + "-",
-                           root_build_dir)
-
-  nacl_toolchain("clang_newlib_" + toolchain_cpu) {
-    cc = toolprefix + "clang" + toolsuffix
-    cxx = toolprefix + "clang++" + toolsuffix
-    ar = toolprefix + "ar" + toolsuffix
-    ld = cxx
-    readelf = toolprefix + "readelf" + toolsuffix
-    nm = toolprefix + "nm" + toolsuffix
-    strip = toolprefix + "strip" + toolsuffix
-
-    toolchain_args = {
-      current_cpu = toolchain_cpu
-      is_clang = true
-      use_lld = false
-    }
-  }
-}
-
-template("nacl_irt_toolchain") {
-  toolchain_cpu = target_name
-  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
-
-  toolchain_package = "pnacl_newlib"
-  toolchain_revision = pnacl_newlib_rev
-  toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
-                               invoker.toolchain_tuple + "-",
-                           root_build_dir)
-
-  link_irt = rebase_path("//native_client/build/link_irt.py", root_build_dir)
-
-  tls_edit_label =
-      "//native_client/src/tools/tls_edit:tls_edit($host_toolchain)"
-  host_toolchain_out_dir =
-      rebase_path(get_label_info(tls_edit_label, "root_out_dir"),
-                  root_build_dir)
-  tls_edit = "${host_toolchain_out_dir}/tls_edit"
-
-  nacl_toolchain("irt_" + toolchain_cpu) {
-    cc = toolprefix + "clang" + toolsuffix
-    cxx = toolprefix + "clang++" + toolsuffix
-    ar = toolprefix + "ar" + toolsuffix
-    readelf = toolprefix + "readelf" + toolsuffix
-    nm = toolprefix + "nm" + toolsuffix
-    strip = toolprefix + "strip" + toolsuffix
-
-    # Some IRT implementations (notably, Chromium's) contain C++ code,
-    # so we need to link w/ the C++ linker.
-    ld = "${python_path} ${link_irt} --tls-edit=${tls_edit} --link-cmd=${cxx} --readelf-cmd=${readelf}"
-
-    toolchain_args = {
-      current_cpu = toolchain_cpu
-      is_clang = true
-      use_lld = false
-    }
-
-    # TODO(ncbray): depend on link script
-    deps = [
-      tls_edit_label,
-    ]
-  }
-}
-
-template("nacl_clang_toolchains") {
-  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
-  nacl_clang_toolchain(target_name) {
-    toolchain_tuple = invoker.toolchain_tuple
-  }
-  nacl_irt_toolchain(target_name) {
-    toolchain_tuple = invoker.toolchain_tuple
-  }
-}
-
-nacl_clang_toolchains("x86") {
-  # Rely on :compiler_cpu_abi adding -m32.  See nacl_x86_glibc above.
-  toolchain_tuple = "x86_64-nacl"
-}
-
-nacl_clang_toolchains("x64") {
-  toolchain_tuple = "x86_64-nacl"
-}
-
-nacl_clang_toolchains("arm") {
-  toolchain_tuple = "arm-nacl"
-}
-
-nacl_clang_toolchains("mipsel") {
-  toolchain_tuple = "mipsel-nacl"
-}
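
A small Python sketch of how the pnacl_toolchain template above composes its compiler command string; the toolchain path used here is hypothetical (the real prefix comes from rebase_path("${nacl_toolchain_dir}/pnacl_newlib/bin/pnacl-", root_build_dir)):

  def pnacl_cc(host_os, use_goma=False, cc_wrapper=''):
      toolprefix = 'toolchain/pnacl_newlib/bin/pnacl-'  # hypothetical path
      if host_os != 'win':
          return toolprefix + 'clang'
      # On Windows the PNaCl tools are .bat wrappers; they need "cmd /c call "
      # unless goma/ccache (which handle .bat files themselves) wrap the compiler.
      prefix = '' if (use_goma or cc_wrapper != '') else 'cmd /c call '
      return prefix + toolprefix + 'clang.bat'

  # pnacl_cc('linux') -> 'toolchain/pnacl_newlib/bin/pnacl-clang'
  # pnacl_cc('win')   -> 'cmd /c call toolchain/pnacl_newlib/bin/pnacl-clang.bat'
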
diff --git a/build/toolchain/nacl_toolchain.gni b/build/toolchain/nacl_toolchain.gni
deleted file mode 100644
index 566f071..0000000
--- a/build/toolchain/nacl_toolchain.gni
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (c) 2014 The Native Client Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/nacl/config.gni")
-import("//build/toolchain/gcc_toolchain.gni")
-
-# This template defines a NaCl toolchain.
-#
-# It requires the following variables specifying the executables to run:
-#  - cc
-#  - cxx
-#  - ar
-#  - ld
-
-template("nacl_toolchain") {
-  assert(defined(invoker.cc), "nacl_toolchain() must specify a \"cc\" value")
-  assert(defined(invoker.cxx), "nacl_toolchain() must specify a \"cxx\" value")
-  assert(defined(invoker.ar), "nacl_toolchain() must specify a \"ar\" value")
-  assert(defined(invoker.ld), "nacl_toolchain() must specify a \"ld\" value")
-  gcc_toolchain(target_name) {
-    if (defined(invoker.executable_extension)) {
-      executable_extension = invoker.executable_extension
-    } else {
-      executable_extension = ".nexe"
-    }
-    rebuild_define = "NACL_TC_REV=" + invoker.toolchain_revision
-
-    forward_variables_from(invoker,
-                           [
-                             "ar",
-                             "cc",
-                             "cxx",
-                             "deps",
-                             "is_clang_analysis_supported",
-                             "ld",
-                             "link_outputs",
-                             "nm",
-                             "readelf",
-                             "strip",
-                           ])
-
-    toolchain_args = {
-      # Use all values set on the invoker's toolchain_args.
-      forward_variables_from(invoker.toolchain_args, "*")
-
-      current_os = "nacl"
-
-      # We do not support component builds with the NaCl toolchains.
-      is_component_build = false
-
-      # We do not support tcmalloc in the NaCl toolchains.
-      use_allocator = "none"
-
-      # We do not support clang code coverage in the NaCl toolchains.
-      use_clang_coverage = false
-    }
-  }
-}
diff --git a/build/toolchain/toolchain.gni b/build/toolchain/toolchain.gni
deleted file mode 100644
index 5b4ca3f..0000000
--- a/build/toolchain/toolchain.gni
+++ /dev/null
@@ -1,113 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Toolchain-related configuration that may be needed outside the context of the
-# toolchain() rules themselves.
-
-import("//build_overrides/build.gni")
-import("//build/config/chrome_build.gni")
-
-declare_args() {
-  # If this is set to true, or if LLVM_FORCE_HEAD_REVISION is set to 1
-  # in the environment, we use the revision in the llvm repo to determine
-  # the CLANG_REVISION to use, instead of the version hard-coded into
-  # //tools/clang/scripts/update.py. This should only be used in
-  # conjunction with setting LLVM_FORCE_HEAD_REVISION in the
-  # environment when `gclient runhooks` is run as well.
-  llvm_force_head_revision = false
-
-  # Compile with Xcode version of clang instead of hermetic version shipped
-  # with the build. Used on iOS to ship official builds (as they are built
-  # with the version of clang shipped with Xcode).
-  use_xcode_clang = is_ios && is_official_build
-
-  # Used for binary size analysis.
-  # Currently disabled on LLD because of a bug (fixed upstream).
-  # See https://crbug.com/716209.
-  generate_linker_map = is_android && is_official_build
-
-  # Use absolute file paths in the compiler diagnostics and __FILE__ macro
-  # if needed.
-  msvc_use_absolute_paths = false
-}
-
-if (generate_linker_map) {
-  assert(
-      is_official_build,
-      "Linker map files should only be generated when is_official_build = true")
-  assert(current_os == "android" || target_os == "linux",
-         "Linker map files should only be generated for Android and Linux")
-}
-
-# The path to the hermetic install of Xcode. Only relevant when
-# use_system_xcode = false.
-hermetic_xcode_path =
-    rebase_path("//build/${target_os}_files/Xcode.app", "", root_build_dir)
-
-declare_args() {
-  if (is_clang) {
-    # Clang compiler version. Clang files are placed at version-dependent paths.
-    clang_version = "7.0.0"
-  }
-}
-
-# Check target_os here instead of is_ios as this file is loaded for secondary
-# toolchain (host toolchain in particular) but the argument is the same for
-# all toolchains.
-assert(!use_xcode_clang || target_os == "ios",
-       "Using Xcode's clang is only supported in iOS builds")
-
-# Subdirectory within root_out_dir for shared library files.
-# TODO(agrieve): GYP sets this to "lib" for Linux & Android, but this won't work
-#     in GN until support for loadable_module() is added.
-#     See: https://codereview.chromium.org/1236503002/
-shlib_subdir = "."
-
-# Root out dir for shared library files.
-root_shlib_dir = root_out_dir
-if (shlib_subdir != ".") {
-  root_shlib_dir += "/$shlib_subdir"
-}
-
-# Extension for shared library files (including leading dot).
-if (is_mac || is_ios) {
-  shlib_extension = ".dylib"
-} else if (is_android && is_component_build) {
-  # By appending .cr, we prevent name collisions with libraries already
-  # loaded by the Android zygote.
-  shlib_extension = ".cr.so"
-} else if (is_posix || is_fuchsia) {
-  shlib_extension = ".so"
-} else if (is_win) {
-  shlib_extension = ".dll"
-} else {
-  assert(false, "Platform not supported")
-}
-
-# Prefix for shared library files.
-if (is_posix || is_fuchsia) {
-  shlib_prefix = "lib"
-} else {
-  shlib_prefix = ""
-}
-
-# While other "tool"s in a toolchain are specific to the target of that
-# toolchain, the "stamp" and "copy" tools are really generic to the host;
-# but each toolchain must define them separately.  GN doesn't allow a
-# template instantiation inside a toolchain definition, so some boilerplate
-# has to be repeated in each toolchain to define these two tools.  These
-# four variables reduce the duplication in that boilerplate.
-stamp_description = "STAMP {{output}}"
-copy_description = "COPY {{source}} {{output}}"
-if (host_os == "win") {
-  _tool_wrapper_path =
-      rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir)
-
-  stamp_command = "cmd /c type nul > \"{{output}}\""
-  copy_command =
-      "$python_path $_tool_wrapper_path recursive-mirror {{source}} {{output}}"
-} else {
-  stamp_command = "touch {{output}}"
-  copy_command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
-}
diff --git a/build/toolchain/win/BUILD.gn b/build/toolchain/win/BUILD.gn
deleted file mode 100644
index 4d9d1f4..0000000
--- a/build/toolchain/win/BUILD.gn
+++ /dev/null
@@ -1,469 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/clang/clang.gni")
-import("//build/config/compiler/compiler.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/config/win/visual_studio_version.gni")
-import("//build/toolchain/clang_static_analyzer.gni")
-import("//build/toolchain/goma.gni")
-import("//build/toolchain/toolchain.gni")
-
-# Should only be running on Windows.
-assert(is_win)
-
-# Setup the Visual Studio state.
-#
-# Its arguments are the VS path and the compiler wrapper tool. It will write
-# "environment.x86" and "environment.x64" to the build directory and return a
-# list to us.
-
-# This tool is used as a wrapper for various commands below.
-tool_wrapper_path = rebase_path("tool_wrapper.py", root_build_dir)
-
-if (use_goma) {
-  if (host_os == "win") {
-    goma_prefix = "$goma_dir/gomacc.exe "
-  } else {
-    goma_prefix = "$goma_dir/gomacc "
-  }
-} else {
-  goma_prefix = ""
-}
-
-# Copy the VS runtime DLL for the default toolchain to the root build directory
-# so things will run.
-if (current_toolchain == default_toolchain) {
-  if (is_debug) {
-    configuration_name = "Debug"
-  } else {
-    configuration_name = "Release"
-  }
-  exec_script("../../vs_toolchain.py",
-              [
-                "copy_dlls",
-                rebase_path(root_build_dir),
-                configuration_name,
-                target_cpu,
-              ])
-}
-
-# Parameters:
-#   environment: File name of environment file.
-#
-# You would also define a toolchain_args variable with at least these set:
-#   current_cpu: current_cpu to pass as a build arg
-#   current_os: current_os to pass as a build arg
-template("msvc_toolchain") {
-  toolchain(target_name) {
-    # When invoking this toolchain not as the default one, these args will be
-    # passed to the build. They are ignored when this is the default toolchain.
-    assert(defined(invoker.toolchain_args))
-    toolchain_args = {
-      if (defined(invoker.toolchain_args)) {
-        forward_variables_from(invoker.toolchain_args, "*")
-      }
-
-      # This value needs to be passed through unchanged.
-      host_toolchain = host_toolchain
-    }
-
-    # Make these apply to all tools below.
-    lib_switch = ""
-    lib_dir_switch = "/LIBPATH:"
-
-    # Object files go in this directory.
-    object_subdir = "{{target_out_dir}}/{{label_name}}"
-
-    env = invoker.environment
-
-    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
-    # toolchain args, use those values, otherwise default to the global one.
-    # This works because the only reasonable override that toolchains might
-    # supply for these values are to force-disable them.
-    if (defined(toolchain_args.is_clang)) {
-      toolchain_uses_clang = toolchain_args.is_clang
-    } else {
-      toolchain_uses_clang = is_clang
-    }
-
-    cl = invoker.cl
-
-    if (toolchain_uses_clang && use_clang_static_analyzer) {
-      analyzer_prefix =
-          "$python_path " +
-          rebase_path("//build/toolchain/clang_static_analyzer_wrapper.py",
-                      root_build_dir) + " --mode=cl"
-      cl = "${analyzer_prefix} ${cl}"
-    }
-
-    if (use_lld) {
-      if (host_os == "win") {
-        lld_link = "lld-link.exe"
-      } else {
-        lld_link = "lld-link"
-      }
-      prefix = rebase_path("$clang_base_path/bin", root_build_dir)
-
-      # lld-link includes a replacement for lib.exe that can produce thin
-      # archives and understands bitcode (for lto builds).
-      lib = "$prefix/$lld_link /lib /llvmlibthin"
-      link = "$prefix/$lld_link"
-      if (host_os != "win") {
-        # See comment adding --rsp-quoting to $cl above for more information.
-        link = "$link --rsp-quoting=posix"
-      }
-    } else {
-      lib = "lib.exe"
-      link = "link.exe"
-    }
-
-    # If possible, pass system includes as flags to the compiler.  When that's
-    # not possible, load a full environment file (containing %INCLUDE% and
-    # %PATH%) -- e.g. 32-bit MSVS builds require %PATH% to be set and just
-    # passing in a list of include directories isn't enough.
-    if (defined(invoker.sys_include_flags)) {
-      env_wrapper = ""
-      sys_include_flags = "${invoker.sys_include_flags} "  # Note trailing space.
-    } else {
-      # clang-cl doesn't need this env hoop, so omit it there.
-      assert(!toolchain_uses_clang)
-      env_wrapper = "ninja -t msvc -e $env -- "  # Note trailing space.
-      sys_include_flags = ""
-    }
-
-    clflags = ""
-
-    # Pass /FC flag to the compiler if needed.
-    if (msvc_use_absolute_paths) {
-      clflags += "/FC "
-    }
-
-    tool("cc") {
-      precompiled_header_type = "msvc"
-      pdbname = "{{target_out_dir}}/{{label_name}}_c.pdb"
-
-      # Label names may have spaces in them so the pdbname must be quoted. The
-      # source and output don't need to be quoted because GN knows they're a
-      # full file name and will quote automatically when necessary.
-      depsformat = "msvc"
-      description = "CC {{output}}"
-      outputs = [
-        "$object_subdir/{{source_name_part}}.obj",
-      ]
-
-      command = "$env_wrapper$cl /nologo /showIncludes ${clflags} $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} /c {{source}} /Fo{{output}} /Fd\"$pdbname\""
-    }
-
-    tool("cxx") {
-      precompiled_header_type = "msvc"
-
-      # The PDB name needs to be different between C and C++ compiled files.
-      pdbname = "{{target_out_dir}}/{{label_name}}_cc.pdb"
-
-      # See comment in CC tool about quoting.
-      depsformat = "msvc"
-      description = "CXX {{output}}"
-      outputs = [
-        "$object_subdir/{{source_name_part}}.obj",
-      ]
-
-      command = "$env_wrapper$cl /nologo /showIncludes ${clflags} $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} /c {{source}} /Fo{{output}} /Fd\"$pdbname\""
-    }
-
-    tool("rc") {
-      command = "$python_path $tool_wrapper_path rc-wrapper $env rc.exe {{defines}} {{include_dirs}} /fo{{output}} {{source}}"
-      depsformat = "msvc"
-      outputs = [
-        "$object_subdir/{{source_name_part}}.res",
-      ]
-      description = "RC {{output}}"
-    }
-
-    tool("asm") {
-      if (toolchain_args.current_cpu == "x64") {
-        ml = "ml64.exe"
-      } else {
-        ml = "ml.exe"
-      }
-      command = "$python_path $tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} /c /Fo{{output}} {{source}}"
-      description = "ASM {{output}}"
-      outputs = [
-        "$object_subdir/{{source_name_part}}.obj",
-      ]
-    }
-
-    tool("alink") {
-      rspfile = "{{output}}.rsp"
-      command = "$python_path $tool_wrapper_path link-wrapper $env False $lib /nologo {{arflags}} /OUT:{{output}} @$rspfile"
-      description = "LIB {{output}}"
-      outputs = [
-        # Ignore {{output_extension}} and always use .lib, there's no reason to
-        # allow targets to override this extension on Windows.
-        "{{output_dir}}/{{target_output_name}}.lib",
-      ]
-      default_output_extension = ".lib"
-      default_output_dir = "{{target_out_dir}}"
-
-      # The use of inputs_newline is to work around a fixed per-line buffer
-      # size in the linker.
-      rspfile_content = "{{inputs_newline}}"
-    }
-
-    tool("solink") {
-      dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"  # e.g. foo.dll
-      libname = "${dllname}.lib"  # e.g. foo.dll.lib
-      pdbname = "${dllname}.pdb"
-      rspfile = "${dllname}.rsp"
-      pool = "//build/toolchain:link_pool($default_toolchain)"
-
-      command = "$python_path $tool_wrapper_path link-wrapper $env False $link /nologo /IMPLIB:$libname /DLL /OUT:$dllname /PDB:$pdbname @$rspfile"
-
-      default_output_extension = ".dll"
-      default_output_dir = "{{root_out_dir}}"
-      description = "LINK(DLL) {{output}}"
-      outputs = [
-        dllname,
-        libname,
-      ]
-      link_output = libname
-      depend_output = libname
-      runtime_outputs = [ dllname ]
-      if (symbol_level != 0) {
-        outputs += [ pdbname ]
-        runtime_outputs += [ pdbname ]
-      }
-
-      # Since the above command only updates the .lib file when it changes, ask
-      # Ninja to check if the timestamp actually changed to know if downstream
-      # dependencies should be recompiled.
-      restat = true
-
-      # The use of inputs_newline is to work around a fixed per-line buffer
-      # size in the linker.
-      rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
-    }
-
-    tool("solink_module") {
-      dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"  # e.g. foo.dll
-      pdbname = "${dllname}.pdb"
-      rspfile = "${dllname}.rsp"
-      pool = "//build/toolchain:link_pool($default_toolchain)"
-
-      command = "$python_path $tool_wrapper_path link-wrapper $env False $link /nologo /DLL /OUT:$dllname /PDB:$pdbname @$rspfile"
-
-      default_output_extension = ".dll"
-      default_output_dir = "{{root_out_dir}}"
-      description = "LINK_MODULE(DLL) {{output}}"
-      outputs = [
-        dllname,
-      ]
-      if (symbol_level != 0) {
-        outputs += [ pdbname ]
-      }
-      runtime_outputs = outputs
-
-      # The use of inputs_newline is to work around a fixed per-line buffer
-      # size in the linker.
-      rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
-    }
-
-    tool("link") {
-      exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
-      pdbname = "$exename.pdb"
-      rspfile = "$exename.rsp"
-      pool = "//build/toolchain:link_pool($default_toolchain)"
-
-      command = "$python_path $tool_wrapper_path link-wrapper $env False $link /nologo /OUT:$exename /PDB:$pdbname @$rspfile"
-
-      if (host_os == "win") {
-        shellprefix = "cmd /c"
-      } else {
-        shellprefix = ""
-      }
-      not_needed([ "shellprefix" ])
-
-      if (is_official_build) {
-        # On bots, the binary's PDB grows and eventually exceeds 4G, causing the
-        # link to fail. As there's no utility in keeping the PDB around
-        # incrementally anyway in this config (because we're doing
-        # non-incremental LTCG builds), delete it before linking.
-        command = "$shellprefix $python_path $tool_wrapper_path delete-file $pdbname && $command"
-      }
-
-      default_output_extension = ".exe"
-      default_output_dir = "{{root_out_dir}}"
-      description = "LINK {{output}}"
-      outputs = [
-        exename,
-      ]
-      if (symbol_level != 0) {
-        outputs += [ pdbname ]
-      }
-      runtime_outputs = outputs
-
-      # The use of inputs_newline is to work around a fixed per-line buffer
-      # size in the linker.
-      rspfile_content = "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}}"
-    }
-
-    # These two are really entirely generic, but have to be repeated in
-    # each toolchain because GN doesn't allow a template to be used here.
-    # See //build/toolchain/toolchain.gni for details.
-    tool("stamp") {
-      command = stamp_command
-      description = stamp_description
-      pool = "//build/toolchain:action_pool($default_toolchain)"
-    }
-    tool("copy") {
-      command = copy_command
-      description = copy_description
-      pool = "//build/toolchain:action_pool($default_toolchain)"
-    }
-
-    tool("action") {
-      pool = "//build/toolchain:action_pool($default_toolchain)"
-    }
-  }
-}
-
-if (host_os == "win") {
-  clang_cl = "clang-cl.exe"
-} else {
-  clang_cl = "clang-cl"
-}
-
-if (target_cpu == "x86" || target_cpu == "x64") {
-  win_build_host_cpu = target_cpu
-} else {
-  win_build_host_cpu = host_cpu
-}
-
-# x86, arm and arm64 build cpu toolchains for Windows (not WinUWP). Only
-# define when the build cpu is one of these architectures since we don't
-# do any cross compiles when targeting x64 (the build does generate
-# some 64-bit stuff from x86/arm/arm64 target builds).
-if (win_build_host_cpu != "x64") {
-  build_cpu_toolchain_data = exec_script("setup_toolchain.py",
-                                         [
-                                           visual_studio_path,
-                                           windows_sdk_path,
-                                           visual_studio_runtime_dirs,
-                                           host_os,
-                                           win_build_host_cpu,
-                                           "environment." + win_build_host_cpu,
-                                         ],
-                                         "scope")
-
-  msvc_toolchain(win_build_host_cpu) {
-    environment = "environment." + win_build_host_cpu
-    cl = "${goma_prefix}\"${build_cpu_toolchain_data.vc_bin_dir}/cl.exe\""
-    toolchain_args = {
-      current_os = "win"
-      current_cpu = win_build_host_cpu
-      is_clang = false
-    }
-  }
-
-  msvc_toolchain("win_clang_" + win_build_host_cpu) {
-    environment = "environment." + win_build_host_cpu
-    prefix = rebase_path("$clang_base_path/bin", root_build_dir)
-    cl = "${goma_prefix}$prefix/${clang_cl}"
-    sys_include_flags = "${build_cpu_toolchain_data.include_flags_imsvc}"
-
-    toolchain_args = {
-      current_os = "win"
-      current_cpu = win_build_host_cpu
-      is_clang = true
-    }
-  }
-}
-
-# 64-bit toolchains.
-x64_toolchain_data = exec_script("setup_toolchain.py",
-                                 [
-                                   visual_studio_path,
-                                   windows_sdk_path,
-                                   visual_studio_runtime_dirs,
-                                   "win",
-                                   "x64",
-                                   "environment.x64",
-                                 ],
-                                 "scope")
-
-template("win_x64_toolchains") {
-  msvc_toolchain(target_name) {
-    environment = "environment.x64"
-    cl = "${goma_prefix}\"${x64_toolchain_data.vc_bin_dir}/cl.exe\""
-
-    toolchain_args = {
-      if (defined(invoker.toolchain_args)) {
-        forward_variables_from(invoker.toolchain_args, "*")
-      }
-      is_clang = false
-      current_os = "win"
-      current_cpu = "x64"
-    }
-  }
-
-  msvc_toolchain("win_clang_" + target_name) {
-    environment = "environment.x64"
-    prefix = rebase_path("$clang_base_path/bin", root_build_dir)
-    cl = "${goma_prefix}$prefix/${clang_cl}"
-    sys_include_flags = "${x64_toolchain_data.include_flags_imsvc}"
-
-    toolchain_args = {
-      if (defined(invoker.toolchain_args)) {
-        forward_variables_from(invoker.toolchain_args, "*")
-      }
-      is_clang = true
-      current_os = "win"
-      current_cpu = "x64"
-    }
-  }
-}
-
-win_x64_toolchains("x64") {
-  toolchain_args = {
-    # Use the defaults.
-  }
-}
-
-# The nacl_win64 toolchain is nearly identical to the plain x64 toolchain.
-# It's used solely for building nacl64.exe (//components/nacl/broker:nacl64).
-# The only reason it's a separate toolchain is so that it can force
-# is_component_build to false in the toolchain_args() block, because
-# building nacl64.exe in component style does not work.
-win_x64_toolchains("nacl_win64") {
-  toolchain_args = {
-    is_component_build = false
-  }
-}
-
-# WinUWP toolchains. Only define these when targeting them.
-
-if (target_os == "winuwp") {
-  assert(target_cpu == "x64" || target_cpu == "x86" || target_cpu == "arm" ||
-         target_cpu == "arm64")
-  store_cpu_toolchain_data = exec_script("setup_toolchain.py",
-                                         [
-                                           visual_studio_path,
-                                           windows_sdk_path,
-                                           visual_studio_runtime_dirs,
-                                           target_os,
-                                           target_cpu,
-                                           "environment.store_" + target_cpu,
-                                         ],
-                                         "scope")
-
-  msvc_toolchain("uwp_" + target_cpu) {
-    environment = "environment.store_" + target_cpu
-    cl = "${goma_prefix}\"${store_cpu_toolchain_data.vc_bin_dir}/cl.exe\""
-    toolchain_args = {
-      current_os = "winuwp"
-      current_cpu = target_cpu
-      is_clang = false
-    }
-  }
-}
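
The per-tool commands in the removed win/BUILD.gn are assembled from a few moving pieces (env wrapper, compiler path, system-include flags); a rough Python restatement of the "cc" case, for orientation ($cl and $env come from the invoker and setup_toolchain.py, and the real command also appends /Fd"<label>_c.pdb" for the PDB):

  def cc_command(cl, env, sys_include_flags=None, msvc_use_absolute_paths=False):
      clflags = '/FC ' if msvc_use_absolute_paths else ''
      if sys_include_flags is not None:
          # clang-cl: system include dirs can be passed as plain flags.
          env_wrapper, includes = '', sys_include_flags + ' '
      else:
          # cl.exe: wrap with "ninja -t msvc -e <env-file>" so %INCLUDE% and
          # %PATH% from the environment.<arch> file are in effect.
          env_wrapper, includes = 'ninja -t msvc -e %s -- ' % env, ''
      return ('%s%s /nologo /showIncludes %s%s{{defines}} {{include_dirs}} '
              '{{cflags}} {{cflags_c}} /c {{source}} /Fo{{output}}'
              % (env_wrapper, cl, clflags, includes))
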
diff --git a/build/toolchain/win/midl.gni b/build/toolchain/win/midl.gni
deleted file mode 100644
index 9ff29c6..0000000
--- a/build/toolchain/win/midl.gni
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-assert(is_win)
-
-import("//build/config/win/visual_studio_version.gni")
-
-# This template defines a rule to invoke the MS IDL compiler. The generated
-# source code will be compiled and linked into targets that depend on this.
-#
-# Parameters
-#
-#   sources
-#      List of .idl files to process.
-#
-#   header_file (optional)
-#       File name of generated header file.  Defaults to the basename of the
-#       source idl file with a .h extension.
-#
-#   out_dir (optional)
-#       Directory to write the generated files to. Defaults to target_gen_dir.
-#
-#   dynamic_guid (optional)
-#       If the GUID is not constant across builds, the current GUID.
-#
-#   deps (optional)
-#   visibility (optional)
-
-template("midl") {
-  action_name = "${target_name}_idl_action"
-  source_set_name = target_name
-
-  assert(defined(invoker.sources), "Source must be defined for $target_name")
-
-  if (defined(invoker.out_dir)) {
-    out_dir = invoker.out_dir
-  } else {
-    out_dir = target_gen_dir
-  }
-
-  if (defined(invoker.dynamic_guid)) {
-    dynamic_guid = invoker.dynamic_guid
-  } else {
-    dynamic_guid = "none"
-  }
-
-  if (defined(invoker.header_file)) {
-    header_file = invoker.header_file
-  } else {
-    header_file = "{{source_name_part}}.h"
-  }
-
-  dlldata_file = "{{source_name_part}}.dlldata.c"
-  interface_identifier_file = "{{source_name_part}}_i.c"
-  proxy_file = "{{source_name_part}}_p.c"
-  type_library_file = "{{source_name_part}}.tlb"
-
-  action_foreach(action_name) {
-    visibility = [ ":$source_set_name" ]
-    script = "//build/toolchain/win/midl.py"
-
-    sources = invoker.sources
-
-    # Note that .tlb is not included in the outputs as it is not always
-    # generated depending on the content of the input idl file.
-    outputs = [
-      "$out_dir/$header_file",
-      "$out_dir/$dlldata_file",
-      "$out_dir/$interface_identifier_file",
-      "$out_dir/$proxy_file",
-    ]
-
-    if (current_cpu == "x86") {
-      win_tool_arch = "environment.x86"
-      idl_target_platform = "win32"
-    } else if (current_cpu == "x64") {
-      win_tool_arch = "environment.x64"
-      idl_target_platform = "x64"
-    } else {
-      assert(false, "Need environment for this arch")
-    }
-
-    args = [
-      win_tool_arch,
-      rebase_path(out_dir, root_build_dir),
-      dynamic_guid,
-      type_library_file,
-      header_file,
-      dlldata_file,
-      interface_identifier_file,
-      proxy_file,
-      "{{source}}",
-      "/char",
-      "signed",
-      "/env",
-      idl_target_platform,
-      "/Oicf",
-    ]
-
-    forward_variables_from(invoker, [ "deps" ])
-  }
-
-  source_set(target_name) {
-    forward_variables_from(invoker, [ "visibility" ])
-
-    # We only compile the IID files from the IDL tool rather than all outputs.
-    sources = process_file_template(invoker.sources,
-                                    [ "$out_dir/$interface_identifier_file" ])
-
-    public_deps = [
-      ":$action_name",
-    ]
-  }
-}
diff --git a/build/toolchain/win/midl.py b/build/toolchain/win/midl.py
deleted file mode 100644
index 09fec0b..0000000
--- a/build/toolchain/win/midl.py
+++ /dev/null
@@ -1,238 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import array
-import difflib
-import distutils.dir_util
-import filecmp
-import operator
-import os
-import re
-import shutil
-import struct
-import subprocess
-import sys
-import tempfile
-import uuid
-
-
-def ZapTimestamp(filename):
-  contents = open(filename, 'rb').read()
-  # midl.exe writes timestamp 2147483647 (2^31 - 1) as creation date into its
-  # outputs, but using the local timezone.  To make the output timezone-
-  # independent, replace that date with a fixed string of the same length.
-  # Also blank out the minor version number.
-  if filename.endswith('.tlb'):
-    # See https://chromium-review.googlesource.com/c/chromium/src/+/693223 for
-    # a fairly complete description of the .tlb binary format.
-    # TLB files start with a 54 byte header. Offset 0x20 stores how many types
-    # are defined in the file, and the header is followed by that many uint32s.
-    # After that, 15 section headers appear.  Each section header is 16 bytes,
-    # starting with offset and length uint32s.
-    # Section 12 in the file contains custom() data. custom() data has a type
-    # (int, string, etc).  Each custom data chunk starts with a uint16_t
-    # describing its type.  Type 8 is string data, consisting of a uint32_t
-    # len, followed by that many data bytes, followed by 'W' bytes to pad to a
-    # 4 byte boundary.  Type 0x13 is uint32 data, followed by 4 data bytes,
-    # followed by two 'W' to pad to a 4 byte boundary.
-    # The custom block always starts with one string containing "Created by
-    # MIDL version 8...", followed by one uint32 containing 0x7fffffff,
-    # followed by another uint32 containing the MIDL compiler version (e.g.
-    # 0x0801026e for v8.1.622 -- 0x26e == 622).  These 3 fields take 0x54 bytes.
-    # There might be more custom data after that, but these 3 blocks are always
-    # there for file-level metadata.
-    # All data is little-endian in the file.
-    assert contents[0:8] == 'MSFT\x02\x00\x01\x00'
-    ntypes, = struct.unpack_from('<I', contents, 0x20)
-    custom_off, custom_len = struct.unpack_from(
-        '<II', contents, 0x54 + 4*ntypes + 11*16)
-    assert custom_len >= 0x54
-    # First: Type string (0x8), followed by 0x3e characters.
-    assert contents[custom_off:custom_off+6] == '\x08\x00\x3e\x00\x00\x00'
-    assert re.match(
-        'Created by MIDL version 8\.\d\d\.\d{4} at ... Jan 1. ..:..:.. 2038\n',
-        contents[custom_off+6:custom_off+6+0x3e])
-    # Second: Type uint32 (0x13) storing 0x7fffffff (followed by WW / 0x57 pad)
-    assert contents[custom_off+6+0x3e:custom_off+6+0x3e+8] == \
-        '\x13\x00\xff\xff\xff\x7f\x57\x57'
-    # Third: Type uint32 (0x13) storing MIDL compiler version.
-    assert contents[custom_off+6+0x3e+8:custom_off+6+0x3e+8+2] == '\x13\x00'
-    # Replace "Created by" string with fixed string, and fixed MIDL version with
-    # 8.1.622 always.
-    contents = (contents[0:custom_off+6] +
-        'Created by MIDL version 8.xx.xxxx at a redacted point in time\n' +
-        # uint32 (0x13) val 0x7fffffff, WW, uint32 (0x13), val 0x0801026e, WW
-        '\x13\x00\xff\xff\xff\x7f\x57\x57\x13\x00\x6e\x02\x01\x08\x57\x57' +
-        contents[custom_off + 0x54:])
-  else:
-    contents = re.sub(
-        'File created by MIDL compiler version 8\.\d\d\.\d{4} \*/\r\n'
-        '/\* at ... Jan 1. ..:..:.. 2038',
-        'File created by MIDL compiler version 8.xx.xxxx */\r\n'
-        '/* at a redacted point in time',
-        contents)
-    contents = re.sub(
-        '    Oicf, W1, Zp8, env=(.....) \(32b run\), '
-        'target_arch=(AMD64|X86) 8\.\d\d\.\d{4}',
-        '    Oicf, W1, Zp8, env=\\1 (32b run), target_arch=\\2 8.xx.xxxx',
-        contents)
-    # TODO(thakis): If we need more hacks than these, try to verify checked-in
-    # outputs when we're using the hermetic toolchain.
-    # midl.exe older than 8.1.622 omit '//' after #endif, fix that:
-    contents = contents.replace('#endif !_MIDL_USE_GUIDDEF_',
-                                '#endif // !_MIDL_USE_GUIDDEF_')
-    # midl.exe puts the midl version into code in one place.  To have
-    # predictable output, lie about the midl version if it's not 8.1.622.
-    # This is unfortunate, but remember that there's beauty too in imperfection.
-    contents = contents.replace('0x801026c, /* MIDL Version 8.1.620 */',
-                                '0x801026e, /* MIDL Version 8.1.622 */')
-  open(filename, 'wb').write(contents)
-
-
-def overwrite_cls_guid_h(h_file, dynamic_guid):
-  contents = open(h_file, 'rb').read()
-  contents = re.sub('class DECLSPEC_UUID\("[^"]*"\)',
-                    'class DECLSPEC_UUID("%s")' % str(dynamic_guid), contents)
-  open(h_file, 'wb').write(contents)
-
-
-def overwrite_cls_guid_iid(iid_file, dynamic_guid):
-  contents = open(iid_file, 'rb').read()
-  hexuuid = '0x%08x,0x%04x,0x%04x,' % dynamic_guid.fields[0:3]
-  hexuuid += ','.join('0x%02x' % ord(b) for b in dynamic_guid.bytes[8:])
-  contents = re.sub(r'MIDL_DEFINE_GUID\(CLSID, ([^,]*),[^)]*\)',
-                    r'MIDL_DEFINE_GUID(CLSID, \1,%s)' % hexuuid, contents)
-  open(iid_file, 'wb').write(contents)
-
-
-def overwrite_cls_guid_tlb(tlb_file, dynamic_guid):
-  # See ZapTimestamp() for a short overview of the .tlb format.  The 1st
-  # section contains type descriptions, and the first type should be our
-  # coclass.  It points to the type's GUID in section 6, the GUID section.
-  contents = open(tlb_file, 'rb').read()
-  assert contents[0:8] == 'MSFT\x02\x00\x01\x00'
-  ntypes, = struct.unpack_from('<I', contents, 0x20)
-  type_off, type_len = struct.unpack_from('<II', contents, 0x54 + 4*ntypes)
-  assert ord(contents[type_off]) == 0x25, "expected coclass"
-  guidind = struct.unpack_from('<I', contents, type_off + 0x2c)[0]
-  guid_off, guid_len = struct.unpack_from(
-      '<II', contents, 0x54 + 4*ntypes + 5*16)
-  assert guidind + 14 <= guid_len
-  contents = array.array('c', contents)
-  struct.pack_into('<IHH8s', contents, guid_off + guidind,
-                   *(dynamic_guid.fields[0:3] + (dynamic_guid.bytes[8:],)))
-  # The GUID is correct now, but there's also a GUID hashtable in section 5.
-  # Need to recreate that too.  Since the hash table uses chaining, it's
-  # easiest to recompute it from scratch rather than trying to patch it up.
-  hashtab = [0xffffffff] * (0x80 / 4)
-  for guidind in range(guid_off, guid_off + guid_len, 24):
-    guidbytes, typeoff, nextguid = struct.unpack_from(
-        '<16sII', contents, guidind)
-    words = struct.unpack('<8H', guidbytes)
-    # midl seems to use the following simple hash function for GUIDs:
-    guidhash = reduce(operator.xor, [w for w in words]) % (0x80 / 4)
-    nextguid = hashtab[guidhash]
-    struct.pack_into('<I', contents, guidind + 0x14, nextguid)
-    hashtab[guidhash] = guidind - guid_off
-  hash_off, hash_len = struct.unpack_from(
-      '<II', contents, 0x54 + 4*ntypes + 4*16)
-  for i, hashval in enumerate(hashtab):
-    struct.pack_into('<I', contents, hash_off + 4*i, hashval)
-  open(tlb_file, 'wb').write(contents)
-
-
-def overwrite_cls_guid(h_file, iid_file, tlb_file, dynamic_guid):
-  # Fix up GUID in .h, _i.c, and .tlb.  This currently assumes that there's
-  # only one coclass in the idl file, and that that's the type with the
-  # dynamic type.
-  overwrite_cls_guid_h(h_file, dynamic_guid)
-  overwrite_cls_guid_iid(iid_file, dynamic_guid)
-  overwrite_cls_guid_tlb(tlb_file, dynamic_guid)
-
-
-def main(arch, outdir, dynamic_guid, tlb, h, dlldata, iid, proxy, idl, *flags):
-  # Copy checked-in outputs to final location.
-  THIS_DIR = os.path.abspath(os.path.dirname(__file__))
-  source = os.path.join(THIS_DIR, '..', '..', '..',
-      'third_party', 'win_build_output', outdir.replace('gen/', 'midl/'))
-  if os.path.isdir(os.path.join(source, os.path.basename(idl))):
-    source = os.path.join(source, os.path.basename(idl))
-  source = os.path.join(source, arch.split('.')[1])  # Append 'x86' or 'x64'.
-  source = os.path.normpath(source)
-  distutils.dir_util.copy_tree(source, outdir, preserve_times=False)
-  if dynamic_guid != 'none':
-    overwrite_cls_guid(os.path.join(outdir, h),
-                       os.path.join(outdir, iid),
-                       os.path.join(outdir, tlb),
-                       uuid.UUID(dynamic_guid))
-
-  # On non-Windows, that's all we can do.
-  if sys.platform != 'win32':
-    return 0
-
-  # On Windows, run midl.exe on the input and check that its outputs are
-  # identical to the checked-in outputs (after possibly replacing their main
-  # class guid).
-  tmp_dir = tempfile.mkdtemp()
-  delete_tmp_dir = True
-
-  # Read the environment block from the file. This is stored in the format used
-  # by CreateProcess. Drop last 2 NULs, one for list terminator, one for
-  # trailing vs. separator.
-  env_pairs = open(arch).read()[:-2].split('\0')
-  env_dict = dict([item.split('=', 1) for item in env_pairs])
-
-  args = ['midl', '/nologo'] + list(flags) + [
-      '/out', tmp_dir,
-      '/tlb', tlb,
-      '/h', h,
-      '/dlldata', dlldata,
-      '/iid', iid,
-      '/proxy', proxy,
-      idl]
-  try:
-    popen = subprocess.Popen(args, shell=True, env=env_dict,
-                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    out, _ = popen.communicate()
-    # Filter junk out of stdout, and write filtered versions. Output we want
-    # to filter is pairs of lines that look like this:
-    # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
-    # objidl.idl
-    lines = out.splitlines()
-    prefixes = ('Processing ', '64 bit Processing ')
-    processing = set(os.path.basename(x)
-                     for x in lines if x.startswith(prefixes))
-    for line in lines:
-      if not line.startswith(prefixes) and line not in processing:
-        print line
-    if popen.returncode != 0:
-      return popen.returncode
-
-    for f in os.listdir(tmp_dir):
-      ZapTimestamp(os.path.join(tmp_dir, f))
-
-    # Now compare the output in tmp_dir to the copied-over outputs.
-    diff = filecmp.dircmp(tmp_dir, outdir)
-    if diff.diff_files:
-      print 'midl.exe output different from files in %s, see %s' \
-          % (outdir, tmp_dir)
-      for f in diff.diff_files:
-        if f.endswith('.tlb'): continue
-        fromfile = os.path.join(outdir, f)
-        tofile = os.path.join(tmp_dir, f)
-        print ''.join(difflib.unified_diff(open(fromfile, 'U').readlines(),
-                                           open(tofile, 'U').readlines(),
-                                           fromfile, tofile))
-      delete_tmp_dir = False
-      print 'To rebaseline:'
-      print '  copy /y %s\* %s' % (tmp_dir, source)
-      sys.exit(1)
-    return 0
-  finally:
-    if os.path.exists(tmp_dir) and delete_tmp_dir:
-      shutil.rmtree(tmp_dir)
-
-
-if __name__ == '__main__':
-  sys.exit(main(*sys.argv[1:]))
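
The environment.<arch> handling in main() above (a CreateProcess-style environment block: NUL-terminated KEY=VALUE strings, with one extra NUL after the last entry) can be illustrated with a tiny sketch; the block contents are made up:

  block = 'INCLUDE=C:\\vs\\include\0PATH=C:\\vs\\bin\0\0'
  env_pairs = block[:-2].split('\0')            # drop the two trailing NULs
  env_dict = dict(item.split('=', 1) for item in env_pairs)
  # env_dict == {'INCLUDE': 'C:\\vs\\include', 'PATH': 'C:\\vs\\bin'}
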
diff --git a/build/toolchain/win/rc/.gitignore b/build/toolchain/win/rc/.gitignore
deleted file mode 100644
index e8fc4d3..0000000
--- a/build/toolchain/win/rc/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-linux64/rc
-mac/rc
-win/rc.exe
diff --git a/build/toolchain/win/rc/README.md b/build/toolchain/win/rc/README.md
deleted file mode 100644
index e6d38f9..0000000
--- a/build/toolchain/win/rc/README.md
+++ /dev/null
@@ -1,30 +0,0 @@
-# rc
-
-This contains a cross-platform reimplementation of rc.exe.
-
-This exists mainly to compile .rc files on non-Windows hosts for cross builds.
-However, it also runs on Windows for two reasons:
-
-1. To compare the output of Microsoft's rc.exe and the reimplementation and to
-    check that they produce bitwise identical output.
-2. The reimplementation supports printing resource files in /showIncludes
-   output, which helps getting build dependencies right.
-
-The resource compiler consists of two parts:
-
-1. A python script rc.py that serves as the driver.  It does unicode
-   conversions, runs the input through the preprocessor, and then calls the
-   actual resource compiler.
-2. The resource compiler, a C++ binary obtained via sha1 files from Google
-   Storage.  The binary's code currently lives at
-   https://github.com/nico/hack/tree/master/res, even though work is (slowly)
-   underway to upstream it into LLVM.
-
-To update the rc binary, run `upload_rc_binaries.sh` in this directory, on a
-Mac.
-
-rc isn't built from source as part of the regular chrome build because
-it's needed in a gn toolchain tool, and these currently cannot have deps.
-Alternatively, gn could be taught about deps on tools, or rc invocations could
-be made a template rather than a tool (like e.g. yasm invocations, which can
-have deps); then the prebuilt binaries wouldn't be needed.
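
To make the driver/binary split above concrete, here is an illustrative invocation of the removed rc.py and what its flag parser (shown below) extracts from it; the paths and defines are made up:

  argv = ['rc.py', '/showIncludes', '-I../..', '-DRC_VERSION=1',
          '/foout/chrome.res', 'chrome.rc']
  # ParseFlags() would yield:
  #   includes      = ['-I../..']          passed through to the preprocessor
  #   defines       = ['-DRC_VERSION=1']
  #   output        = 'out/chrome.res'     from /fo<out> (no space after /fo)
  #   input         = 'chrome.rc'
  #   show_includes = True
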
diff --git a/build/toolchain/win/rc/linux64/rc.sha1 b/build/toolchain/win/rc/linux64/rc.sha1
deleted file mode 100644
index ad14ca4..0000000
--- a/build/toolchain/win/rc/linux64/rc.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2d0c766039264dc2514d005a42f074af4838a446
\ No newline at end of file
diff --git a/build/toolchain/win/rc/mac/rc.sha1 b/build/toolchain/win/rc/mac/rc.sha1
deleted file mode 100644
index dbd6302..0000000
--- a/build/toolchain/win/rc/mac/rc.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4c25c3bcb6608109bb52028d008835895cf72629
\ No newline at end of file
diff --git a/build/toolchain/win/rc/rc.py b/build/toolchain/win/rc/rc.py
deleted file mode 100755
index 73da888..0000000
--- a/build/toolchain/win/rc/rc.py
+++ /dev/null
@@ -1,194 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""usage: rc.py [options] input.res
-A resource compiler for .rc files.
-
-options:
--h, --help     Print this message.
--I<dir>        Add include path.
--D<sym>        Define a macro for the preprocessor.
-/fo<out>       Set path of output .res file.
-/showIncludes  Print referenced header and resource files."""
-
-from __future__ import print_function
-from collections import namedtuple
-import codecs
-import os
-import re
-import subprocess
-import sys
-import tempfile
-
-
-THIS_DIR = os.path.abspath(os.path.dirname(__file__))
-SRC_DIR = \
-    os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(THIS_DIR))))
-
-
-def ParseFlags():
-  """Parses flags off sys.argv and returns the parsed flags."""
-  # Can't use optparse / argparse because of /fo flag :-/
-  includes = []
-  defines = []
-  output = None
-  input = None
-  show_includes = False
-  # Parse.
-  for flag in sys.argv[1:]:
-    if flag == '-h' or flag == '--help':
-      print(__doc__)
-      sys.exit(0)
-    if flag.startswith('-I'):
-      includes.append(flag)
-    elif flag.startswith('-D'):
-      defines.append(flag)
-    elif flag.startswith('/fo'):
-      if output:
-        print('rc.py: error: multiple /fo flags', '/fo' + output, flag,
-              file=sys.stderr)
-        sys.exit(1)
-      output = flag[3:]
-    elif flag == '/showIncludes':
-      show_includes = True
-    elif (flag.startswith('-') or
-          (flag.startswith('/') and not os.path.exists(flag))):
-      print('rc.py: error: unknown flag', flag, file=sys.stderr)
-      print(__doc__, file=sys.stderr)
-      sys.exit(1)
-    else:
-      if input:
-        print('rc.py: error: multiple inputs:', input, flag, file=sys.stderr)
-        sys.exit(1)
-      input = flag
-  # Validate and set default values.
-  if not input:
-    print('rc.py: error: no input file', file=sys.stderr)
-    sys.exit(1)
-  if not output:
-    output = os.path.splitext(input)[0] + '.res'
-  Flags = namedtuple('Flags', ['includes', 'defines', 'output', 'input',
-                               'show_includes'])
-  return Flags(includes=includes, defines=defines, output=output, input=input,
-               show_includes=show_includes)
-
-
-def ReadInput(input):
-  """"Reads input and returns it. For UTF-16LEBOM input, converts to UTF-8."""
-  # Microsoft's rc.exe only supports unicode in the form of UTF-16LE with a BOM.
-  # Our rc binary sniffs for UTF-16LE.  If that's not found, if /utf-8 is
-  # passed, the input is treated as UTF-8.  If /utf-8 is not passed and the
-  # input is not UTF-16LE, then our rc errors out on characters outside of
-  # 7-bit ASCII.  Since the driver always converts UTF-16LE to UTF-8 here (for
-  # the preprocessor, which doesn't support UTF-16LE), our rc will either see
-  # UTF-8 with the /utf-8 flag (for UTF-16LE input), or ASCII input.
-  # This is compatible with Microsoft rc.exe.  If we wanted, we could expose
-  # a /utf-8 flag for the driver for UTF-8 .rc inputs too.
-  # TODO(thakis): Microsoft's rc.exe supports BOM-less UTF-16LE. We currently
-  # don't, but for chrome it currently doesn't matter.
-  is_utf8 = False
-  try:
-    with open(input, 'rb') as rc_file:
-      rc_file_data = rc_file.read()
-      if rc_file_data.startswith(codecs.BOM_UTF16_LE):
-        rc_file_data = rc_file_data[2:].decode('utf-16le').encode('utf-8')
-        is_utf8 = True
-  except IOError:
-    print('rc.py: failed to open', input, file=sys.stderr)
-    sys.exit(1)
-  except UnicodeDecodeError:
-    print('rc.py: failed to decode UTF-16 despite BOM', input, file=sys.stderr)
-    sys.exit(1)
-  return rc_file_data, is_utf8
-
-
-def Preprocess(rc_file_data, flags):
-  """Runs the input file through the preprocessor."""
-  clang = os.path.join(SRC_DIR, 'third_party', 'llvm-build',
-                       'Release+Asserts', 'bin', 'clang-cl')
-  # Let preprocessor write to a temp file so that it doesn't interfere
-  # with /showIncludes output on stdout.
-  if sys.platform == 'win32':
-    clang += '.exe'
-  temp_handle, temp_file = tempfile.mkstemp(suffix='.i')
-  # Closing temp_handle immediately defeats the purpose of mkstemp(), but I
-  # can't figure out how to let the preprocessor write to the temp file on
-  # Windows otherwise.
-  os.close(temp_handle)
-  clang_cmd = [clang, '/P', '/DRC_INVOKED', '/TC', '-', '/Fi' + temp_file]
-  if os.path.dirname(flags.input):
-    # This must precede flags.includes.
-    clang_cmd.append('-I' + os.path.dirname(flags.input))
-  if flags.show_includes:
-    clang_cmd.append('/showIncludes')
-  clang_cmd += flags.includes + flags.defines
-  p = subprocess.Popen(clang_cmd, stdin=subprocess.PIPE)
-  p.communicate(input=rc_file_data)
-  if p.returncode != 0:
-    sys.exit(p.returncode)
-  preprocessed_output = open(temp_file, 'rb').read()
-  os.remove(temp_file)
-
-  # rc.exe has a wacko preprocessor:
-  # https://msdn.microsoft.com/en-us/library/windows/desktop/aa381033(v=vs.85).aspx
-  # """RC treats files with the .c and .h extensions in a special manner. It
-  # assumes that a file with one of these extensions does not contain
-  # resources. If a file has the .c or .h file name extension, RC ignores all
-  # lines in the file except the preprocessor directives."""
-  # Thankfully, the Microsoft headers are mostly good about putting everything
-  # in the system headers behind `if !defined(RC_INVOKED)`, so regular
-  # preprocessing with RC_INVOKED defined almost works. The one exception
-  # is struct tagCRGB in dlgs.h, but that will be fixed in the next major
-  # SDK release too.
-  # TODO(thakis): Remove this once an SDK with the fix has been released.
-  preprocessed_output = re.sub('typedef struct tagCRGB\s*{[^}]*} CRGB;', '',
-                               preprocessed_output)
-  return preprocessed_output
-
-
-def RunRc(preprocessed_output, is_utf8, flags):
-  if sys.platform.startswith('linux'):
-    rc = os.path.join(THIS_DIR, 'linux64', 'rc')
-  elif sys.platform == 'darwin':
-    rc = os.path.join(THIS_DIR, 'mac', 'rc')
-  elif sys.platform == 'win32':
-    rc = os.path.join(THIS_DIR, 'win', 'rc.exe')
-  else:
-    print('rc.py: error: unsupported platform', sys.platform, file=sys.stderr)
-    sys.exit(1)
-  rc_cmd = [rc]
-  # Make sure rc-relative resources can be found:
-  if os.path.dirname(flags.input):
-    rc_cmd.append('/cd' + os.path.dirname(flags.input))
-  rc_cmd.append('/fo' + flags.output)
-  if is_utf8:
-    rc_cmd.append('/utf-8')
-  # TODO(thakis): rc currently always prints full paths for /showIncludes,
-  # but clang-cl /P doesn't.  Which one is right?
-  if flags.show_includes:
-    rc_cmd.append('/showIncludes')
-  # Microsoft rc.exe searches for referenced files relative to -I flags in
-  # addition to the pwd, so -I flags need to be passed to both
-  # the preprocessor and rc.
-  rc_cmd += flags.includes
-  p = subprocess.Popen(rc_cmd, stdin=subprocess.PIPE)
-  p.communicate(input=preprocessed_output)
-  return p.returncode
-
-
-def main():
-  # This driver has to do these things:
-  # 1. Parse flags.
-  # 2. Convert the input from UTF-16LE to UTF-8 if needed.
-  # 3. Pass the input through a preprocessor (and clean up the preprocessor's
-  #    output in minor ways).
-  # 4. Call rc for the heavy lifting.
-  flags = ParseFlags()
-  rc_file_data, is_utf8 = ReadInput(flags.input)
-  preprocessed_output = Preprocess(rc_file_data, flags)
-  return RunRc(preprocessed_output, is_utf8, flags)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/toolchain/win/rc/upload_rc_binaries.sh b/build/toolchain/win/rc/upload_rc_binaries.sh
deleted file mode 100755
index ec4df4c..0000000
--- a/build/toolchain/win/rc/upload_rc_binaries.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-set -eu
-
-# Builds new rc binaries at head and uploads them to Google Storage.
-# The new .sha1 files will be in the tree after this has run.
-
-if [[ "$OSTYPE" != "darwin"* ]]; then
-  echo "this script must run on a mac"
-  exit 1
-fi
-
-DIR="$(cd "$(dirname "${0}" )" && pwd)"
-SRC_DIR="$DIR/../../../.."
-
-# Make sure Linux and Windows sysroots are installed, for distrib.py.
-$SRC_DIR/build/linux/sysroot_scripts/install-sysroot.py --arch amd64
-$SRC_DIR/build/vs_toolchain.py update --force
-
-# Make a temporary directory.
-WORK_DIR=$(mktemp -d)
-if [[ ! "$WORK_DIR" || ! -d "$WORK_DIR" ]]; then
-  echo "could not create temp dir"
-  exit 1
-fi
-function cleanup {
-  rm -rf "$WORK_DIR"
-}
-trap cleanup EXIT
-
-# Check out rc and build it in the temporary directory. Copy binaries over.
-pushd "$WORK_DIR" > /dev/null
-git clone -q https://github.com/nico/hack
-cd hack/res
-./distrib.py "$SRC_DIR"
-popd > /dev/null
-cp "$WORK_DIR/hack/res/rc-linux64" "$DIR/linux64/rc"
-cp "$WORK_DIR/hack/res/rc-mac" "$DIR/mac/rc"
-cp "$WORK_DIR/hack/res/rc-win.exe" "$DIR/win/rc.exe"
-
-# Upload binaries to cloud storage.
-upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/linux64/rc"
-upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/mac/rc"
-upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/win/rc.exe"
diff --git a/build/toolchain/win/rc/win/rc.exe.sha1 b/build/toolchain/win/rc/win/rc.exe.sha1
deleted file mode 100644
index 3fdbfc0..0000000
--- a/build/toolchain/win/rc/win/rc.exe.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ba51d69039ffb88310b72b6568efa9f0de148f8f
\ No newline at end of file
diff --git a/build/toolchain/win/setup_toolchain.py b/build/toolchain/win/setup_toolchain.py
deleted file mode 100644
index d63b131..0000000
--- a/build/toolchain/win/setup_toolchain.py
+++ /dev/null
@@ -1,271 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# Copies the given "win tool" (which the toolchain uses to wrap compiler
-# invocations) and the environment blocks for the 32-bit and 64-bit builds on
-# Windows to the build directory.
-#
-# The arguments are the visual studio install location and the location of the
-# win tool. The script assumes that the root build directory is the current dir
-# and the files will be written to the current directory.
-
-import errno
-import json
-import os
-import re
-import subprocess
-import sys
-
-sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
-import gn_helpers
-
-SCRIPT_DIR = os.path.dirname(__file__)
-
-def _ExtractImportantEnvironment(output_of_set):
-  """Extracts environment variables required for the toolchain to run from
-  a textual dump output by the cmd.exe 'set' command."""
-  envvars_to_save = (
-      'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
-      'include',
-      'lib',
-      'libpath',
-      'path',
-      'pathext',
-      'systemroot',
-      'temp',
-      'tmp',
-      )
-  env = {}
-  # This occasionally happens and leads to misleading SYSTEMROOT error messages
-  # if not caught here.
-  if output_of_set.count('=') == 0:
-    raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set)
-  for line in output_of_set.splitlines():
-    for envvar in envvars_to_save:
-      if re.match(envvar + '=', line.lower()):
-        var, setting = line.split('=', 1)
-        if envvar == 'path':
-          # Our own rules and actions in Chromium rely on python being in the
-          # path. Add the path to this python here so that if it's not in the
-          # path when ninja is run later, python will still be found.
-          setting = os.path.dirname(sys.executable) + os.pathsep + setting
-        env[var.upper()] = setting
-        break
-  if sys.platform in ('win32', 'cygwin'):
-    for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
-      if required not in env:
-        raise Exception('Environment variable "%s" '
-                        'required to be set to valid path' % required)
-  return env
-
-
-def _DetectVisualStudioPath():
-  """Return path to the GYP_MSVS_VERSION of Visual Studio.
-  """
-
-  # Use the code in build/vs_toolchain.py to avoid duplicating code.
-  chromium_dir = os.path.abspath(os.path.join(SCRIPT_DIR, '..', '..', '..'))
-  sys.path.append(os.path.join(chromium_dir, 'build'))
-  import vs_toolchain
-  return vs_toolchain.DetectVisualStudioPath()
-
-
-def _LoadEnvFromBat(args):
-  """Given a bat command, runs it and returns env vars set by it."""
-  args = args[:]
-  args.extend(('&&', 'set'))
-  popen = subprocess.Popen(
-      args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-  variables, _ = popen.communicate()
-  if popen.returncode != 0:
-    raise Exception('"%s" failed with error %d' % (args, popen.returncode))
-  return variables
-
-
-def _LoadToolchainEnv(cpu, sdk_dir, target_store):
-  """Returns a dictionary with environment variables that must be set while
-  running binaries from the toolchain (e.g. INCLUDE and PATH for cl.exe)."""
-  # Check if we are running in the SDK command line environment and use
-  # the setup script from the SDK if so. |cpu| should be either
-  # 'x86' or 'x64' or 'arm' or 'arm64'.
-  assert cpu in ('x86', 'x64', 'arm', 'arm64')
-  if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and sdk_dir:
-    # Load environment from json file.
-    env = os.path.normpath(os.path.join(sdk_dir, 'bin/SetEnv.%s.json' % cpu))
-    env = json.load(open(env))['env']
-    for k in env:
-      entries = [os.path.join(*([os.path.join(sdk_dir, 'bin')] + e))
-                 for e in env[k]]
-      # clang-cl wants INCLUDE to be ;-separated even on non-Windows,
-      # lld-link wants LIB to be ;-separated even on non-Windows.  Path gets :.
-      # The separator for INCLUDE here must match the one used in main() below.
-      sep = os.pathsep if k == 'PATH' else ';'
-      env[k] = sep.join(entries)
-    # PATH is a bit of a special case, it's in addition to the current PATH.
-    env['PATH'] = env['PATH'] + os.pathsep + os.environ['PATH']
-    # Augment with the current env to pick up TEMP and friends.
-    for k in os.environ:
-      if k not in env:
-        env[k] = os.environ[k]
-
-    varlines = []
-    for k in sorted(env.keys()):
-      varlines.append('%s=%s' % (str(k), str(env[k])))
-    variables = '\n'.join(varlines)
-
-    # Check that the json file contained the same environment as the .cmd file.
-    if sys.platform in ('win32', 'cygwin'):
-      script = os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.cmd'))
-      arg = '/' + cpu
-      json_env = _ExtractImportantEnvironment(variables)
-      cmd_env = _ExtractImportantEnvironment(_LoadEnvFromBat([script, arg]))
-      assert _LowercaseDict(json_env) == _LowercaseDict(cmd_env)
-  else:
-    if 'GYP_MSVS_OVERRIDE_PATH' not in os.environ:
-      os.environ['GYP_MSVS_OVERRIDE_PATH'] = _DetectVisualStudioPath()
-    # We only support x64-hosted tools.
-    script_path = os.path.normpath(os.path.join(
-                                       os.environ['GYP_MSVS_OVERRIDE_PATH'],
-                                       'VC/vcvarsall.bat'))
-    if not os.path.exists(script_path):
-      # vcvarsall.bat for VS 2017 fails if run after running vcvarsall.bat from
-      # VS 2013 or VS 2015. Fix this by clearing the vsinstalldir environment
-      # variable.
-      if 'VSINSTALLDIR' in os.environ:
-        del os.environ['VSINSTALLDIR']
-      other_path = os.path.normpath(os.path.join(
-                                        os.environ['GYP_MSVS_OVERRIDE_PATH'],
-                                        'VC/Auxiliary/Build/vcvarsall.bat'))
-      if not os.path.exists(other_path):
-        raise Exception('%s is missing - make sure VC++ tools are installed.' %
-                        script_path)
-      script_path = other_path
-    cpu_arg = "amd64"
-    if (cpu != 'x64'):
-      # x64 is the default target CPU; any other CPU needs an explicit target.
-      cpu_arg += '_' + cpu
-    args = [script_path, cpu_arg]
-    # Store target must come before any SDK version declaration
-    if (target_store):
-      args.append('store')
-    # Chromium requires the 10.0.17134.0 SDK - previous versions don't have
-    # all of the required declarations.
-    args.append('10.0.17134.0')
-    variables = _LoadEnvFromBat(args)
-  return _ExtractImportantEnvironment(variables)
-
-
-def _FormatAsEnvironmentBlock(envvar_dict):
-  """Format as an 'environment block' directly suitable for CreateProcess.
-  Briefly this is a list of key=value\0, terminated by an additional \0. See
-  CreateProcess documentation for more details."""
-  block = ''
-  nul = '\0'
-  for key, value in envvar_dict.iteritems():
-    block += key + '=' + value + nul
-  block += nul
-  return block
-
-
-def _LowercaseDict(d):
-  """Returns a copy of `d` with both key and values lowercased.
-
-  Args:
-    d: dict to lowercase (e.g. {'A': 'BcD'}).
-
-  Returns:
-    A dict with both keys and values lowercased (e.g.: {'a': 'bcd'}).
-  """
-  return {k.lower(): d[k].lower() for k in d}
-
-
-def main():
-  if len(sys.argv) != 7:
-    print('Usage: setup_toolchain.py '
-          '<visual studio path> <win sdk path> '
-          '<runtime dirs> <target_os> <target_cpu> '
-          '<environment block name|none>')
-    sys.exit(2)
-  win_sdk_path = sys.argv[2]
-  runtime_dirs = sys.argv[3]
-  target_os = sys.argv[4]
-  target_cpu = sys.argv[5]
-  environment_block_name = sys.argv[6]
-  if (environment_block_name == 'none'):
-    environment_block_name = ''
-
-  if (target_os == 'winuwp'):
-    target_store = True
-  else:
-    target_store = False
-
-  cpus = ('x86', 'x64', 'arm', 'arm64')
-  assert target_cpu in cpus
-  vc_bin_dir = ''
-  vc_lib_path = ''
-  vc_lib_atlmfc_path = ''
-  vc_lib_um_path = ''
-  include = ''
-
-  # TODO(scottmg|goma): Do we need an equivalent of
-  # ninja_use_custom_environment_files?
-
-  for cpu in cpus:
-    if cpu == target_cpu:
-      # Extract environment variables for subprocesses.
-      env = _LoadToolchainEnv(cpu, win_sdk_path, target_store)
-      env['PATH'] = runtime_dirs + os.pathsep + env['PATH']
-
-      for path in env['PATH'].split(os.pathsep):
-        if os.path.exists(os.path.join(path, 'cl.exe')):
-          vc_bin_dir = os.path.realpath(path)
-          break
-
-      for path in env['LIB'].split(';'):
-        if os.path.exists(os.path.join(path, 'msvcrt.lib')):
-          vc_lib_path = os.path.realpath(path)
-          break
-
-      for path in env['LIB'].split(';'):
-        if os.path.exists(os.path.join(path, 'atls.lib')):
-          vc_lib_atlmfc_path = os.path.realpath(path)
-          break
-
-      for path in env['LIB'].split(';'):
-        if os.path.exists(os.path.join(path, 'User32.Lib')):
-          vc_lib_um_path = os.path.realpath(path)
-          break
-
-      # The separator for INCLUDE here must match the one used in
-      # _LoadToolchainEnv() above.
-      include = [p.replace('"', r'\"') for p in env['INCLUDE'].split(';') if p]
-      include_I = ' '.join(['"/I' + i + '"' for i in include])
-      include_imsvc = ' '.join(['"-imsvc' + i + '"' for i in include])
-
-      if (environment_block_name != ''):
-        env_block = _FormatAsEnvironmentBlock(env)
-        with open(environment_block_name, 'wb') as f:
-          f.write(env_block)
-
-  assert vc_bin_dir
-  print 'vc_bin_dir = ' + gn_helpers.ToGNString(vc_bin_dir)
-  assert include_I
-  print 'include_flags_I = ' + gn_helpers.ToGNString(include_I)
-  assert include_imsvc
-  print 'include_flags_imsvc = ' + gn_helpers.ToGNString(include_imsvc)
-  assert vc_lib_path
-  print 'vc_lib_path = ' + gn_helpers.ToGNString(vc_lib_path)
-  if (target_store != True):
-    # Path is assumed not to exist for desktop applications
-    assert vc_lib_atlmfc_path
-  # An atlmfc library path may be introduced for store builds in the future,
-  # so output the result if one exists.
-  if (vc_lib_atlmfc_path != ''):
-    print 'vc_lib_atlmfc_path = ' + gn_helpers.ToGNString(vc_lib_atlmfc_path)
-  assert vc_lib_um_path
-  print 'vc_lib_um_path = ' + gn_helpers.ToGNString(vc_lib_um_path)
-  print 'paths = ' + gn_helpers.ToGNString(env['PATH'])
-
-if __name__ == '__main__':
-  main()
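As a clarifying aside (not part of the deleted script), the environment block
that _FormatAsEnvironmentBlock() writes is the CreateProcess format: key=value
entries separated by NULs and terminated by an extra NUL. A minimal Python
round-trip sketch, matching how tool_wrapper.py's _GetEnv() later reads the
file back:

    # Format a dict the way _FormatAsEnvironmentBlock() does.
    def format_block(env):
        return ''.join('%s=%s\0' % (k, v) for k, v in env.items()) + '\0'

    # Parse it back the way _GetEnv() does: drop the two trailing NULs, then
    # split on NUL and on the first '='.
    def parse_block(block):
        return dict(item.split('=', 1) for item in block[:-2].split('\0'))

    env = {'PATH': r'C:\Windows;C:\tools', 'TMP': r'C:\tmp'}
    assert parse_block(format_block(env)) == env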
diff --git a/build/toolchain/win/tool_wrapper.py b/build/toolchain/win/tool_wrapper.py
deleted file mode 100644
index b2cb093..0000000
--- a/build/toolchain/win/tool_wrapper.py
+++ /dev/null
@@ -1,275 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions for Windows builds.
-
-This file is copied to the build directory as part of toolchain setup and
-is used to set up calls to tools used by the build that need wrappers.
-"""
-
-import os
-import re
-import shutil
-import subprocess
-import stat
-import string
-import sys
-
-# tool_wrapper.py doesn't get invoked through python.bat so the Python bin
-# directory doesn't get added to the path. The Python module search logic
-# handles this fine and finds win32file.pyd. However the Windows module
-# search logic then looks for pywintypes27.dll and other DLLs in the path and
-# if it finds versions with a different bitness first then win32file.pyd will
-# fail to load with a cryptic error:
-#     ImportError: DLL load failed: %1 is not a valid Win32 application.
-if sys.platform == 'win32':
-  os.environ['PATH'] = os.path.dirname(sys.executable) + \
-                       os.pathsep + os.environ['PATH']
-  import win32file    # pylint: disable=import-error
-
-BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-
-# A regex matching an argument corresponding to the output filename passed to
-# link.exe.
-_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
-_LINK_PDB_OUT_ARG = re.compile('/PDB:(?P<out>.+)$', re.IGNORECASE)
-_LINK_ERROR = re.compile('.* error LNK(\d+):')
-
-# Retry links when this error is hit, to try to deal with crbug.com/782660
-_LINKER_RETRY_ERRORS = 1201
-# Maximum number of linker retries.
-_LINKER_RETRIES = 3
-
-def main(args):
-  exit_code = WinTool().Dispatch(args)
-  if exit_code is not None:
-    sys.exit(exit_code)
-
-
-class WinTool(object):
-  """This class performs all the Windows tooling steps. The methods can either
-  be executed directly, or dispatched from an argument list."""
-
-  def _UseSeparateMspdbsrv(self, env, args):
-    """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
-    shared one."""
-    if len(args) < 1:
-      raise Exception("Not enough arguments")
-
-    if args[0] != 'link.exe':
-      return
-
-    # Use the output filename passed to the linker to generate an endpoint name
-    # for mspdbsrv.exe.
-    endpoint_name = None
-    for arg in args:
-      m = _LINK_EXE_OUT_ARG.match(arg)
-      if m:
-        endpoint_name = re.sub(r'\W+', '',
-            '%s_%d' % (m.group('out'), os.getpid()))
-        break
-
-    if endpoint_name is None:
-      return
-
-    # Adds the appropriate environment variable. This will be read by link.exe
-    # to know which instance of mspdbsrv.exe it should connect to (if it's
-    # not set then the default endpoint is used).
-    env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
-
-  def Dispatch(self, args):
-    """Dispatches a string command to a method."""
-    if len(args) < 1:
-      raise Exception("Not enough arguments")
-
-    method = "Exec%s" % self._CommandifyName(args[0])
-    return getattr(self, method)(*args[1:])
-
-  def _CommandifyName(self, name_string):
-    """Transforms a tool name like recursive-mirror to RecursiveMirror."""
-    return name_string.title().replace('-', '')
-
-  def _GetEnv(self, arch):
-    """Gets the saved environment from a file for a given architecture."""
-    # The environment is saved as an "environment block" (see CreateProcess
-    # and msvs_emulation for details). We convert to a dict here.
-    # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
-    pairs = open(arch).read()[:-2].split('\0')
-    kvs = [item.split('=', 1) for item in pairs]
-    return dict(kvs)
-
-  def ExecDeleteFile(self, path):
-    """Simple file delete command."""
-    if os.path.exists(path):
-      os.unlink(path)
-
-  def ExecRecursiveMirror(self, source, dest):
-    """Emulation of rm -rf out && cp -af in out."""
-    if os.path.exists(dest):
-      if os.path.isdir(dest):
-        def _on_error(fn, path, dummy_excinfo):
-          # The operation failed, possibly because the file is set to
-          # read-only. If that's why, make it writable and try the op again.
-          if not os.access(path, os.W_OK):
-            os.chmod(path, stat.S_IWRITE)
-          fn(path)
-        shutil.rmtree(dest, onerror=_on_error)
-      else:
-        if not os.access(dest, os.W_OK):
-          # Attempt to make the file writable before deleting it.
-          os.chmod(dest, stat.S_IWRITE)
-        os.unlink(dest)
-
-    if os.path.isdir(source):
-      shutil.copytree(source, dest)
-    else:
-      shutil.copy2(source, dest)
-      # Try to diagnose crbug.com/741603
-      if not os.path.exists(dest):
-        raise Exception("Copying of %s to %s failed" % (source, dest))
-
-  def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
-    """Filter diagnostic output from link that looks like:
-    '   Creating library ui.dll.lib and object ui.dll.exp'
-    This happens when there are exports from the dll or exe.
-    """
-    env = self._GetEnv(arch)
-    if use_separate_mspdbsrv == 'True':
-      self._UseSeparateMspdbsrv(env, args)
-    if sys.platform == 'win32':
-      args = list(args)  # *args is a tuple by default, which is read-only.
-      args[0] = args[0].replace('/', '\\')
-    # https://docs.python.org/2/library/subprocess.html:
-    # "On Unix with shell=True [...] if args is a sequence, the first item
-    # specifies the command string, and any additional items will be treated as
-    # additional arguments to the shell itself.  That is to say, Popen does the
-    # equivalent of:
-    #   Popen(['/bin/sh', '-c', args[0], args[1], ...])"
-    # For that reason, since going through the shell doesn't seem necessary on
-    # non-Windows, don't do that there.
-    pdb_name = None
-    pe_name = None
-    for arg in args:
-      m = _LINK_PDB_OUT_ARG.match(arg)
-      if m:
-        pdb_name = m.group('out')
-      m = _LINK_EXE_OUT_ARG.match(arg)
-      if m:
-        pe_name = m.group('out')
-    for retry_count in range(_LINKER_RETRIES):
-      retry = False
-      link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env,
-                              stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-      # Read output one line at a time as it shows up to avoid OOM failures when
-      # GBs of output are produced.
-      for line in link.stdout:
-        if (not line.startswith('   Creating library ') and
-            not line.startswith('Generating code') and
-            not line.startswith('Finished generating code')):
-          m = _LINK_ERROR.match(line)
-          if m:
-            error_code = int(m.groups()[0])
-            if error_code == _LINKER_RETRY_ERRORS:
-              print 'Retrying link due to error %d' % error_code
-              if pdb_name:
-                shutil.copyfile(pdb_name, pdb_name + 'failure_backup')
-              retry = True
-          print line,
-      result = link.wait()
-      if not retry:
-        break
-    if result == 0 and sys.platform == 'win32':
-      # Flush the file buffers to try to work around a Windows 10 kernel bug,
-      # https://crbug.com/644525
-      output_handle = win32file.CreateFile(pe_name, win32file.GENERIC_WRITE,
-                                      0, None, win32file.OPEN_EXISTING, 0, 0)
-      win32file.FlushFileBuffers(output_handle)
-      output_handle.Close()
-    return result
-
-  def ExecAsmWrapper(self, arch, *args):
-    """Filter logo banner from invocations of asm.exe."""
-    env = self._GetEnv(arch)
-    popen = subprocess.Popen(args, shell=True, env=env,
-                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    out, _ = popen.communicate()
-    for line in out.splitlines():
-      # Split to avoid triggering license checks:
-      if (not line.startswith('Copy' + 'right (C' +
-                              ') Microsoft Corporation') and
-          not line.startswith('Microsoft (R) Macro Assembler') and
-          not line.startswith(' Assembling: ') and
-          line):
-        print line
-    return popen.returncode
-
-  def ExecRcWrapper(self, arch, *args):
-    """Converts .rc files to .res files."""
-    env = self._GetEnv(arch)
-
-    # We run two resource compilers:
-    # 1. A custom one at build/toolchain/win/rc/rc.py which can run on
-    #    non-Windows, and which has /showIncludes support so we can track
-    #    dependencies (e.g. on .ico files) of .rc files.
-    # 2. On Windows, regular Microsoft rc.exe, to make sure rc.py produces
-    #    bitwise identical output.
-
-    # 1. Run our rc.py.
-    # Also pass /showIncludes to track dependencies of .rc files.
-    args = list(args)
-    rcpy_args = args[:]
-    rcpy_args[0:1] = [sys.executable, os.path.join(BASE_DIR, 'rc', 'rc.py')]
-    rcpy_res_output = rcpy_args[-2]
-    assert rcpy_res_output.startswith('/fo')
-    assert rcpy_res_output.endswith('.res')
-    rc_res_output = rcpy_res_output + '_ms_rc'
-    args[-2] = rc_res_output
-    rcpy_args.append('/showIncludes')
-    rc_exe_exit_code = subprocess.call(rcpy_args, env=env)
-    if rc_exe_exit_code == 0:
-      # Since tool("rc") can't have deps, add deps on this script and on rc.py
-      # and its deps here, so that rc edges become dirty if rc.py changes.
-      print 'Note: including file: ../../build/toolchain/win/tool_wrapper.py'
-      print 'Note: including file: ../../build/toolchain/win/rc/rc.py'
-      print 'Note: including file: ../../build/toolchain/win/rc/linux64/rc.sha1'
-      print 'Note: including file: ../../build/toolchain/win/rc/mac/rc.sha1'
-      print 'Note: including file: ../../build/toolchain/win/rc/win/rc.exe.sha1'
-
-    # 2. Run Microsoft rc.exe.
-    if sys.platform == 'win32' and rc_exe_exit_code == 0:
-      popen = subprocess.Popen(args, shell=True, env=env,
-                               stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-      out, _ = popen.communicate()
-      # Filter logo banner from invocations of rc.exe. Older versions of RC
-      # don't support the /nologo flag.
-      for line in out.splitlines():
-        if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler')
-            and not line.startswith('Copy' + 'right (C' +
-                                ') Microsoft Corporation')
-            and line):
-          print line
-      rc_exe_exit_code = popen.returncode
-      # Assert Microsoft rc.exe and rc.py produced identical .res files.
-      if rc_exe_exit_code == 0:
-        import filecmp
-        # Strip "/fo" prefix.
-        assert filecmp.cmp(rc_res_output[3:], rcpy_res_output[3:])
-    return rc_exe_exit_code
-
-  def ExecActionWrapper(self, arch, rspfile, *dirname):
-    """Runs an action command line from a response file using the environment
-    for |arch|. If |dirname| is supplied, use that as the working directory."""
-    env = self._GetEnv(arch)
-    # TODO(scottmg): This is a temporary hack to get some specific variables
-    # through to actions that are set after GN-time. http://crbug.com/333738.
-    for k, v in os.environ.iteritems():
-      if k not in env:
-        env[k] = v
-    args = open(rspfile).read()
-    dirname = dirname[0] if dirname else None
-    return subprocess.call(args, shell=True, env=env, cwd=dirname)
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
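A small illustrative note on the dispatch convention used above: the first
argument passed to tool_wrapper.py is title-cased and stripped of dashes to
pick an Exec* method. A Python sketch (the tool names are just examples):

    def commandify(name):
        # Mirrors WinTool._CommandifyName(): 'recursive-mirror' -> 'RecursiveMirror'.
        return name.title().replace('-', '')

    for tool in ('recursive-mirror', 'link-wrapper', 'rc-wrapper', 'action-wrapper'):
        print('%s -> Exec%s' % (tool, commandify(tool)))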
diff --git a/build/toolchain/wrapper_utils.py b/build/toolchain/wrapper_utils.py
deleted file mode 100644
index f76192e..0000000
--- a/build/toolchain/wrapper_utils.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright (c) 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Helper functions for gcc_toolchain.gni wrappers."""
-
-import gzip
-import os
-import re
-import subprocess
-import shlex
-import shutil
-import sys
-import threading
-
-_BAT_PREFIX = 'cmd /c call '
-_WHITELIST_RE = re.compile('whitelisted_resource_(?P<resource_id>[0-9]+)')
-
-
-def _GzipThenDelete(src_path, dest_path):
-  # Results for Android map file with GCC on a z620:
-  # Uncompressed: 207MB
-  # gzip -9: 16.4MB, takes 8.7 seconds.
-  # gzip -1: 21.8MB, takes 2.0 seconds.
-  # Piping directly from the linker via -print-map (or via -Map with a fifo)
-  # adds a whopping 30-45 seconds!
-  with open(src_path, 'rb') as f_in, gzip.GzipFile(dest_path, 'wb', 1) as f_out:
-    shutil.copyfileobj(f_in, f_out)
-  os.unlink(src_path)
-
-
-def CommandToRun(command):
-  """Generates commands compatible with Windows.
-
-  When running on a Windows host and using a toolchain whose tools are
-  actually wrapper scripts (i.e. .bat files on Windows) rather than binary
-  executables, the |command| to run has to be prefixed with this magic.
-  The GN toolchain definitions take care of that for when GN/Ninja is
-  running the tool directly.  When that command is passed in to this
-  script, it appears as a unitary string but needs to be split up so that
-  just 'cmd' is the actual command given to Python's subprocess module.
-
-  Args:
-    command: List containing the UNIX style |command|.
-
-  Returns:
-    A list containing the Windows version of the |command|.
-  """
-  if command[0].startswith(_BAT_PREFIX):
-    command = command[0].split(None, 3) + command[1:]
-  return command
-
-
-def RunLinkWithOptionalMapFile(command, env=None, map_file=None):
-  """Runs the given command, adding in -Wl,-Map when |map_file| is given.
-
-  Also takes care of gzipping when |map_file| ends with .gz.
-
-  Args:
-    command: List of arguments comprising the command.
-    env: Environment variables.
-    map_file: Path to output map_file.
-
-  Returns:
-    The exit code of running |command|.
-  """
-  tmp_map_path = None
-  if map_file and map_file.endswith('.gz'):
-    tmp_map_path = map_file + '.tmp'
-    command.append('-Wl,-Map,' + tmp_map_path)
-  elif map_file:
-    command.append('-Wl,-Map,' + map_file)
-
-  result = subprocess.call(command, env=env)
-
-  if tmp_map_path and result == 0:
-    threading.Thread(
-        target=lambda: _GzipThenDelete(tmp_map_path, map_file)).start()
-  elif tmp_map_path and os.path.exists(tmp_map_path):
-    os.unlink(tmp_map_path)
-
-  return result
-
-
-def ResolveRspLinks(inputs):
-  """Return a list of files contained in a response file.
-
-  Args:
-    inputs: A command containing rsp files.
-
-  Returns:
-    A set containing the rsp file content."""
-  rspfiles = [a[1:] for a in inputs if a.startswith('@')]
-  resolved = set()
-  for rspfile in rspfiles:
-    with open(rspfile, 'r') as f:
-      resolved.update(shlex.split(f.read()))
-
-  return resolved
-
-
-def CombineResourceWhitelists(whitelist_candidates, outfile):
-  """Combines all whitelists for a resource file into a single whitelist.
-
-  Args:
-    whitelist_candidates: List of paths to rsp files containing all targets.
-    outfile: Path to save the combined whitelist.
-  """
-  whitelists = ('%s.whitelist' % candidate for candidate in whitelist_candidates
-                if os.path.exists('%s.whitelist' % candidate))
-
-  resources = set()
-  for whitelist in whitelists:
-    with open(whitelist, 'r') as f:
-      resources.update(f.readlines())
-
-  with open(outfile, 'w') as f:
-    f.writelines(resources)
-
-
-def ExtractResourceIdsFromPragmaWarnings(text):
-  """Returns set of resource IDs that are inside unknown pragma warnings.
-
-  Args:
-    text: The text that will be scanned for unknown pragma warnings.
-
-  Returns:
-    A set containing integers representing resource IDs.
-  """
-  used_resources = set()
-  lines = text.splitlines()
-  for ln in lines:
-    match = _WHITELIST_RE.search(ln)
-    if match:
-      resource_id = int(match.group('resource_id'))
-      used_resources.add(resource_id)
-
-  return used_resources
-
-
-def CaptureCommandStderr(command, env=None):
-  """Returns the stderr of a command.
-
-  Args:
-    command: A list containing the command and arguments.
-    env: Environment variables for the new process.
-  """
-  child = subprocess.Popen(command, stderr=subprocess.PIPE, env=env)
-  _, stderr = child.communicate()
-  return child.returncode, stderr
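A hedged usage sketch for the map-file helper above; the link command, object
file, and output names are illustrative, and it assumes this module is
importable as wrapper_utils:

    import wrapper_utils

    # When map_file ends in .gz, the helper passes -Wl,-Map with a temporary
    # path and gzips the result on a background thread after a successful link.
    link_cmd = ['g++', '-shared', '-o', 'libfoo.so', 'foo.o']
    exit_code = wrapper_utils.RunLinkWithOptionalMapFile(
        link_cmd, map_file='libfoo.so.map.gz')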
diff --git a/build/tree_truth.sh b/build/tree_truth.sh
deleted file mode 100755
index 617092d..0000000
--- a/build/tree_truth.sh
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/bin/bash
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# Script for printing recent commits in a buildbot run.
-
-# Return the sha1 of the given tag.  If not present, return "".
-# $1: path to repo
-# $2: tag name
-tt_sha1_for_tag() {
-  oneline=$(cd $1 && git log -1 $2 --format='%H' 2>/dev/null)
-  if [ $? -eq 0 ] ; then
-    echo $oneline
-  fi
-}
-
-# Return the sha1 of HEAD, or ""
-# $1: path to repo
-tt_sha1_for_head() {
-  ( cd $1 && git log HEAD -n1 --format='%H' | cat )
-}
-
-# For the given repo, set tag to HEAD.
-# $1: path to repo
-# $2: tag name
-tt_tag_head() {
-  ( cd $1 && git tag -f $2 )
-}
-
-# For the given repo, delete the tag.
-# $1: path to repo
-# $2: tag name
-tt_delete_tag() {
-  ( cd $1 && git tag -d $2 )
-}
-
-# For the given repo, set tag to "three commits ago" (for testing).
-# $1: path to repo
-# $2: tag name
-tt_tag_three_ago() {
-  local sh=$(cd $1 && git log --pretty=oneline -n 3 | tail -1 | awk '{print $1}')
-  ( cd $1 && git tag -f $2 $sh )
-}
-
-# List the commits between the given tag and HEAD.
-# If the tag does not exist, only list the last few.
-# If the tag is at HEAD, list nothing.
-# Output format has distinct build steps for repos with changes.
-# $1: path to repo
-# $2: tag name
-# $3: simple/short repo name to use for display
-tt_list_commits() {
-  local tag_sha1=$(tt_sha1_for_tag $1 $2)
-  local head_sha1=$(tt_sha1_for_head $1)
-  local display_name=$(echo $3 | sed 's#/#_#g')
-  if [ "${tag_sha1}" = "${head_sha1}" ] ; then
-    return
-  fi
-  if [ "${tag_sha1}" = "" ] ; then
-    echo "@@@BUILD_STEP Recent commits in repo $display_name@@@"
-    echo "NOTE: git tag was not found so we have no baseline."
-    echo "Here are some recent commits, but they may not be new for this build."
-    ( cd $1 && git log -n 10 --stat | cat)
-  else
-    echo "@@@BUILD_STEP New commits in repo $display_name@@@"
-    ( cd $1 && git log -n 500 $2..HEAD --stat | cat)
-  fi
-}
-
-# Clean out the tree truth tags in all repos.  For testing.
-tt_clean_all() {
- for project in $@; do
-   tt_delete_tag $CHROME_SRC/../$project tree_truth
- done
-}
-
-# Print tree truth for all clank repos.
-tt_print_all() {
- for project in $@; do
-   local full_path=$CHROME_SRC/../$project
-   tt_list_commits $full_path tree_truth $project
-   tt_tag_head $full_path tree_truth
- done
-}
-
-# Print a summary of the last 10 commits for each repo.
-tt_brief_summary() {
-  echo "@@@BUILD_STEP Brief summary of recent CLs in every branch@@@"
-  for project in $@; do
-    echo $project:
-    local full_path=$CHROME_SRC/../$project
-    (cd $full_path && git log -n 10 --format="   %H %s   %an, %ad" | cat)
-    echo "================================================================="
-  done
-}
-
-CHROME_SRC=$1
-shift
-PROJECT_LIST=$@
-tt_brief_summary $PROJECT_LIST
-tt_print_all $PROJECT_LIST
diff --git a/build/update-linux-sandbox.sh b/build/update-linux-sandbox.sh
deleted file mode 100755
index fa2d107..0000000
--- a/build/update-linux-sandbox.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/bin/sh
-
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-BUILDTYPE="${BUILDTYPE:-Debug}"
-CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}"
-CHROME_OUT_DIR="${CHROME_SRC_DIR}/${CHROMIUM_OUT_DIR:-out}/${BUILDTYPE}"
-CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox"
-CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox"
-CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH")
-
-TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null)
-if [ $? -ne 0 ]; then
-  echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}"
-  exit 1
-fi
-
-# Make sure the path is not on NFS.
-if [ "${TARGET_DIR_TYPE}" = "6969" ]; then
-  echo "Please make sure ${CHROME_SANDBOX_INST_PATH} is not on NFS!"
-  exit 1
-fi
-
-installsandbox() {
-  echo "(using sudo so you may be asked for your password)"
-  sudo -- cp "${CHROME_SANDBOX_BUILD_PATH}" \
-    "${CHROME_SANDBOX_INST_PATH}" &&
-  sudo -- chown root:root "${CHROME_SANDBOX_INST_PATH}" &&
-  sudo -- chmod 4755 "${CHROME_SANDBOX_INST_PATH}"
-  return $?
-}
-
-if [ ! -d "${CHROME_OUT_DIR}" ]; then
-  echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" "
-  echo "If you are building in Release mode"
-  exit 1
-fi
-
-if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then
-  echo "Could not find ${CHROME_SANDBOX_BUILD_PATH}"
-  echo -n "BUILDTYPE is $BUILDTYPE, use \"BUILDTYPE=<value> ${0}\" to override "
-  echo "after you build the chrome_sandbox target"
-  exit 1
-fi
-
-if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
-  echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, "
-  echo "installing it now."
-  installsandbox
-fi
-
-if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
-  echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
-  exit 1
-fi
-
-CURRENT_API=$("${CHROME_SANDBOX_BUILD_PATH}" --get-api)
-INSTALLED_API=$("${CHROME_SANDBOX_INST_PATH}" --get-api)
-
-if [ "${CURRENT_API}" != "${INSTALLED_API}" ]; then
-  echo "Your installed setuid sandbox is too old, installing it now."
-  if ! installsandbox; then
-    echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
-    exit 1
-  fi
-else
-  echo "Your setuid sandbox is up to date"
-  if [ "${CHROME_DEVEL_SANDBOX}" != "${CHROME_SANDBOX_INST_PATH}" ]; then
-    echo -n "Make sure you have \"export "
-    echo -n "CHROME_DEVEL_SANDBOX=${CHROME_SANDBOX_INST_PATH}\" "
-    echo "somewhere in your .bashrc"
-    echo "This variable is currently: ${CHROME_DEVEL_SANDBOX:-empty}"
-  fi
-fi
diff --git a/build/util/BUILD.gn b/build/util/BUILD.gn
deleted file mode 100644
index 9f5a6f7..0000000
--- a/build/util/BUILD.gn
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-action("webkit_version") {
-  script = "version.py"
-
-  lastchange_file = "LASTCHANGE"
-
-  template_file = "webkit_version.h.in"
-  inputs = [
-    lastchange_file,
-    template_file,
-  ]
-
-  output_file = "$target_gen_dir/webkit_version.h"
-  outputs = [
-    output_file,
-  ]
-
-  args = [
-    # LASTCHANGE contains "<build hash>-<ref>".  The user agent only wants the
-    # "<build hash>" bit, so chop off everything after it.
-    "-e",
-    "LASTCHANGE=LASTCHANGE[:LASTCHANGE.find('-')]",
-    "-f",
-    rebase_path(lastchange_file, root_build_dir),
-    rebase_path(template_file, root_build_dir),
-    rebase_path(output_file, root_build_dir),
-  ]
-}
-
-action("chrome_version_json") {
-  script = "version.py"
-  _chrome_version_path = "//chrome/VERSION"
-  inputs = [
-    _chrome_version_path,
-  ]
-  _output_file = "$root_gen_dir/CHROME_VERSION.json"
-  outputs = [
-    _output_file,
-  ]
-  args = [
-    "--file",
-    rebase_path(_chrome_version_path, root_build_dir),
-    "--template",
-    "{\"full-quoted\": \"\\\"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\\\"\"}",
-    "--output",
-    rebase_path(_output_file, root_build_dir),
-  ]
-}
diff --git a/build/util/LASTCHANGE b/build/util/LASTCHANGE
deleted file mode 100644
index 125c0d0..0000000
--- a/build/util/LASTCHANGE
+++ /dev/null
@@ -1 +0,0 @@
-LASTCHANGE=f2d1e453de33756fb4454dd881ba8fa786bed919-refs/heads/master@{#563863}
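The `-e` argument in the webkit_version action above chops the ref suffix off
the LASTCHANGE value just shown, keeping only the build hash. The same
transformation in plain Python:

    # LASTCHANGE has the form "<build hash>-<ref>"; keep only the hash part,
    # exactly like the expression LASTCHANGE[:LASTCHANGE.find('-')].
    value = 'f2d1e453de33756fb4454dd881ba8fa786bed919-refs/heads/master@{#563863}'
    build_hash = value[:value.find('-')]
    print(build_hash)  # f2d1e453de33756fb4454dd881ba8fa786bed919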
diff --git a/build/util/branding.gni b/build/util/branding.gni
deleted file mode 100644
index cadd1a3..0000000
--- a/build/util/branding.gni
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This exposes the Chrome branding as GN variables for use in build files.
-#
-# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively.
-# However, it is far better to write an action to generate a file at
-# build-time with the information you need. This allows better dependency
-# checking and GN will run faster.
-#
-# These values should only be used if you REALLY need to depend on them at
-# build-time, for example, in the computation of output file names.
-
-import("//build/config/chrome_build.gni")
-
-_branding_dictionary_template =
-    "full_name = \"@PRODUCT_FULLNAME@\" " +
-    "short_name = \"@PRODUCT_SHORTNAME@\" " +
-    "bundle_id = \"@MAC_BUNDLE_ID@\" " +
-    "creator_code = \"@MAC_CREATOR_CODE@\" " +
-    "installer_full_name = \"@PRODUCT_INSTALLER_FULLNAME@\" " +
-    "installer_short_name = \"@PRODUCT_INSTALLER_SHORTNAME@\" "
-
-_branding_file = "//chrome/app/theme/$branding_path_component/BRANDING"
-_result = exec_script("version.py",
-                      [
-                        "-f",
-                        rebase_path(_branding_file, root_build_dir),
-                        "-t",
-                        _branding_dictionary_template,
-                      ],
-                      "scope",
-                      [ _branding_file ])
-
-chrome_product_full_name = _result.full_name
-chrome_product_short_name = _result.short_name
-chrome_product_installer_full_name = _result.installer_full_name
-chrome_product_installer_short_name = _result.installer_short_name
-
-if (is_mac) {
-  chrome_mac_bundle_id = _result.bundle_id
-  chrome_mac_creator_code = _result.creator_code
-}
diff --git a/build/util/java_action.gni b/build/util/java_action.gni
deleted file mode 100644
index 646d5a4..0000000
--- a/build/util/java_action.gni
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-jarrunner = "//build/util/java_action.py"
-
-# Declare a target that runs a java command a single time.
-#
-# This target type allows you to run a java command a single time to produce
-# one or more output files. If you want to run a java command for each of a
-# set of input files, see "java_action_foreach".
-#
-# See "gn help action" for more information on how to use this target. This
-# template is based on the "action" and supports the same variables.
-template("java_action") {
-  assert(defined(invoker.script),
-         "Need script in $target_name listing the .jar file to run.")
-  assert(defined(invoker.outputs),
-         "Need outputs in $target_name listing the generated outputs.")
-
-  jarscript = invoker.script
-  action(target_name) {
-    script = jarrunner
-
-    inputs = [
-      jarscript,
-    ]
-    if (defined(invoker.inputs)) {
-      inputs += invoker.inputs
-    }
-
-    args = [
-      "-jar",
-      rebase_path(jarscript, root_build_dir),
-    ]
-    if (defined(invoker.args)) {
-      args += invoker.args
-    }
-
-    forward_variables_from(invoker,
-                           [
-                             "console",
-                             "data",
-                             "data_deps",
-                             "depfile",
-                             "deps",
-                             "outputs",
-                             "sources",
-                             "testonly",
-                             "visibility",
-                           ])
-  }
-}
-
-# Declare a target that runs a java command over a set of files.
-#
-# This target type allows you to run a java command once-per-file over a set of
-# sources. If you want to run a java command once that takes many files as
-# input, see "java_action".
-#
-# See "gn help action_foreach" for more information on how to use this target.
-# This template is based on the "action_foreach" supports the same variables.
-template("java_action_foreach") {
-  assert(defined(invoker.script),
-         "Need script in $target_name listing the .jar file to run.")
-  assert(defined(invoker.outputs),
-         "Need outputs in $target_name listing the generated outputs.")
-  assert(defined(invoker.sources),
-         "Need sources in $target_name listing the target inputs.")
-
-  jarscript = invoker.script
-  action_foreach(target_name) {
-    script = jarrunner
-
-    inputs = [
-      jarscript,
-    ]
-    if (defined(invoker.inputs)) {
-      inputs += invoker.inputs
-    }
-
-    args = [
-      "-jar",
-      rebase_path(jarscript, root_build_dir),
-    ]
-    if (defined(invoker.args)) {
-      args += invoker.args
-    }
-
-    forward_variables_from(invoker,
-                           [
-                             "console",
-                             "data",
-                             "data_deps",
-                             "depfile",
-                             "deps",
-                             "outputs",
-                             "sources",
-                             "testonly",
-                             "visibility",
-                           ])
-  }
-}
diff --git a/build/util/java_action.py b/build/util/java_action.py
deleted file mode 100755
index ed9bb60..0000000
--- a/build/util/java_action.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Wrapper script to run java command as action with gn."""
-
-import os
-import subprocess
-import sys
-
-EXIT_SUCCESS = 0
-EXIT_FAILURE = 1
-
-
-def IsExecutable(path):
-  """Returns whether file at |path| exists and is executable.
-
-  Args:
-    path: absolute or relative path to test.
-
-  Returns:
-    True if the file at |path| exists and is executable, False otherwise.
-  """
-  return os.path.isfile(path) and os.access(path, os.X_OK)
-
-
-def FindCommand(command):
-  """Looks up for |command| in PATH.
-
-  Args:
-    command: name of the command to look up. If command is a relative or
-      absolute path (i.e. contains some path separator), then only that
-      path will be tested.
-
-  Returns:
-    Full path to command or None if the command was not found.
-
-    On Windows, this respects the PATHEXT environment variable when the
-    command name does not have an extension.
-  """
-  fpath, _ = os.path.split(command)
-  if fpath:
-    if IsExecutable(command):
-      return command
-
-  if sys.platform == 'win32':
-    # On Windows, if the command does not have an extension, cmd.exe will
-    # try all extensions from PATHEXT when resolving the full path.
-    command, ext = os.path.splitext(command)
-    if not ext:
-      exts = os.environ['PATHEXT'].split(os.path.pathsep)
-    else:
-      exts = [ext]
-  else:
-    exts = ['']
-
-  for path in os.environ['PATH'].split(os.path.pathsep):
-    for ext in exts:
-      path = os.path.join(path, command) + ext
-      if IsExecutable(path):
-        return path
-
-  return None
-
-
-def main():
-  java_path = FindCommand('java')
-  if not java_path:
-    sys.stderr.write('java: command not found\n')
-    sys.exit(EXIT_FAILURE)
-
-  args = sys.argv[1:]
-  if len(args) < 2 or args[0] != '-jar':
-    sys.stderr.write('usage: %s -jar JARPATH [java_args]...\n' % sys.argv[0])
-    sys.exit(EXIT_FAILURE)
-
-  return subprocess.check_call([java_path] + args)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
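For illustration, the java_action() template above boils down to an action
that runs this wrapper with -jar plus the caller's arguments; the wrapper
resolves `java` from PATH (respecting PATHEXT on Windows) and then runs
`java -jar <jar> <args>`. A hedged sketch with an illustrative jar path:

    import subprocess
    import sys

    # Hypothetical jar and argument; mirrors what a java_action() target runs.
    subprocess.check_call([
        sys.executable, 'build/util/java_action.py',
        '-jar', 'third_party/some_tool/some_tool.jar',
        '--help',
    ])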
diff --git a/build/util/lastchange.py b/build/util/lastchange.py
deleted file mode 100755
index 19e3237..0000000
--- a/build/util/lastchange.py
+++ /dev/null
@@ -1,224 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-lastchange.py -- Chromium revision fetching utility.
-"""
-
-import re
-import logging
-import optparse
-import os
-import subprocess
-import sys
-
-class VersionInfo(object):
-  def __init__(self, revision_id, full_revision_string):
-    self.revision_id = revision_id
-    self.revision = full_revision_string
-
-
-def RunGitCommand(directory, command):
-  """
-  Launches git subcommand.
-
-  Errors are swallowed.
-
-  Returns:
-    A process object or None.
-  """
-  command = ['git'] + command
-  # Force shell usage under cygwin. This is a workaround for
-  # mysterious loss of cwd while invoking cygwin's git.
-  # We can't just pass shell=True to Popen, as under win32 this will
-  # cause CMD to be used, while we explicitly want a cygwin shell.
-  if sys.platform == 'cygwin':
-    command = ['sh', '-c', ' '.join(command)]
-  try:
-    proc = subprocess.Popen(command,
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE,
-                            cwd=directory,
-                            shell=(sys.platform=='win32'))
-    return proc
-  except OSError as e:
-    logging.error('Command %r failed: %s' % (' '.join(command), e))
-    return None
-
-
-def FetchGitRevision(directory, filter):
-  """
-  Fetch the Git hash (and Cr-Commit-Position if any) for a given directory.
-
-  Errors are swallowed.
-
-  Returns:
-    A VersionInfo object or None on error.
-  """
-  hsh = ''
-  git_args = ['log', '-1', '--format=%H']
-  if filter is not None:
-    git_args.append('--grep=' + filter)
-  proc = RunGitCommand(directory, git_args)
-  if proc:
-    output = proc.communicate()[0].strip()
-    if proc.returncode == 0 and output:
-      hsh = output
-    else:
-      logging.error('Git error: rc=%d, output=%r' %
-                    (proc.returncode, output))
-  if not hsh:
-    return None
-  pos = ''
-  proc = RunGitCommand(directory, ['cat-file', 'commit', hsh])
-  if proc:
-    output = proc.communicate()[0]
-    if proc.returncode == 0 and output:
-      for line in reversed(output.splitlines()):
-        if line.startswith('Cr-Commit-Position:'):
-          pos = line.rsplit()[-1].strip()
-          break
-  return VersionInfo(hsh, '%s-%s' % (hsh, pos))
-
-
-def FetchVersionInfo(directory=None, filter=None):
-  """
-  Returns the last change (as a VersionInfo object)
-  from some appropriate revision control system.
-  """
-  version_info = FetchGitRevision(directory, filter)
-  if not version_info:
-    version_info = VersionInfo('0', '0')
-  return version_info
-
-
-def GetHeaderGuard(path):
-  """
-  Returns the header #define guard for the given file path.
-  This treats everything after the last instance of "src/" as being a
-  relevant part of the guard. If there is no "src/", then the entire path
-  is used.
-  """
-  src_index = path.rfind('src/')
-  if src_index != -1:
-    guard = path[src_index + 4:]
-  else:
-    guard = path
-  guard = guard.upper()
-  return guard.replace('/', '_').replace('.', '_').replace('\\', '_') + '_'
-
-
-def GetHeaderContents(path, define, version):
-  """
-  Returns the contents that the header file at |path| should have in order to
-  indicate the given revision.
-  """
-  header_guard = GetHeaderGuard(path)
-
-  header_contents = """/* Generated by lastchange.py, do not edit.*/
-
-#ifndef %(header_guard)s
-#define %(header_guard)s
-
-#define %(define)s "%(version)s"
-
-#endif  // %(header_guard)s
-"""
-  header_contents = header_contents % { 'header_guard': header_guard,
-                                        'define': define,
-                                        'version': version }
-  return header_contents
-
-
-def WriteIfChanged(file_name, contents):
-  """
-  Writes the specified contents to the specified file_name
-  iff the contents are different from the current contents.
-  """
-  try:
-    old_contents = open(file_name, 'r').read()
-  except EnvironmentError:
-    pass
-  else:
-    if contents == old_contents:
-      return
-    os.unlink(file_name)
-  open(file_name, 'w').write(contents)
-
-
-def main(argv=None):
-  if argv is None:
-    argv = sys.argv
-
-  parser = optparse.OptionParser(usage="lastchange.py [options]")
-  parser.add_option("-m", "--version-macro",
-                    help="Name of C #define when using --header. Defaults to " +
-                    "LAST_CHANGE.",
-                    default="LAST_CHANGE")
-  parser.add_option("-o", "--output", metavar="FILE",
-                    help="Write last change to FILE. " +
-                    "Can be combined with --header to write both files.")
-  parser.add_option("", "--header", metavar="FILE",
-                    help="Write last change to FILE as a C/C++ header. " +
-                    "Can be combined with --output to write both files.")
-  parser.add_option("--revision-id-only", action='store_true',
-                    help="Output the revision as a VCS revision ID only (in " +
-                    "Git, a 40-character commit hash, excluding the " +
-                    "Cr-Commit-Position).")
-  parser.add_option("--print-only", action='store_true',
-                    help="Just print the revision string. Overrides any " +
-                    "file-output-related options.")
-  parser.add_option("-s", "--source-dir", metavar="DIR",
-                    help="Use repository in the given directory.")
-  parser.add_option("", "--filter", metavar="REGEX",
-                    help="Only use log entries where the commit message " +
-                    "matches the supplied filter regex. Defaults to " +
-                    "'^Change-Id:' to suppress local commits.",
-                    default='^Change-Id:')
-  opts, args = parser.parse_args(argv[1:])
-
-  logging.basicConfig(level=logging.WARNING)
-
-  out_file = opts.output
-  header = opts.header
-  filter=opts.filter
-
-  while len(args) and out_file is None:
-    if out_file is None:
-      out_file = args.pop(0)
-  if args:
-    sys.stderr.write('Unexpected arguments: %r\n\n' % args)
-    parser.print_help()
-    sys.exit(2)
-
-  if opts.source_dir:
-    src_dir = opts.source_dir
-  else:
-    src_dir = os.path.dirname(os.path.abspath(__file__))
-
-  version_info = FetchVersionInfo(directory=src_dir, filter=filter)
-  revision_string = version_info.revision
-  if opts.revision_id_only:
-    revision_string = version_info.revision_id
-
-  if opts.print_only:
-    print revision_string
-  else:
-    contents = "LASTCHANGE=%s\n" % revision_string
-    if not out_file and not opts.header:
-      sys.stdout.write(contents)
-    else:
-      if out_file:
-        WriteIfChanged(out_file, contents)
-      if header:
-        WriteIfChanged(header,
-                       GetHeaderContents(header, opts.version_macro,
-                                         revision_string))
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
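
For reference, a minimal sketch of how the lastchange.py helpers above fit
together, assuming the module is importable as "lastchange" and the working
directory is a Git checkout; the output file names are illustrative only:

  import lastchange

  # Ask Git for the HEAD hash (plus the Cr-Commit-Position, when present).
  info = lastchange.FetchVersionInfo(directory='.', filter='^Change-Id:')

  # Write both the LASTCHANGE file and a guarded C header, touching the
  # files only when their contents actually change.
  lastchange.WriteIfChanged('LASTCHANGE', 'LASTCHANGE=%s\n' % info.revision)
  lastchange.WriteIfChanged(
      'version_info.h',
      lastchange.GetHeaderContents('version_info.h', 'LAST_CHANGE',
                                   info.revision))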
diff --git a/build/util/lib/common/PRESUBMIT.py b/build/util/lib/common/PRESUBMIT.py
deleted file mode 100644
index fca962f..0000000
--- a/build/util/lib/common/PRESUBMIT.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-def _RunTests(input_api, output_api):
-  return (input_api.canned_checks.RunUnitTestsInDirectory(
-          input_api, output_api, '.', whitelist=[r'.+_test.py$']))
-
-
-def CheckChangeOnUpload(input_api, output_api):
-  return _RunTests(input_api, output_api)
-
-
-def CheckChangeOnCommit(input_api, output_api):
-  return _RunTests(input_api, output_api)
diff --git a/build/util/lib/common/__init__.py b/build/util/lib/common/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/build/util/lib/common/__init__.py
+++ /dev/null
diff --git a/build/util/lib/common/chrome_test_server_spawner.py b/build/util/lib/common/chrome_test_server_spawner.py
deleted file mode 100644
index b9844aa..0000000
--- a/build/util/lib/common/chrome_test_server_spawner.py
+++ /dev/null
@@ -1,480 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
-
-It's used to accept requests from the device to spawn and kill instances of the
-chrome test server on the host.
-"""
-# pylint: disable=W0702
-
-import BaseHTTPServer
-import json
-import logging
-import os
-import select
-import struct
-import subprocess
-import sys
-import threading
-import time
-import urlparse
-
-
-SERVER_TYPES = {
-    'http': '',
-    'ftp': '-f',
-    'sync': '',  # Sync uses its own script, and doesn't take a server type arg.
-    'tcpecho': '--tcp-echo',
-    'udpecho': '--udp-echo',
-    'ws': '--websocket',
-}
-
-
-_DIR_SOURCE_ROOT = os.path.abspath(
-    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
-                 os.pardir))
-
-
-_logger = logging.getLogger(__name__)
-
-
-# Paths that are needed to import the necessary modules when launching a
-# testserver.
-os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s:%s:%s'
-    % (os.path.join(_DIR_SOURCE_ROOT, 'third_party'),
-       os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'tlslite'),
-       os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'pyftpdlib', 'src'),
-       os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver'),
-       os.path.join(_DIR_SOURCE_ROOT, 'components', 'sync', 'tools',
-                    'testserver')))
-
-
-# The timeout (in seconds) of starting up the Python test server.
-_TEST_SERVER_STARTUP_TIMEOUT = 10
-
-
-def _GetServerTypeCommandLine(server_type):
-  """Returns the command-line by the given server type.
-
-  Args:
-    server_type: the server type to be used (e.g. 'http').
-
-  Returns:
-    A string containing the command-line argument.
-  """
-  if server_type not in SERVER_TYPES:
-    raise NotImplementedError('Unknown server type: %s' % server_type)
-  if server_type == 'udpecho':
-    raise Exception('Please do not run UDP echo tests because we do not have '
-                    'a UDP forwarder tool.')
-  return SERVER_TYPES[server_type]
-
-
-class PortForwarder:
-  def Map(self, port_pairs):
-    pass
-
-  def GetDevicePortForHostPort(self, host_port):
-    """Returns the device port that corresponds to a given host port."""
-    return host_port
-
-  def WaitHostPortAvailable(self, port):
-    """Returns True if |port| is available."""
-    return True
-
-  def WaitPortNotAvailable(self, port):
-    """Returns True if |port| is not available."""
-    return True
-
-  def WaitDevicePortReady(self, port):
-    """Returns whether the provided port is used."""
-    return True
-
-  def Unmap(self, device_port):
-    """Unmaps specified port"""
-    pass
-
-
-class TestServerThread(threading.Thread):
-  """A thread to run the test server in a separate process."""
-
-  def __init__(self, ready_event, arguments, port_forwarder):
-    """Initialize TestServerThread with the following argument.
-
-    Args:
-      ready_event: event which will be set when the test server is ready.
-      arguments: dictionary of arguments to run the test server.
-      device: An instance of DeviceUtils.
-      tool: instance of runtime error detection tool.
-    """
-    threading.Thread.__init__(self)
-    self.wait_event = threading.Event()
-    self.stop_event = threading.Event()
-    self.ready_event = ready_event
-    self.ready_event.clear()
-    self.arguments = arguments
-    self.port_forwarder = port_forwarder
-    self.test_server_process = None
-    self.is_ready = False
-    self.host_port = self.arguments['port']
-    self.host_ocsp_port = 0
-    assert isinstance(self.host_port, int)
-    # The forwarder device port now is dynamically allocated.
-    self.forwarder_device_port = 0
-    self.forwarder_ocsp_device_port = 0
-    # Anonymous pipe in order to get port info from test server.
-    self.pipe_in = None
-    self.pipe_out = None
-    self.process = None
-    self.command_line = []
-
-  def _WaitToStartAndGetPortFromTestServer(self):
-    """Waits for the Python test server to start and gets the port it is using.
-
-    The port information is passed by the Python test server with a pipe given
-    by self.pipe_out. It is written as a result to |self.host_port|.
-
-    Returns:
-      Whether the port used by the test server was successfully fetched.
-    """
-    assert self.host_port == 0 and self.pipe_out and self.pipe_in
-    (in_fds, _, _) = select.select([self.pipe_in, ], [], [],
-                                   _TEST_SERVER_STARTUP_TIMEOUT)
-    if len(in_fds) == 0:
-      _logger.error('Timed out waiting for the Python test server to start.')
-      return False
-    # First read the data length as an unsigned 4-byte value.  This
-    # is _not_ using network byte ordering since the Python test server packs
-    # size as native byte order and all Chromium platforms so far are
-    # configured to use little-endian.
-    # TODO(jnd): Change the Python test server and local_test_server_*.cc to
-    # use a unified byte order (either big-endian or little-endian).
-    data_length = os.read(self.pipe_in, struct.calcsize('=L'))
-    if data_length:
-      (data_length,) = struct.unpack('=L', data_length)
-      assert data_length
-    if not data_length:
-      _logger.error('Failed to get length of server data.')
-      return False
-    server_data_json = os.read(self.pipe_in, data_length)
-    if not server_data_json:
-      _logger.error('Failed to get server data.')
-      return False
-    _logger.info('Got port json data: %s', server_data_json)
-
-    parsed_server_data = None
-    try:
-      parsed_server_data = json.loads(server_data_json)
-    except ValueError:
-      pass
-
-    if not isinstance(parsed_server_data, dict):
-      _logger.error('Failed to parse server_data: %s' % server_data_json)
-      return False
-
-    if not isinstance(parsed_server_data.get('port'), int):
-      _logger.error('Failed to get port information from the server data.')
-      return False
-
-    self.host_port = parsed_server_data['port']
-    self.host_ocsp_port = parsed_server_data.get('ocsp_port', 0)
-
-    return self.port_forwarder.WaitPortNotAvailable(self.host_port)
-
-  def _GenerateCommandLineArguments(self):
-    """Generates the command line to run the test server.
-
-    Note that all options are processed by following the definitions in
-    testserver.py.
-    """
-    if self.command_line:
-      return
-
-    args_copy = dict(self.arguments)
-
-    # Translate the server type.
-    type_cmd = _GetServerTypeCommandLine(args_copy.pop('server-type'))
-    if type_cmd:
-      self.command_line.append(type_cmd)
-
-    # Use a pipe to get the port given by the instance of Python test server
-    # if the test does not specify the port.
-    assert self.host_port == args_copy['port']
-    if self.host_port == 0:
-      (self.pipe_in, self.pipe_out) = os.pipe()
-      self.command_line.append('--startup-pipe=%d' % self.pipe_out)
-
-    # Pass the remaining arguments as-is.
-    for key, values in args_copy.iteritems():
-      if not isinstance(values, list):
-        values = [values]
-      for value in values:
-        if value is None:
-          self.command_line.append('--%s' % key)
-        else:
-          self.command_line.append('--%s=%s' % (key, value))
-
-  def _CloseUnnecessaryFDsForTestServerProcess(self):
-    # This is required to avoid subtle deadlocks that could be caused by the
-    # test server child process inheriting undesirable file descriptors such as
-    # file lock file descriptors.
-    for fd in xrange(0, 1024):
-      if fd != self.pipe_out:
-        try:
-          os.close(fd)
-        except:
-          pass
-
-  def run(self):
-    _logger.info('Start running the thread!')
-    self.wait_event.clear()
-    self._GenerateCommandLineArguments()
-    command = _DIR_SOURCE_ROOT
-    if self.arguments['server-type'] == 'sync':
-      command = [os.path.join(command, 'components', 'sync', 'tools',
-                              'testserver',
-                              'sync_testserver.py')] + self.command_line
-    else:
-      command = [os.path.join(command, 'net', 'tools', 'testserver',
-                              'testserver.py')] + self.command_line
-    _logger.info('Running: %s', command)
-
-    # Disable PYTHONUNBUFFERED because it has a bad interaction with the
-    # testserver. Remove once this interaction is fixed.
-    unbuf = os.environ.pop('PYTHONUNBUFFERED', None)
-
-    # Pass _DIR_SOURCE_ROOT as the child's working directory so that relative
-    # paths in the arguments are resolved correctly.
-    self.process = subprocess.Popen(
-        command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess,
-        cwd=_DIR_SOURCE_ROOT)
-    if unbuf:
-      os.environ['PYTHONUNBUFFERED'] = unbuf
-    if self.process:
-      if self.pipe_out:
-        self.is_ready = self._WaitToStartAndGetPortFromTestServer()
-      else:
-        self.is_ready = self.port_forwarder.WaitPortNotAvailable(self.host_port)
-
-    if self.is_ready:
-      port_map = [(0, self.host_port)]
-      if self.host_ocsp_port:
-        port_map.extend([(0, self.host_ocsp_port)])
-      self.port_forwarder.Map(port_map)
-
-      self.forwarder_device_port = \
-          self.port_forwarder.GetDevicePortForHostPort(self.host_port)
-      if self.host_ocsp_port:
-        self.forwarder_ocsp_device_port = \
-            self.port_forwarder.GetDevicePortForHostPort(self.host_ocsp_port)
-
-      # Check whether the forwarder is ready on the device.
-      self.is_ready = self.forwarder_device_port and \
-          self.port_forwarder.WaitDevicePortReady(self.forwarder_device_port)
-
-    # Wake up the request handler thread.
-    self.ready_event.set()
-    # Keep thread running until Stop() gets called.
-    self.stop_event.wait()
-    if self.process.poll() is None:
-      self.process.kill()
-    self.port_forwarder.Unmap(self.forwarder_device_port)
-    self.process = None
-    self.is_ready = False
-    if self.pipe_out:
-      os.close(self.pipe_in)
-      os.close(self.pipe_out)
-      self.pipe_in = None
-      self.pipe_out = None
-    _logger.info('Test-server has died.')
-    self.wait_event.set()
-
-  def Stop(self):
-    """Blocks until the loop has finished.
-
-    Note that this must be called in another thread.
-    """
-    if not self.process:
-      return
-    self.stop_event.set()
-    self.wait_event.wait()
-
-
-class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-  """A handler used to process http GET/POST request."""
-
-  def _SendResponse(self, response_code, response_reason, additional_headers,
-                    contents):
-    """Generates a response sent to the client from the provided parameters.
-
-    Args:
-      response_code: number of the response status.
-      response_reason: string of reason description of the response.
-      additional_headers: dict of additional headers. Each key is the name of
-                          the header, each value is the content of the header.
-      contents: string of the contents we want to send to client.
-    """
-    self.send_response(response_code, response_reason)
-    self.send_header('Content-Type', 'text/html')
-    # Specify the content-length as without it the http(s) response will not
-    # be completed properly (and the browser keeps expecting data).
-    self.send_header('Content-Length', len(contents))
-    for header_name in additional_headers:
-      self.send_header(header_name, additional_headers[header_name])
-    self.end_headers()
-    self.wfile.write(contents)
-    self.wfile.flush()
-
-  def _StartTestServer(self):
-    """Starts the test server thread."""
-    _logger.info('Handling request to spawn a test server.')
-    content_type = self.headers.getheader('content-type')
-    if content_type != 'application/json':
-      raise Exception('Bad content-type for start request.')
-    content_length = self.headers.getheader('content-length')
-    if not content_length:
-      content_length = 0
-    try:
-      content_length = int(content_length)
-    except:
-      raise Exception('Bad content-length for start request.')
-    _logger.info(content_length)
-    test_server_argument_json = self.rfile.read(content_length)
-    _logger.info(test_server_argument_json)
-
-    if len(self.server.test_servers) >= self.server.max_instances:
-      self._SendResponse(400, 'Invalid request', {},
-                         'Too many test servers running')
-      return
-
-    ready_event = threading.Event()
-    new_server = TestServerThread(ready_event,
-                                  json.loads(test_server_argument_json),
-                                  self.server.port_forwarder)
-    new_server.setDaemon(True)
-    new_server.start()
-    ready_event.wait()
-    if new_server.is_ready:
-      response = {'port': new_server.forwarder_device_port,
-                  'message': 'started'};
-      if new_server.forwarder_ocsp_device_port:
-        response['ocsp_port'] = new_server.forwarder_ocsp_device_port
-      self._SendResponse(200, 'OK', {}, json.dumps(response))
-      _logger.info('Test server is running on port %d forwarded to %d.' %
-              (new_server.forwarder_device_port, new_server.host_port))
-      port = new_server.forwarder_device_port
-      assert not self.server.test_servers.has_key(port)
-      self.server.test_servers[port] = new_server
-    else:
-      new_server.Stop()
-      self._SendResponse(500, 'Test Server Error.', {}, '')
-      _logger.info('Encountered a problem while starting a test server.')
-
-  def _KillTestServer(self, params):
-    """Stops the test server instance."""
-    try:
-      port = int(params['port'][0])
-    except (ValueError, KeyError):
-      port = None
-    if port is None or port <= 0:
-      self._SendResponse(400, 'Invalid request.', {}, 'port must be specified')
-      return
-
-    if not self.server.test_servers.has_key(port):
-      self._SendResponse(400, 'Invalid request.', {},
-                         "testserver isn't running on port %d" % port)
-      return
-
-    server = self.server.test_servers.pop(port)
-
-    _logger.info('Handling request to kill a test server on port: %d.', port)
-    server.Stop()
-
-    # Make sure the status of test server is correct before sending response.
-    if self.server.port_forwarder.WaitHostPortAvailable(port):
-      self._SendResponse(200, 'OK', {}, 'killed')
-      _logger.info('Test server on port %d is killed', port)
-    else:
-      self._SendResponse(500, 'Test Server Error.', {}, '')
-      _logger.info('Encountered a problem while killing the test server.')
-
-  def log_message(self, format, *args):
-    # Suppress the default HTTP logging to stderr; route the message through
-    # |_logger| instead when the logging level is INFO or lower.
-    if _logger.getEffectiveLevel() <= logging.INFO:
-      _logger.info(format, *args)
-
-  def do_POST(self):
-    parsed_path = urlparse.urlparse(self.path)
-    action = parsed_path.path
-    _logger.info('Action for POST method is: %s.', action)
-    if action == '/start':
-      self._StartTestServer()
-    else:
-      self._SendResponse(400, 'Unknown request.', {}, '')
-      _logger.info('Encountered unknown request: %s.', action)
-
-  def do_GET(self):
-    parsed_path = urlparse.urlparse(self.path)
-    action = parsed_path.path
-    params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
-    _logger.info('Action for GET method is: %s.', action)
-    for param in params:
-      _logger.info('%s=%s', param, params[param][0])
-    if action == '/kill':
-      self._KillTestServer(params)
-    elif action == '/ping':
-      # The ping handler is used to check whether the spawner server is ready
-      # to serve the requests. We don't need to test the status of the test
-      # server when handling ping request.
-      self._SendResponse(200, 'OK', {}, 'ready')
-      _logger.info('Handled ping request and sent response.')
-    else:
-      self._SendResponse(400, 'Unknown request', {}, '')
-      _logger.info('Encountered unknown request: %s.', action)
-
-
-class SpawningServer(object):
-  """The class used to start/stop a http server."""
-
-  def __init__(self, test_server_spawner_port, port_forwarder, max_instances):
-    self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
-                                            SpawningServerRequestHandler)
-    self.server_port = self.server.server_port
-    _logger.info('Started test server spawner on port: %d.', self.server_port)
-
-    self.server.port_forwarder = port_forwarder
-    self.server.test_servers = {}
-    self.server.max_instances = max_instances
-
-  def _Listen(self):
-    _logger.info('Starting test server spawner.')
-    self.server.serve_forever()
-
-  def Start(self):
-    """Starts the test server spawner."""
-    listener_thread = threading.Thread(target=self._Listen)
-    listener_thread.setDaemon(True)
-    listener_thread.start()
-
-  def Stop(self):
-    """Stops the test server spawner.
-
-    Also cleans the server state.
-    """
-    self.CleanupState()
-    self.server.shutdown()
-
-  def CleanupState(self):
-    """Cleans up the spawning server state.
-
-    This should be called if the test server spawner is reused,
-    to avoid sharing the test server instance.
-    """
-    if self.server.test_servers:
-      _logger.warning('Not all test servers were stopped.')
-      for port in self.server.test_servers:
-        _logger.warning('Stopping test server on port %d' % port)
-        self.server.test_servers[port].Stop()
-      self.server.test_servers = {}
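
For reference, a minimal sketch of the spawner's HTTP protocol from the client
side, using Python 2 urllib2; the spawner address and the server arguments are
placeholders:

  import json
  import urllib2

  spawner = 'http://localhost:8001'  # hypothetical spawner address

  # POST /start with a JSON body; the handler requires an application/json
  # content-type and replies with the forwarded device port.
  args = {'server-type': 'http', 'port': 0}
  request = urllib2.Request(spawner + '/start', json.dumps(args),
                            {'Content-Type': 'application/json'})
  device_port = json.loads(urllib2.urlopen(request).read())['port']

  # GET /ping checks readiness; GET /kill?port=N stops a running server.
  urllib2.urlopen(spawner + '/ping').read()                           # 'ready'
  urllib2.urlopen('%s/kill?port=%d' % (spawner, device_port)).read()  # 'killed'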
diff --git a/build/util/lib/common/perf_result_data_type.py b/build/util/lib/common/perf_result_data_type.py
deleted file mode 100644
index 67b550a..0000000
--- a/build/util/lib/common/perf_result_data_type.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-DEFAULT = 'default'
-UNIMPORTANT = 'unimportant'
-HISTOGRAM = 'histogram'
-UNIMPORTANT_HISTOGRAM = 'unimportant-histogram'
-INFORMATIONAL = 'informational'
-
-ALL_TYPES = [DEFAULT, UNIMPORTANT, HISTOGRAM, UNIMPORTANT_HISTOGRAM,
-             INFORMATIONAL]
-
-
-def IsValidType(datatype):
-  return datatype in ALL_TYPES
-
-
-def IsHistogram(datatype):
-  return (datatype == HISTOGRAM or datatype == UNIMPORTANT_HISTOGRAM)
diff --git a/build/util/lib/common/perf_tests_results_helper.py b/build/util/lib/common/perf_tests_results_helper.py
deleted file mode 100644
index 59bb5e4..0000000
--- a/build/util/lib/common/perf_tests_results_helper.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import re
-import sys
-
-import json
-import logging
-import math
-
-import perf_result_data_type
-
-
-# Mapping from result type to test output
-RESULT_TYPES = {perf_result_data_type.UNIMPORTANT: 'RESULT ',
-                perf_result_data_type.DEFAULT: '*RESULT ',
-                perf_result_data_type.INFORMATIONAL: '',
-                perf_result_data_type.UNIMPORTANT_HISTOGRAM: 'HISTOGRAM ',
-                perf_result_data_type.HISTOGRAM: '*HISTOGRAM '}
-
-
-def _EscapePerfResult(s):
-  """Escapes |s| for use in a perf result."""
-  return re.sub('[\:|=/#&,]', '_', s)
-
-
-def FlattenList(values):
-  """Returns a simple list without sub-lists."""
-  ret = []
-  for entry in values:
-    if isinstance(entry, list):
-      ret.extend(FlattenList(entry))
-    else:
-      ret.append(entry)
-  return ret
-
-
-def GeomMeanAndStdDevFromHistogram(histogram_json):
-  histogram = json.loads(histogram_json)
-  # Handle empty histograms gracefully.
-  if not 'buckets' in histogram:
-    return 0.0, 0.0
-  count = 0
-  sum_of_logs = 0
-  for bucket in histogram['buckets']:
-    if 'high' in bucket:
-      bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0
-    else:
-      bucket['mean'] = bucket['low']
-    if bucket['mean'] > 0:
-      sum_of_logs += math.log(bucket['mean']) * bucket['count']
-      count += bucket['count']
-
-  if count == 0:
-    return 0.0, 0.0
-
-  sum_of_squares = 0
-  geom_mean = math.exp(sum_of_logs / count)
-  for bucket in histogram['buckets']:
-    if bucket['mean'] > 0:
-      sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count']
-  return geom_mean, math.sqrt(sum_of_squares / count)
-
-
-def _ValueToString(v):
-  # Special case for floats so we don't print using scientific notation.
-  if isinstance(v, float):
-    return '%f' % v
-  else:
-    return str(v)
-
-
-def _MeanAndStdDevFromList(values):
-  avg = None
-  sd = None
-  if len(values) > 1:
-    try:
-      value = '[%s]' % ','.join([_ValueToString(v) for v in values])
-      avg = sum([float(v) for v in values]) / len(values)
-      sqdiffs = [(float(v) - avg) ** 2 for v in values]
-      variance = sum(sqdiffs) / (len(values) - 1)
-      sd = math.sqrt(variance)
-    except ValueError:
-      value = ', '.join(values)
-  else:
-    value = values[0]
-  return value, avg, sd
-
-
-def PrintPages(page_list):
-  """Prints list of pages to stdout in the format required by perf tests."""
-  print 'Pages: [%s]' % ','.join([_EscapePerfResult(p) for p in page_list])
-
-
-def PrintPerfResult(measurement, trace, values, units,
-                    result_type=perf_result_data_type.DEFAULT,
-                    print_to_stdout=True):
-  """Prints numerical data to stdout in the format required by perf tests.
-
-  The string args may be empty but they must not contain any colons (:) or
-  equals signs (=).
-  This is parsed by the buildbot using:
-  http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/process_log_utils.py
-
-  Args:
-    measurement: A description of the quantity being measured, e.g. "vm_peak".
-        On the dashboard, this maps to a particular graph. Mandatory.
-    trace: A description of the particular data point, e.g. "reference".
-        On the dashboard, this maps to a particular "line" in the graph.
-        Mandatory.
-    values: A list of numeric measured values. An N-dimensional list will be
-        flattened and treated as a simple list.
-    units: A description of the units of measure, e.g. "bytes".
-    result_type: Accepts values of perf_result_data_type.ALL_TYPES.
-    print_to_stdout: If True, prints the output in stdout instead of returning
-        the output to caller.
-
-  Returns:
-    String of the formatted perf result.
-  """
-  assert perf_result_data_type.IsValidType(result_type), \
-         'result type: %s is invalid' % result_type
-
-  trace_name = _EscapePerfResult(trace)
-
-  if (result_type == perf_result_data_type.UNIMPORTANT or
-      result_type == perf_result_data_type.DEFAULT or
-      result_type == perf_result_data_type.INFORMATIONAL):
-    assert isinstance(values, list)
-    assert '/' not in measurement
-    flattened_values = FlattenList(values)
-    assert len(flattened_values)
-    value, avg, sd = _MeanAndStdDevFromList(flattened_values)
-    output = '%s%s: %s%s%s %s' % (
-        RESULT_TYPES[result_type],
-        _EscapePerfResult(measurement),
-        trace_name,
-        # Do not show the equals sign if the trace is empty. This usually
-        # happens when the measurement alone is clear enough to describe the
-        # result.
-        '= ' if trace_name else '',
-        value,
-        units)
-  else:
-    assert perf_result_data_type.IsHistogram(result_type)
-    assert isinstance(values, list)
-    # The histograms can only be printed individually, there's no computation
-    # across different histograms.
-    assert len(values) == 1
-    value = values[0]
-    output = '%s%s: %s= %s %s' % (
-        RESULT_TYPES[result_type],
-        _EscapePerfResult(measurement),
-        trace_name,
-        value,
-        units)
-    avg, sd = GeomMeanAndStdDevFromHistogram(value)
-
-  if avg:
-    output += '\nAvg %s: %f%s' % (measurement, avg, units)
-  if sd:
-    output += '\nSd  %s: %f%s' % (measurement, sd, units)
-  if print_to_stdout:
-    print output
-    sys.stdout.flush()
-  return output
-
-
-def ReportPerfResult(chart_data, graph_title, trace_title, value, units,
-                     improvement_direction='down', important=True):
-  """Outputs test results in correct format.
-
-  If chart_data is None, outputs the data in the old format. If chart_data is
-  a dictionary, formats it in the chartjson format. Any other type falls back
-  to the old format.
-
-  Args:
-    chart_data: A dictionary corresponding to perf results in the chartjson
-        format.
-    graph_title: A string containing the name of the chart to add the result
-        to.
-    trace_title: A string containing the name of the trace within the chart
-        to add the result to.
-    value: The value of the result being reported.
-    units: The units of the value being reported.
-    improvement_direction: A string denoting whether higher or lower is
-        better for the result. Either 'up' or 'down'.
-    important: A boolean denoting whether the result is important or not.
-  """
-  if chart_data and isinstance(chart_data, dict):
-    chart_data['charts'].setdefault(graph_title, {})
-    chart_data['charts'][graph_title][trace_title] = {
-        'type': 'scalar',
-        'value': value,
-        'units': units,
-        'improvement_direction': improvement_direction,
-        'important': important
-    }
-  else:
-    PrintPerfResult(graph_title, trace_title, [value], units)
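
A small usage sketch for the helper above (module name as in the file; the
measurement, trace, and sample values are made up):

  import perf_tests_results_helper as helper

  # For a plain list of samples the helper prints the *RESULT line followed
  # by the mean and the sample standard deviation:
  helper.PrintPerfResult('vm_peak', 'reference', [2, 4, 6], 'bytes')
  # *RESULT vm_peak: reference= [2,4,6] bytes
  # Avg vm_peak: 4.000000bytes
  # Sd  vm_peak: 2.000000bytes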
diff --git a/build/util/lib/common/unittest_util.py b/build/util/lib/common/unittest_util.py
deleted file mode 100644
index 9683ab7..0000000
--- a/build/util/lib/common/unittest_util.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utilities for dealing with the python unittest module."""
-
-import fnmatch
-import re
-import sys
-import unittest
-
-
-class _TextTestResult(unittest._TextTestResult):
-  """A test result class that can print formatted text results to a stream.
-
-  Results printed in conformance with gtest output format, like:
-  [ RUN        ] autofill.AutofillTest.testAutofillInvalid: "test desc."
-  [         OK ] autofill.AutofillTest.testAutofillInvalid
-  [ RUN        ] autofill.AutofillTest.testFillProfile: "test desc."
-  [         OK ] autofill.AutofillTest.testFillProfile
-  [ RUN        ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test."
-  [         OK ] autofill.AutofillTest.testFillProfileCrazyCharacters
-  """
-  def __init__(self, stream, descriptions, verbosity):
-    unittest._TextTestResult.__init__(self, stream, descriptions, verbosity)
-    self._fails = set()
-
-  def _GetTestURI(self, test):
-    return '%s.%s.%s' % (test.__class__.__module__,
-                         test.__class__.__name__,
-                         test._testMethodName)
-
-  def getDescription(self, test):
-    return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())
-
-  def startTest(self, test):
-    unittest.TestResult.startTest(self, test)
-    self.stream.writeln('[ RUN        ] %s' % self.getDescription(test))
-
-  def addSuccess(self, test):
-    unittest.TestResult.addSuccess(self, test)
-    self.stream.writeln('[         OK ] %s' % self._GetTestURI(test))
-
-  def addError(self, test, err):
-    unittest.TestResult.addError(self, test, err)
-    self.stream.writeln('[      ERROR ] %s' % self._GetTestURI(test))
-    self._fails.add(self._GetTestURI(test))
-
-  def addFailure(self, test, err):
-    unittest.TestResult.addFailure(self, test, err)
-    self.stream.writeln('[     FAILED ] %s' % self._GetTestURI(test))
-    self._fails.add(self._GetTestURI(test))
-
-  def getRetestFilter(self):
-    return ':'.join(self._fails)
-
-
-class TextTestRunner(unittest.TextTestRunner):
-  """Test Runner for displaying test results in textual format.
-
-  Results are displayed in conformance with google test output.
-  """
-
-  def __init__(self, verbosity=1):
-    unittest.TextTestRunner.__init__(self, stream=sys.stderr,
-                                     verbosity=verbosity)
-
-  def _makeResult(self):
-    return _TextTestResult(self.stream, self.descriptions, self.verbosity)
-
-
-def GetTestsFromSuite(suite):
-  """Returns all the tests from a given test suite."""
-  tests = []
-  for x in suite:
-    if isinstance(x, unittest.TestSuite):
-      tests += GetTestsFromSuite(x)
-    else:
-      tests += [x]
-  return tests
-
-
-def GetTestNamesFromSuite(suite):
-  """Returns a list of every test name in the given suite."""
-  return [GetTestName(x) for x in GetTestsFromSuite(suite)]
-
-
-def GetTestName(test):
-  """Gets the test name of the given unittest test."""
-  return '.'.join([test.__class__.__module__,
-                   test.__class__.__name__,
-                   test._testMethodName])
-
-
-def FilterTestSuite(suite, gtest_filter):
-  """Returns a new filtered tests suite based on the given gtest filter.
-
-  See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md
-  for gtest_filter specification.
-  """
-  return unittest.TestSuite(FilterTests(GetTestsFromSuite(suite), gtest_filter))
-
-
-def FilterTests(all_tests, gtest_filter):
-  """Filter a list of tests based on the given gtest filter.
-
-  Args:
-    all_tests: List of tests (unittest.TestSuite)
-    gtest_filter: Filter to apply.
-
-  Returns:
-    Filtered subset of the given list of tests.
-  """
-  test_names = [GetTestName(test) for test in all_tests]
-  filtered_names = FilterTestNames(test_names, gtest_filter)
-  return [test for test in all_tests if GetTestName(test) in filtered_names]
-
-
-def FilterTestNames(all_tests, gtest_filter):
-  """Filter a list of test names based on the given gtest filter.
-
-  See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md
-  for gtest_filter specification.
-
-  Args:
-    all_tests: List of test names.
-    gtest_filter: Filter to apply.
-
-  Returns:
-    Filtered subset of the given list of test names.
-  """
-  pattern_groups = gtest_filter.split('-')
-  positive_patterns = ['*']
-  if pattern_groups[0]:
-    positive_patterns = pattern_groups[0].split(':')
-  negative_patterns = []
-  if len(pattern_groups) > 1:
-    negative_patterns = pattern_groups[1].split(':')
-
-  neg_pats = None
-  if negative_patterns:
-    neg_pats = re.compile('|'.join(fnmatch.translate(p) for p in
-                                   negative_patterns))
-
-  tests = []
-  test_set = set()
-  for pattern in positive_patterns:
-    pattern_tests = [
-        test for test in all_tests
-        if (fnmatch.fnmatch(test, pattern)
-            and not (neg_pats and neg_pats.match(test))
-            and test not in test_set)]
-    tests.extend(pattern_tests)
-    test_set.update(pattern_tests)
-  return tests
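
A minimal sketch of how the filter and runner above are combined; the test
module name is hypothetical:

  import unittest
  import unittest_util

  # Collect tests from a hypothetical module, keep Foo.* but drop *.FlakyTest,
  # then run them with gtest-style [ RUN ] / [ OK ] / [ FAILED ] output.
  suite = unittest.defaultTestLoader.loadTestsFromName('my_tests')
  filtered = unittest_util.FilterTestSuite(suite, 'Foo.*-*.FlakyTest')
  unittest_util.TextTestRunner(verbosity=1).run(filtered)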
diff --git a/build/util/lib/common/unittest_util_test.py b/build/util/lib/common/unittest_util_test.py
deleted file mode 100755
index 1514c9b..0000000
--- a/build/util/lib/common/unittest_util_test.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# pylint: disable=protected-access
-
-import logging
-import sys
-import unittest
-import unittest_util
-
-
-class FilterTestNamesTest(unittest.TestCase):
-
-  possible_list = ["Foo.One",
-                   "Foo.Two",
-                   "Foo.Three",
-                   "Bar.One",
-                   "Bar.Two",
-                   "Bar.Three",
-                   "Quux.One",
-                   "Quux.Two",
-                   "Quux.Three"]
-
-  def testMatchAll(self):
-    x = unittest_util.FilterTestNames(self.possible_list, "*")
-    self.assertEquals(x, self.possible_list)
-
-  def testMatchPartial(self):
-    x = unittest_util.FilterTestNames(self.possible_list, "Foo.*")
-    self.assertEquals(x, ["Foo.One", "Foo.Two", "Foo.Three"])
-
-  def testMatchFull(self):
-    x = unittest_util.FilterTestNames(self.possible_list, "Foo.Two")
-    self.assertEquals(x, ["Foo.Two"])
-
-  def testMatchTwo(self):
-    x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*")
-    self.assertEquals(x, ["Bar.One",
-                          "Bar.Two",
-                          "Bar.Three",
-                          "Foo.One",
-                          "Foo.Two",
-                          "Foo.Three"])
-
-  def testMatchWithNegative(self):
-    x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*-*.Three")
-    self.assertEquals(x, ["Bar.One",
-                          "Bar.Two",
-                          "Foo.One",
-                          "Foo.Two"])
-
-  def testMatchOverlapping(self):
-    x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:*.Two")
-    self.assertEquals(x, ["Bar.One",
-                          "Bar.Two",
-                          "Bar.Three",
-                          "Foo.Two",
-                          "Quux.Two"])
-
-
-if __name__ == '__main__':
-  logging.getLogger().setLevel(logging.DEBUG)
-  unittest.main(verbosity=2)
diff --git a/build/util/lib/common/util.py b/build/util/lib/common/util.py
deleted file mode 100644
index a415b1f..0000000
--- a/build/util/lib/common/util.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Generic utilities for all python scripts."""
-
-import atexit
-import httplib
-import os
-import signal
-import socket
-import stat
-import subprocess
-import sys
-import tempfile
-import urlparse
-
-
-def GetPlatformName():
-  """Return a string to be used in paths for the platform."""
-  if IsWindows():
-    return 'win'
-  if IsMac():
-    return 'mac'
-  if IsLinux():
-    return 'linux'
-  raise NotImplementedError('Unknown platform "%s".' % sys.platform)
-
-
-def IsWindows():
-  return sys.platform == 'cygwin' or sys.platform.startswith('win')
-
-
-def IsLinux():
-  return sys.platform.startswith('linux')
-
-
-def IsMac():
-  return sys.platform.startswith('darwin')
-
-
-def _DeleteDir(path):
-  """Deletes a directory recursively, which must exist."""
-  # Don't use shutil.rmtree because it can't delete read-only files on Win.
-  for root, dirs, files in os.walk(path, topdown=False):
-    for name in files:
-      filename = os.path.join(root, name)
-      os.chmod(filename, stat.S_IWRITE)
-      os.remove(filename)
-    for name in dirs:
-      os.rmdir(os.path.join(root, name))
-  os.rmdir(path)
-
-
-def Delete(path):
-  """Deletes the given file or directory (recursively), which must exist."""
-  if os.path.isdir(path):
-    _DeleteDir(path)
-  else:
-    os.remove(path)
-
-
-def MaybeDelete(path):
-  """Deletes the given file or directory (recurisvely), if it exists."""
-  if os.path.exists(path):
-    Delete(path)
-
-
-def MakeTempDir(parent_dir=None):
-  """Creates a temporary directory and returns an absolute path to it.
-
-  The temporary directory is automatically deleted when the python interpreter
-  exits normally.
-
-  Args:
-    parent_dir: the directory to create the temp dir in. If None, the system
-                temp dir is used.
-
-  Returns:
-    The absolute path to the temporary directory.
-  """
-  path = tempfile.mkdtemp(dir=parent_dir)
-  atexit.register(MaybeDelete, path)
-  return path
-
-
-def Unzip(zip_path, output_dir):
-  """Unzips the given zip file using a system installed unzip tool.
-
-  Args:
-    zip_path: zip file to unzip.
-    output_dir: directory to unzip the contents of the zip file. The directory
-                must exist.
-
-  Raises:
-    RuntimeError if the unzip operation fails.
-  """
-  if IsWindows():
-    unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y']
-  else:
-    unzip_cmd = ['unzip', '-o']
-  unzip_cmd += [zip_path]
-  if RunCommand(unzip_cmd, output_dir) != 0:
-    raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir))
-
-
-def Kill(pid):
-  """Terminate the given pid."""
-  if IsWindows():
-    subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
-  else:
-    os.kill(pid, signal.SIGTERM)
-
-
-def RunCommand(cmd, cwd=None):
-  """Runs the given command and returns the exit code.
-
-  Args:
-    cmd: list of command arguments.
-    cwd: working directory to execute the command, or None if the current
-         working directory should be used.
-
-  Returns:
-    The exit code of the command.
-  """
-  process = subprocess.Popen(cmd, cwd=cwd)
-  process.wait()
-  return process.returncode
-
-
-def DoesUrlExist(url):
-  """Determines whether a resource exists at the given URL.
-
-  Args:
-    url: URL to be verified.
-
-  Returns:
-    True if url exists, otherwise False.
-  """
-  parsed = urlparse.urlparse(url)
-  try:
-    conn = httplib.HTTPConnection(parsed.netloc)
-    conn.request('HEAD', parsed.path)
-    response = conn.getresponse()
-  except (socket.gaierror, socket.error):
-    return False
-  finally:
-    conn.close()
-  # Follow both permanent (301) and temporary (302) redirects.
-  if response.status == 302 or response.status == 301:
-    return DoesUrlExist(response.getheader('location'))
-  return response.status == 200
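
A short usage sketch for these helpers; the archive path and command are
placeholders:

  import util

  # The temporary directory is registered with atexit and removed when the
  # interpreter exits normally.
  work_dir = util.MakeTempDir()
  util.Unzip('/tmp/archive.zip', work_dir)

  # RunCommand returns the child's exit code; non-zero means failure.
  if util.RunCommand(['ls', '-l'], cwd=work_dir) != 0:
    raise RuntimeError('listing failed')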
diff --git a/build/util/process_version.gni b/build/util/process_version.gni
deleted file mode 100644
index e27346e..0000000
--- a/build/util/process_version.gni
+++ /dev/null
@@ -1,126 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Runs the version processing script over the given template file to produce
-# an output file. This is used for generating various forms of files that
-# incorporate the product name and version.
-#
-# Unlike GYP, this will actually compile the resulting file, so you don't need
-# to add it separately to the sources, just depend on the target.
-#
-# In GYP this is a rule that runs once per ".ver" file. In GN this just
-# processes one file per invocation of the template so you may have to have
-# multiple targets.
-#
-# Parameters:
-#   sources (optional):
-#     List of file names to read. When converting a GYP target, this should
-#     list the 'source' (see above) as well as any extra_variable_files.
-#     The files will be passed to version.py in the order specified here.
-#
-#   output:
-#     File name of file to write. In GYP this is unspecified and it will
-#     make up a file name for you based on the input name, and tack on
-#     "_version.rc" to the end. But in GN you need to specify the full name.
-#
-#   template_file (optional):
-#     Template file to use (not a list). Most Windows users that want to use
-#     this to process a .rc template should use process_version_rc_template(),
-#     defined in //chrome/process_version_rc_template.gni, instead.
-#
-#   extra_args (optional):
-#     Extra arguments to pass to version.py. Any "-f <filename>" args should
-#     use sources instead.
-#
-#   process_only (optional, defaults to false)
-#     Set to generate only one action that processes the version file and
-#     doesn't attempt to link the result into a source set. This is for if
-#     you are processing the version as data only.
-#
-#   visibility (optional)
-#
-# Example:
-#   process_version("myversion") {
-#     sources = [
-#       "//chrome/VERSION"
-#       "myfile.h.in"
-#     ]
-#     output = "$target_gen_dir/myfile.h"
-#     extra_args = [ "-e", "FOO=42" ]
-#   }
-template("process_version") {
-  assert(defined(invoker.output), "Output must be defined for $target_name")
-
-  process_only = defined(invoker.process_only) && invoker.process_only
-
-  if (process_only) {
-    action_name = target_name
-  } else {
-    action_name = target_name + "_action"
-    source_set_name = target_name
-  }
-
-  action(action_name) {
-    script = "//build/util/version.py"
-
-    inputs = []
-    if (defined(invoker.inputs)) {
-      inputs += invoker.inputs
-    }
-    if (defined(invoker.template_file)) {
-      inputs += [ invoker.template_file ]
-    }
-
-    outputs = [
-      invoker.output,
-    ]
-
-    args = []
-
-    if (is_official_build) {
-      args += [ "--official" ]
-    }
-
-    if (defined(invoker.sources)) {
-      inputs += invoker.sources
-      foreach(i, invoker.sources) {
-        args += [
-          "-f",
-          rebase_path(i, root_build_dir),
-        ]
-      }
-    }
-
-    if (defined(invoker.extra_args)) {
-      args += invoker.extra_args
-    }
-    args += [
-      "-o",
-      rebase_path(invoker.output, root_build_dir),
-    ]
-    if (defined(invoker.template_file)) {
-      args += [ rebase_path(invoker.template_file, root_build_dir) ]
-    }
-
-    forward_variables_from(invoker, [ "deps" ])
-
-    if (process_only) {
-      # When processing only, visibility gets applied to this target.
-      forward_variables_from(invoker, [ "visibility" ])
-    } else {
-      # When linking the result, only the source set can depend on the action.
-      visibility = [ ":$source_set_name" ]
-    }
-  }
-
-  if (!process_only) {
-    source_set(source_set_name) {
-      forward_variables_from(invoker, [ "visibility" ])
-      sources = get_target_outputs(":$action_name")
-      public_deps = [
-        ":$action_name",
-      ]
-    }
-  }
-}
diff --git a/build/util/version.gni b/build/util/version.gni
deleted file mode 100644
index 01e3807..0000000
--- a/build/util/version.gni
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This exposes the Chrome version as GN variables for use in build files.
-#
-# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively.
-# However, it is far better to write an action (or use the process_version
-# wrapper in build/util/process_version.gni) to generate a file at build-time
-# with the information you need. This allows better dependency checking and
-# GN will run faster.
-#
-# These values should only be used if you REALLY need to depend on them at
-# build-time, for example, in the computation of output file names.
-
-# Give version.py a pattern that will expand to a GN scope consisting of
-# all values we need at once.
-_version_dictionary_template = "full = \"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\" " +
-                               "major = \"@MAJOR@\" minor = \"@MINOR@\" " +
-                               "build = \"@BUILD@\" patch = \"@PATCH@\" "
-
-# The file containing the Chrome version number.
-chrome_version_file = "//chrome/VERSION"
-
-_result = exec_script("version.py",
-                      [
-                        "-f",
-                        rebase_path(chrome_version_file, root_build_dir),
-                        "-t",
-                        _version_dictionary_template,
-                      ],
-                      "scope",
-                      [ chrome_version_file ])
-
-# Full version. For example "45.0.12321.0"
-chrome_version_full = _result.full
-
-# The constituent parts of the full version.
-chrome_version_major = _result.major
-chrome_version_minor = _result.minor
-chrome_version_build = _result.build
-chrome_version_patch = _result.patch
-
-if (is_mac) {
-  _result = exec_script("version.py",
-                        [
-                          "-f",
-                          rebase_path(chrome_version_file, root_build_dir),
-                          "-t",
-                          "@BUILD@.@PATCH_HI@.@PATCH_LO@",
-                          "-e",
-                          "PATCH_HI=int(PATCH)/256",
-                          "-e",
-                          "PATCH_LO=int(PATCH)%256",
-                        ],
-                        "trim string",
-                        [ chrome_version_file ])
-  chrome_dylib_version = _result
-}
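
The exec_script() calls above are roughly equivalent to invoking version.py
directly; a sketch using subprocess, with an illustrative VERSION file path:

  import subprocess

  template = ('full = "@MAJOR@.@MINOR@.@BUILD@.@PATCH@" '
              'major = "@MAJOR@" minor = "@MINOR@" '
              'build = "@BUILD@" patch = "@PATCH@" ')
  # Prints a GN scope string such as: full = "68.0.3409.0" major = "68" ...
  print subprocess.check_output(
      ['python', 'build/util/version.py', '-f', 'chrome/VERSION',
       '-t', template])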
diff --git a/build/util/version.py b/build/util/version.py
deleted file mode 100755
index 767412e..0000000
--- a/build/util/version.py
+++ /dev/null
@@ -1,170 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-version.py -- Chromium version string substitution utility.
-"""
-
-import argparse
-import os
-import sys
-
-
-def fetch_values_from_file(values_dict, file_name):
-  """
-  Fetches KEYWORD=VALUE settings from the specified file.
-
-  Everything to the left of the first '=' is the keyword,
-  everything to the right is the value.  No stripping of
-  white space, so beware.
-
-  The file must exist, otherwise you get the Python exception from open().
-  """
-  for line in open(file_name, 'r').readlines():
-    key, val = line.rstrip('\r\n').split('=', 1)
-    values_dict[key] = val
-
-
-def fetch_values(file_list, is_official_build=None):
-  """
-  Returns a dictionary of values to be used for substitution, populating
-  the dictionary with KEYWORD=VALUE settings from the files in 'file_list'.
-
-  Explicitly adds the following value from internal calculations:
-
-    OFFICIAL_BUILD
-  """
-  CHROME_BUILD_TYPE = os.environ.get('CHROME_BUILD_TYPE')
-  if CHROME_BUILD_TYPE == '_official' or is_official_build:
-    official_build = '1'
-  else:
-    official_build = '0'
-
-  values = dict(
-    OFFICIAL_BUILD = official_build,
-  )
-
-  for file_name in file_list:
-    fetch_values_from_file(values, file_name)
-
-  return values
-
-
-def subst_template(contents, values):
-  """
-  Returns the template with substituted values from the specified dictionary.
-
-  Keywords to be substituted are surrounded by '@':  @KEYWORD@.
-
-  No attempt is made to avoid recursive substitution.  The order
-  of evaluation is random based on the order of the keywords returned
-  by the Python dictionary.  So do NOT substitute a value that
-  contains any @KEYWORD@ strings expecting them to be recursively
-  substituted, okay?
-  """
-  for key, val in values.iteritems():
-    try:
-      contents = contents.replace('@' + key + '@', val)
-    except TypeError:
-      print repr(key), repr(val)
-  return contents
-
-
-def subst_file(file_name, values):
-  """
-  Returns the contents of the specified file_name with substituted
-  values from the specified dictionary.
-
-  This is like subst_template, except it operates on a file.
-  """
-  template = open(file_name, 'r').read()
-  return subst_template(template, values)
-
-
-def write_if_changed(file_name, contents):
-  """
-  Writes the specified contents to the specified file_name
-  iff the contents are different than the current contents.
-  """
-  try:
-    old_contents = open(file_name, 'r').read()
-  except EnvironmentError:
-    pass
-  else:
-    if contents == old_contents:
-      return
-    os.unlink(file_name)
-  open(file_name, 'w').write(contents)
-
-
-def main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument('-f', '--file', action='append', default=[],
-                      help='Read variables from FILE.')
-  parser.add_argument('-i', '--input', default=None,
-                      help='Read strings to substitute from FILE.')
-  parser.add_argument('-o', '--output', default=None,
-                      help='Write substituted strings to FILE.')
-  parser.add_argument('-t', '--template', default=None,
-                      help='Use TEMPLATE as the strings to substitute.')
-  parser.add_argument('-e', '--eval', action='append', default=[],
-                      help='Evaluate VAL after reading variables. Can be used '
-                           'to synthesize variables. e.g. -e \'PATCH_HI=int('
-                           'PATCH)/256\'.')
-  parser.add_argument('--official', action='store_true',
-                      help='Whether the current build should be an official '
-                           'build, used in addition to the environment '
-                           'variable.')
-  parser.add_argument('args', nargs=argparse.REMAINDER,
-                      help='For compatibility: INPUT and OUTPUT can be '
-                           'passed as positional arguments.')
-  options = parser.parse_args()
-
-  evals = {}
-  for expression in options.eval:
-    try:
-      evals.update(dict([expression.split('=', 1)]))
-    except ValueError:
-      parser.error('-e requires VAR=VAL')
-
-  # Compatibility with old versions that considered the first two positional
-  # arguments shorthands for --input and --output.
-  while len(options.args) and (options.input is None or \
-                               options.output is None):
-    if options.input is None:
-      options.input = options.args.pop(0)
-    elif options.output is None:
-      options.output = options.args.pop(0)
-  if options.args:
-    parser.error('Unexpected arguments: %r' % options.args)
-
-  values = fetch_values(options.file, options.official)
-  for key, val in evals.iteritems():
-    values[key] = str(eval(val, globals(), values))
-
-  if options.template is not None:
-    contents = subst_template(options.template, values)
-  elif options.input:
-    contents = subst_file(options.input, values)
-  else:
-    # Generate a default set of version information.
-    contents = """MAJOR=%(MAJOR)s
-MINOR=%(MINOR)s
-BUILD=%(BUILD)s
-PATCH=%(PATCH)s
-LASTCHANGE=%(LASTCHANGE)s
-OFFICIAL_BUILD=%(OFFICIAL_BUILD)s
-""" % values
-
-  if options.output is not None:
-    write_if_changed(options.output, contents)
-  else:
-    print contents
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
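
A minimal sketch of the substitution version.py performs; the values below are
illustrative, not a real Chrome version:

  import version

  values = {'MAJOR': '68', 'MINOR': '0', 'BUILD': '3409', 'PATCH': '0'}
  template = 'full = "@MAJOR@.@MINOR@.@BUILD@.@PATCH@"'
  print version.subst_template(template, values)
  # full = "68.0.3409.0"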
diff --git a/build/util/webkit_version.h.in b/build/util/webkit_version.h.in
deleted file mode 100644
index 41960e7..0000000
--- a/build/util/webkit_version.h.in
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// webkit_version.h is generated from webkit_version.h.in.  Edit the source!
-
-#define WEBKIT_VERSION_MAJOR 537
-#define WEBKIT_VERSION_MINOR 36
-#define WEBKIT_SVN_REVISION "@@LASTCHANGE@"
diff --git a/build/vs_toolchain.py b/build/vs_toolchain.py
deleted file mode 100755
index 24a57b2..0000000
--- a/build/vs_toolchain.py
+++ /dev/null
@@ -1,484 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import json
-import os
-import pipes
-import platform
-import re
-import shutil
-import stat
-import subprocess
-import sys
-from gn_helpers import ToGNString
-
-
-script_dir = os.path.dirname(os.path.realpath(__file__))
-chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
-SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
-json_data_file = os.path.join(script_dir, 'win_toolchain.json')
-
-
-# Use MSVS2017 as the default toolchain.
-CURRENT_DEFAULT_TOOLCHAIN_VERSION = '2017'
-
-
-def SetEnvironmentAndGetRuntimeDllDirs():
-  """Sets up os.environ to use the depot_tools VS toolchain with gyp, and
-  returns the location of the VS runtime DLLs so they can be copied into
-  the output directory after gyp generation.
-
-  Return value is [x64path, x86path] or None
-  """
-  vs_runtime_dll_dirs = None
-  depot_tools_win_toolchain = \
-      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
-  # When running on a non-Windows host, only do this if the SDK has explicitly
-  # been downloaded before (in which case json_data_file will exist).
-  if ((sys.platform in ('win32', 'cygwin') or os.path.exists(json_data_file))
-      and depot_tools_win_toolchain):
-    if ShouldUpdateToolchain():
-      update_result = Update()
-      if update_result != 0:
-        raise Exception('Failed to update, error code %d.' % update_result)
-    with open(json_data_file, 'r') as tempf:
-      toolchain_data = json.load(tempf)
-
-    toolchain = toolchain_data['path']
-    version = toolchain_data['version']
-    win_sdk = toolchain_data.get('win_sdk')
-    if not win_sdk:
-      win_sdk = toolchain_data['win8sdk']
-    wdk = toolchain_data['wdk']
-    # TODO(scottmg): The order unfortunately matters in these. They should be
-    # split into separate keys for x86 and x64. (See CopyDlls call below).
-    # http://crbug.com/345992
-    vs_runtime_dll_dirs = toolchain_data['runtime_dirs']
-
-    os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
-    os.environ['GYP_MSVS_VERSION'] = version
-
-    # Limit the scope of the gyp import to only where it is used. This
-    # potentially lets build configs that never execute this block to drop
-    # their GYP checkout.
-    import gyp
-
-    # We need to make sure windows_sdk_path is set to the automated
-    # toolchain values in GYP_DEFINES, but don't want to override any
-    # other values there.
-    gyp_defines_dict = gyp.NameValueListToDict(gyp.ShlexEnv('GYP_DEFINES'))
-    gyp_defines_dict['windows_sdk_path'] = win_sdk
-    os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v)))
-        for k, v in gyp_defines_dict.iteritems())
-
-    os.environ['WINDOWSSDKDIR'] = win_sdk
-    os.environ['WDK_DIR'] = wdk
-    # Include the VS runtime in the PATH in case it's not machine-installed.
-    runtime_path = os.path.pathsep.join(vs_runtime_dll_dirs)
-    os.environ['PATH'] = runtime_path + os.path.pathsep + os.environ['PATH']
-  elif sys.platform == 'win32' and not depot_tools_win_toolchain:
-    if not 'GYP_MSVS_OVERRIDE_PATH' in os.environ:
-      os.environ['GYP_MSVS_OVERRIDE_PATH'] = DetectVisualStudioPath()
-    if not 'GYP_MSVS_VERSION' in os.environ:
-      os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion()
-
-    # When using an installed toolchain these files aren't needed in the output
-    # directory in order to run binaries locally, but they are needed in order
-    # to create isolates or the mini_installer. Copying them to the output
-    # directory ensures that they are available when needed.
-    bitness = platform.architecture()[0]
-    # When running 64-bit python the x64 DLLs will be in System32
-    x64_path = 'System32' if bitness == '64bit' else 'Sysnative'
-    x64_path = os.path.join(os.path.expandvars('%windir%'), x64_path)
-    vs_runtime_dll_dirs = [x64_path, os.path.expandvars('%windir%/SysWOW64')]
-
-  return vs_runtime_dll_dirs
-
-
-def _RegistryGetValueUsingWinReg(key, value):
-  """Use the _winreg module to obtain the value of a registry key.
-
-  Args:
-    key: The registry key.
-    value: The particular registry value to read.
-  Return:
-    contents of the registry key's value, or None on failure.  Throws
-    ImportError if _winreg is unavailable.
-  """
-  import _winreg
-  try:
-    root, subkey = key.split('\\', 1)
-    assert root == 'HKLM'  # Only need HKLM for now.
-    with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
-      return _winreg.QueryValueEx(hkey, value)[0]
-  except WindowsError:
-    return None
-
-
-def _RegistryGetValue(key, value):
-  try:
-    return _RegistryGetValueUsingWinReg(key, value)
-  except ImportError:
-    raise Exception('The Python library _winreg was not found.')
-
-
-def GetVisualStudioVersion():
-  """Return GYP_MSVS_VERSION of Visual Studio.
-  """
-  return os.environ.get('GYP_MSVS_VERSION', CURRENT_DEFAULT_TOOLCHAIN_VERSION)
-
-
-def DetectVisualStudioPath():
-  """Return path to the GYP_MSVS_VERSION of Visual Studio.
-  """
-
-  # Note that this code is used from
-  # build/toolchain/win/setup_toolchain.py as well.
-  version_as_year = GetVisualStudioVersion()
-  year_to_version = {
-      '2017': '15.0',
-  }
-  if version_as_year not in year_to_version:
-    raise Exception(('Visual Studio version %s (from GYP_MSVS_VERSION)'
-                     ' not supported. Supported versions are: %s') % (
-                       version_as_year, ', '.join(year_to_version.keys())))
-  version = year_to_version[version_as_year]
-  if version_as_year == '2017':
-    # The VC++ 2017 install location needs to be located using COM instead of
-    # the registry. For details see:
-    # https://blogs.msdn.microsoft.com/heaths/2016/09/15/changes-to-visual-studio-15-setup/
-    # For now we use a hardcoded default with an environment variable override.
-    for path in (
-        os.environ.get('vs2017_install'),
-        os.path.expandvars('%ProgramFiles(x86)%'
-                           '/Microsoft Visual Studio/2017/Enterprise'),
-        os.path.expandvars('%ProgramFiles(x86)%'
-                           '/Microsoft Visual Studio/2017/Professional'),
-        os.path.expandvars('%ProgramFiles(x86)%'
-                           '/Microsoft Visual Studio/2017/Community')):
-      if path and os.path.exists(path):
-        return path
-
-  raise Exception(('Visual Studio Version %s (from GYP_MSVS_VERSION)'
-                   ' not found.') % (version_as_year))
-
-
-def _CopyRuntimeImpl(target, source, verbose=True):
-  """Copy |source| to |target| if it doesn't already exist or if it needs to be
-  updated (comparing last modified times as an approximate float match, since
-  for some reason the values tend to differ by ~1e-07 despite being copies of
-  the same file... https://crbug.com/603603).
-  """
-  if (os.path.isdir(os.path.dirname(target)) and
-      (not os.path.isfile(target) or
-       abs(os.stat(target).st_mtime - os.stat(source).st_mtime) >= 0.01)):
-    if verbose:
-      print 'Copying %s to %s...' % (source, target)
-    if os.path.exists(target):
-      # Make the file writable so that we can delete it now, and keep it
-      # readable.
-      os.chmod(target, stat.S_IWRITE | stat.S_IREAD)
-      os.unlink(target)
-    shutil.copy2(source, target)
-    # Make the file writable so that we can overwrite or delete it later,
-    # keep it readable.
-    os.chmod(target, stat.S_IWRITE | stat.S_IREAD)
-
-
-def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, dll_pattern, suffix):
-  """Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't
-  exist, but the target directory does exist."""
-  for file_part in ('msvcp', 'vccorlib', 'vcruntime'):
-    dll = dll_pattern % file_part
-    target = os.path.join(target_dir, dll)
-    source = os.path.join(source_dir, dll)
-    _CopyRuntimeImpl(target, source)
-  # Copy the UCRT files from the Windows SDK. This location includes the
-  # api-ms-win-crt-*.dll files that are not found in the Windows directory.
-  # These files are needed for component builds. If WINDOWSSDKDIR is not set
-  # use the default SDK path. This will be the case when
-  # DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.
-  win_sdk_dir = os.path.normpath(
-      os.environ.get('WINDOWSSDKDIR',
-                     os.path.expandvars('%ProgramFiles(x86)%'
-                                        '\\Windows Kits\\10')))
-  ucrt_dll_dirs = os.path.join(win_sdk_dir, 'Redist', 'ucrt', 'DLLs',
-                               target_cpu)
-  ucrt_files = glob.glob(os.path.join(ucrt_dll_dirs, 'api-ms-win-*.dll'))
-  assert len(ucrt_files) > 0
-  for ucrt_src_file in ucrt_files:
-    file_part = os.path.basename(ucrt_src_file)
-    ucrt_dst_file = os.path.join(target_dir, file_part)
-    _CopyRuntimeImpl(ucrt_dst_file, ucrt_src_file, False)
-  _CopyRuntimeImpl(os.path.join(target_dir, 'ucrtbase' + suffix),
-                    os.path.join(source_dir, 'ucrtbase' + suffix))
-
-
-def FindVCToolsRoot():
-  """In VS2017 the PGO runtime dependencies are located in
-  {toolchain_root}/VC/Tools/MSVC/{x.y.z}/bin/Host{target_cpu}/{target_cpu}/.
-  The {x.y.z} part is likely to change in case of a minor update of the
-  toolchain, so we don't hardcode it here (except for the major number).
-
-  This returns the '{toolchain_root}/VC/Tools/MSVC/{x.y.z}/bin/' path.
-
-  This function should only be called when using VS2017.
-  """
-  assert GetVisualStudioVersion() == '2017'
-  SetEnvironmentAndGetRuntimeDllDirs()
-  assert ('GYP_MSVS_OVERRIDE_PATH' in os.environ)
-  vc_tools_msvc_root = os.path.join(os.environ['GYP_MSVS_OVERRIDE_PATH'],
-      'VC', 'Tools', 'MSVC')
-  for directory in os.listdir(vc_tools_msvc_root):
-    if not os.path.isdir(os.path.join(vc_tools_msvc_root, directory)):
-      continue
-    if re.match('14\.\d+\.\d+', directory):
-      return os.path.join(vc_tools_msvc_root, directory, 'bin')
-  raise Exception('Unable to find the VC tools directory.')
-
-
-def _CopyPGORuntime(target_dir, target_cpu):
-  """Copy the runtime dependencies required during a PGO build.
-  """
-  env_version = GetVisualStudioVersion()
-  # These dependencies will be in a different location depending on the version
-  # of the toolchain.
-  if env_version == '2017':
-    pgo_runtime_root = FindVCToolsRoot()
-    assert pgo_runtime_root
-    # There's no version of pgosweep.exe in HostX64/x86, so we use the copy
-    # from HostX86/x86.
-    pgo_x86_runtime_dir = os.path.join(pgo_runtime_root, 'HostX86', 'x86')
-    pgo_x64_runtime_dir = os.path.join(pgo_runtime_root, 'HostX64', 'x64')
-  else:
-    raise Exception('Unexpected toolchain version: %s.' % env_version)
-
-  # We need to copy 2 runtime dependencies used during the profiling step:
-  #     - pgort140.dll: runtime library required to run the instrumented image.
-  #     - pgosweep.exe: executable used to collect the profiling data
-  pgo_runtimes = ['pgort140.dll', 'pgosweep.exe']
-  for runtime in pgo_runtimes:
-    if target_cpu == 'x86':
-      source = os.path.join(pgo_x86_runtime_dir, runtime)
-    elif target_cpu == 'x64':
-      source = os.path.join(pgo_x64_runtime_dir, runtime)
-    else:
-      raise NotImplementedError("Unexpected target_cpu value: " + target_cpu)
-    if not os.path.exists(source):
-      raise Exception('Unable to find %s.' % source)
-    _CopyRuntimeImpl(os.path.join(target_dir, runtime), source)
-
-
-def _CopyRuntime(target_dir, source_dir, target_cpu, debug):
-  """Copy the VS runtime DLLs, only if the target doesn't exist, but the target
-  directory does exist. Handles VS 2015 and VS 2017."""
-  suffix = "d.dll" if debug else ".dll"
-  # VS 2017 uses the same CRT DLLs as VS 2015.
-  _CopyUCRTRuntime(target_dir, source_dir, target_cpu, '%s140' + suffix,
-                    suffix)
-
-
-def CopyDlls(target_dir, configuration, target_cpu):
-  """Copy the VS runtime DLLs into the requested directory as needed.
-
-  configuration is one of 'Debug' or 'Release'.
-  target_cpu is one of 'x86' or 'x64'.
-
-  The debug configuration gets both the debug and release DLLs; the
-  release config only the latter.
-  """
-  vs_runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
-  if not vs_runtime_dll_dirs:
-    return
-
-  x64_runtime, x86_runtime = vs_runtime_dll_dirs
-  runtime_dir = x64_runtime if target_cpu == 'x64' else x86_runtime
-  _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=False)
-  if configuration == 'Debug':
-    _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True)
-  else:
-    _CopyPGORuntime(target_dir, target_cpu)
-
-  _CopyDebugger(target_dir, target_cpu)
-
-
-def _CopyDebugger(target_dir, target_cpu):
-  """Copy dbghelp.dll and dbgcore.dll into the requested directory as needed.
-
-  target_cpu is one of 'x86' or 'x64'.
-
-  dbghelp.dll is used when Chrome needs to symbolize stacks. Copying this file
-  from the SDK directory avoids using the system copy of dbghelp.dll and
-  ensures compatibility with recent debug information formats, such as VS
-  2017 /debug:fastlink PDBs.
-
-  dbgcore.dll is needed when using some functions from dbghelp.dll (like
-  MinidumpWriteDump).
-  """
-  win_sdk_dir = SetEnvironmentAndGetSDKDir()
-  if not win_sdk_dir:
-    return
-
-  # List of debug files that should be copied; the first element of the tuple
-  # is the name of the file and the second indicates if it's optional.
-  debug_files = [('dbghelp.dll', False), ('dbgcore.dll', True)]
-  for debug_file, is_optional in debug_files:
-    full_path = os.path.join(win_sdk_dir, 'Debuggers', target_cpu, debug_file)
-    if not os.path.exists(full_path):
-      if is_optional:
-        continue
-      else:
-        # TODO(crbug.com/773476): remove version requirement.
-        raise Exception('%s not found in "%s"\r\nYou must install the '
-                        '"Debugging Tools for Windows" feature from the Windows'
-                        ' 10 SDK. You must use v10.0.17134.0 of the SDK.'
-                        % (debug_file, full_path))
-    target_path = os.path.join(target_dir, debug_file)
-    _CopyRuntimeImpl(target_path, full_path)
-
-
-def _GetDesiredVsToolchainHashes():
-  """Load a list of SHA1s corresponding to the toolchains that we want installed
-  to build with."""
-  env_version = GetVisualStudioVersion()
-  if env_version == '2017':
-    # VS 2017 Update 7.1 (15.7.1) with 10.0.17134.12 SDK.
-    toolchain_hash = '5454e45bf3764c03d3fc1024b3bf5bc41e3ab62c'
-    # Third parties that do not have access to the canonical toolchain can map
-    # canonical toolchain version to their own toolchain versions.
-    toolchain_hash_mapping_key = 'GYP_MSVS_HASH_%s' % toolchain_hash
-    return [os.environ.get(toolchain_hash_mapping_key, toolchain_hash)]
-  raise Exception('Unsupported VS version %s' % env_version)
-
-
-def ShouldUpdateToolchain():
-  """Check if the toolchain should be upgraded."""
-  if not os.path.exists(json_data_file):
-    return True
-  with open(json_data_file, 'r') as tempf:
-    toolchain_data = json.load(tempf)
-  version = toolchain_data['version']
-  env_version = GetVisualStudioVersion()
-  # If there's a mismatch between the version set in the environment and the one
-  # in the json file then the toolchain should be updated.
-  return version != env_version
-
-
-def Update(force=False):
-  """Requests an update of the toolchain to the specific hashes we have at
-  this revision. The update outputs a .json of the various configuration
-  information required to pass to gyp which we use in |GetToolchainDir()|.
-  """
-  if force != False and force != '--force':
-    print >>sys.stderr, 'Unknown parameter "%s"' % force
-    return 1
-  if force == '--force' or os.path.exists(json_data_file):
-    force = True
-
-  depot_tools_win_toolchain = \
-      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
-  if ((sys.platform in ('win32', 'cygwin') or force) and
-        depot_tools_win_toolchain):
-    import find_depot_tools
-    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
-
-    # On Linux, the file system is usually case-sensitive while the Windows
-    # SDK only works on case-insensitive file systems.  If it doesn't already
-    # exist, set up a ciopfs fuse mount to put the SDK in a case-insensitive
-    # part of the file system.
-    toolchain_dir = os.path.join(depot_tools_path, 'win_toolchain', 'vs_files')
-    # For testing this block, unmount existing mounts with
-    # fusermount -u third_party/depot_tools/win_toolchain/vs_files
-    if sys.platform.startswith('linux') and not os.path.ismount(toolchain_dir):
-      import distutils.spawn
-      ciopfs = distutils.spawn.find_executable('ciopfs')
-      if not ciopfs:
-        # ciopfs not found in PATH; try the one downloaded from the DEPS hook.
-        ciopfs = os.path.join(script_dir, 'ciopfs')
-      if not os.path.isdir(toolchain_dir):
-        os.mkdir(toolchain_dir)
-      if not os.path.isdir(toolchain_dir + '.ciopfs'):
-        os.mkdir(toolchain_dir + '.ciopfs')
-      # Without use_ino, clang's #pragma once and Wnonportable-include-path
-      # both don't work right, see https://llvm.org/PR34931
-      # use_ino doesn't slow down builds, so it seems there's no drawback to
-      # just using it always.
-      subprocess.check_call([
-          ciopfs, '-o', 'use_ino', toolchain_dir + '.ciopfs', toolchain_dir])
-
-    # Necessary so that get_toolchain_if_necessary.py will put the VS toolkit
-    # in the correct directory.
-    os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion()
-    get_toolchain_args = [
-        sys.executable,
-        os.path.join(depot_tools_path,
-                    'win_toolchain',
-                    'get_toolchain_if_necessary.py'),
-        '--output-json', json_data_file,
-      ] + _GetDesiredVsToolchainHashes()
-    if force:
-      get_toolchain_args.append('--force')
-    subprocess.check_call(get_toolchain_args)
-
-  return 0
-
-
-def NormalizePath(path):
-  while path.endswith("\\"):
-    path = path[:-1]
-  return path
-
-
-def SetEnvironmentAndGetSDKDir():
-  """Gets location information about the current sdk (must have been
-  previously updated by 'update'). This is used for the GN build."""
-  SetEnvironmentAndGetRuntimeDllDirs()
-
-  # If WINDOWSSDKDIR is not set, search the default SDK path and set it.
-  if not 'WINDOWSSDKDIR' in os.environ:
-    default_sdk_path = os.path.expandvars('%ProgramFiles(x86)%'
-                                          '\\Windows Kits\\10')
-    if os.path.isdir(default_sdk_path):
-      os.environ['WINDOWSSDKDIR'] = default_sdk_path
-
-  return NormalizePath(os.environ['WINDOWSSDKDIR'])
-
-
-def GetToolchainDir():
-  """Gets location information about the current toolchain (must have been
-  previously updated by 'update'). This is used for the GN build."""
-  runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
-  win_sdk_dir = SetEnvironmentAndGetSDKDir()
-
-  print '''vs_path = %s
-sdk_path = %s
-vs_version = %s
-wdk_dir = %s
-runtime_dirs = %s
-''' % (
-      ToGNString(NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH'])),
-      ToGNString(win_sdk_dir),
-      ToGNString(GetVisualStudioVersion()),
-      ToGNString(NormalizePath(os.environ.get('WDK_DIR', ''))),
-      ToGNString(os.path.pathsep.join(runtime_dll_dirs or ['None'])))
-
-
-def main():
-  commands = {
-      'update': Update,
-      'get_toolchain_dir': GetToolchainDir,
-      'copy_dlls': CopyDlls,
-  }
-  if len(sys.argv) < 2 or sys.argv[1] not in commands:
-    print >>sys.stderr, 'Expected one of: %s' % ', '.join(commands)
-    return 1
-  return commands[sys.argv[1]](*sys.argv[2:])
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/whitespace_file.txt b/build/whitespace_file.txt
deleted file mode 100644
index bd3bd32..0000000
--- a/build/whitespace_file.txt
+++ /dev/null
@@ -1,175 +0,0 @@
-Copyright 2014 The Chromium Authors. All rights reserved.
-Use of this useless file is governed by a BSD-style license that can be
-found in the LICENSE file.
-
-
-This file is used for making non-code changes to trigger buildbot cycles. Make
-any modification below this line.
-
-======================================================================
-
-Let's make a story. Add zero+ sentences for every commit:
-
-CHÄPTER 1:
-It was a dark and blinky night; the rain fell in torrents -- except at
-occasional intervals, when it was checked by a violent gust of wind which
-swept up the streets (for it is in London that our scene lies), rattling along
-the housetops, and fiercely agitating the scanty flame of the lamps that
-struggled against the elements. A hooded figure emerged.
-
-It was a Domo-Kun.
-
-"What took you so long?", inquired his wife.
-
-Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the
-waffles you brought him?" "You know him, he's not one to forego a waffle,
-no matter how burnt," he snickered.
-
-The pause was filled with the sound of compile errors.
-
-CHAPTER 2:
-The jelly was as dark as night, and just as runny.
-The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles
-with his fork, watching the runny jelly spread and pool across his plate,
-like the blood of a dying fawn. "It reminds me of that time --" he started, as
-his wife cut in quickly: "-- please. I can't bear to hear it." A flurry of
-images coming from the past flowed through his mind.
-
-"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly
-overhead, barely disturbing the thick cigarette smoke. No doubt was left about
-when the fan was last cleaned.
-
-There was a poignant pause.
-
-CHAPTER 3:
-Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he
-began feeling sick. He thought out loud to himself, "No, he wouldn't have done
-that to me." He considered that perhaps he shouldn't have pushed so hard.
-Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable
-horror that had occurred just the week before.
-
-Next time, there won't be any sushi. Why sushi with waffles anyway?  It's like
-adorning breakfast cereal with halibut -- shameful.
-
-CHAPTER 4:
-The taste of stale sushi in his mouth the next morning was unbearable. He
-wondered where the sushi came from as he attempted to wash the taste away with
-a bottle of 3000¥ sake. He tries to recall the cook's face.  Green? Probably.
-
-CHAPTER 5:
-Many tears later, Mr. Usagi would laugh at the memory of the earnest,
-well-intentioned Domo-Kun. Another day in the life. That is when he realized that
-life goes on.
-
-$CHAPTER6
-
-TRUISMS (1978-1983)
-JENNY HOLZER
-A LITTLE KNOWLEDGE CAN GO A LONG WAY
-A LOT OF PROFESSIONALS ARE CRACKPOTS
-A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER
-A NAME MEANS A LOT JUST BY ITSELF
-A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD
-A RELAXED MAN IS NOT NECESSARILY A BETTER MAN
-NO ONE SHOULD EVER USE SVN
-AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS
-IT IS MANS FATE TO OUTSMART HIMSELF
-BEING SURE OF YOURSELF MEANS YOU'RE A FOOL
-AM NOT
-ARE TOO
-IF AT FIRST YOU DON'T SUCCEED: TRY, EXCEPT, FINALLY
-AND THEN, TIME LEAPT BACKWARDS
-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAaaaaaaaaaaaaaaaaaaaaaaaaaaaahhhh LOT
-I'm really tempted to change something above the line.
-Reeccciiiipppppeeeeeesssssss!!!!!!!!!
-PEOPLE SAY "FAILURE IS NOT AN OPTION", BUT FAILURE IS ALWAYS AN OPTION.
-WHAT GOES UP MUST HAVE A NON-ZERO VELOCITY
-
-I can feel the heat closing in, feel them out there making their moves...
-What could possibly go wrong? We've already ate our cake.
-
-Stand Still. Pause Clocks. We can make the World Stop.
-WUBWUBWUBWUBWUB
-
-I want a 1917 build and you will give me what I want.
-
-This sentence is false.
-
-Beauty is in the eyes of a Beholder.
-
-I'm the best at space.
-
-The first time Yossarian saw the chaplain, he fell madly in love with him.
-*
-*
-*
-Give not thyself up, then, to fire, lest it invert thee, deaden thee; as for
-the time it did me. There is a wisdom that is woe; but there is a woe that is
-madness. And there is a Catskill eagle in some souls that can alike dive down
-into the blackest gorges, and soar out of them again and become invisible in
-the sunny spaces. And even if he for ever flies within the gorge, that gorge
-is in the mountains; so that even in his lowest swoop the mountain eagle is
-still higher than other birds upon the plain, even though they soar.
-*
-*
-*
-
-I'm here to commit lines and drop rhymes
-*
-This is a line to test and try uploading a cl.
-
-And lo, in the year 2014, there was verily an attempt to upgrade to GCC 4.8 on
-the Android bots, and it was good. Except on one bot, where it was bad. And
-lo, the change was reverted, and GCC went back to 4.6, where code is slower
-and less optimized. And verily did it break the build, because artifacts had
-been created with 4.8, and alignment was no longer the same, and a great
-sadness descended upon the Android GN buildbot, and it did refuseth to build
-any more. But the sheriffs thought to themselves: Placebo! Let us clobber the
-bot, and perhaps it will rebuild with GCC 4.6, which hath worked for many many
-seasons. And so they modified the whitespace file with these immortal lines,
-and visited it upon the bots, that great destruction might be wrought upon
-their outdated binaries. In clobberus, veritas.
-
-As the git approaches, light begins to shine through the SCM thrice again...
-However, the git, is, after all, quite stupid.
-
-Suddenly Domo-Kun found itself in a room filled with dazzling mirrors. As
-Domo-Kun looked around, it realized that some of the mirrors were actually but
-pale reflections of true reality.
-
-A herd of wild gits appears!  Time for CQ :D
-And one more for sizes.py...
-
-What's an overmarketed dietary supplement expressing sadness, relief,
-tiredness, or a similar feeling.?  Ah-Sigh-ee.
-
-It was love at first sight.  The moment Yossarian first laid eyes on the chaplain, he fell madly in love with him.
-
-Cool whitespace change for git-cl land
-
-Oh god the bots are red! I'm blind! Mmmm, cronuts.
-
-If you stand on your head, you will get footprints in your hair.
-
-sigh
-sigher
-pick up cls
-
-In the BUILD we trust.
-^_^
-
-In the masters we don't.
-In the tryservers, we don't either.
-In the CQ sometimes.
-Auto-generated by git-eject-upstream (http://goo.gl/cIHsYR)
-My sandwiches are like my children: I love them all.
-No, really, I couldn't eat another bit.
-When I hunger I think of you, and a pastrami sandwich.
-Do make a terrible mistake every once in a while.
-I just made two.
-Mistakes are the best sometimes.
-\o/
-This is groovy.
-
-SECRET ENDING: IT WAS _____ ALL ALONG!
-testing trailing line
diff --git a/build/win/BUILD.gn b/build/win/BUILD.gn
deleted file mode 100644
index 320ee7a..0000000
--- a/build/win/BUILD.gn
+++ /dev/null
@@ -1,153 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/clang/clang.gni")
-import("//build/config/sanitizers/sanitizers.gni")
-import("//build/config/win/manifest.gni")
-
-# Depending on this target adds Chrome's default Windows compatibility,
-# common controls, and as-invoker elevation manifests to executables.
-windows_manifest("default_exe_manifest") {
-  sources = [
-    as_invoker_manifest,
-    common_controls_manifest,
-    default_compatibility_manifest,
-  ]
-}
-
-if (is_win) {
-  action("copy_cdb_to_output") {
-    script = "//build/win/copy_cdb_to_output.py"
-    inputs = [
-      script,
-    ]
-    outputs = [
-      "$root_out_dir/cdb/cdb.exe",
-      "$root_out_dir/cdb/dbgeng.dll",
-      "$root_out_dir/cdb/dbghelp.dll",
-      "$root_out_dir/cdb/dbgmodel.dll",
-      "$root_out_dir/cdb/winext/ext.dll",
-      "$root_out_dir/cdb/winext/uext.dll",
-      "$root_out_dir/cdb/winxp/exts.dll",
-      "$root_out_dir/cdb/winxp/ntsdexts.dll",
-      "$root_out_dir/cdb/api-ms-win-core-console-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-datetime-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-debug-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-errorhandling-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-file-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-file-l1-2-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-file-l2-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-handle-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-heap-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-interlocked-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-libraryloader-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-localization-l1-2-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-memory-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-namedpipe-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-processenvironment-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-1.dll",
-      "$root_out_dir/cdb/api-ms-win-core-profile-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-rtlsupport-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-string-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-synch-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-synch-l1-2-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-sysinfo-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-timezone-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-core-util-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-conio-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-convert-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-environment-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-filesystem-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-heap-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-locale-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-math-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-multibyte-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-private-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-process-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-runtime-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-stdio-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-string-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-time-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-crt-utility-l1-1-0.dll",
-      "$root_out_dir/cdb/api-ms-win-eventing-provider-l1-1-0.dll",
-      "$root_out_dir/cdb/ucrtbase.dll",
-    ]
-    args = [
-      rebase_path("$root_out_dir/cdb", root_out_dir),
-      current_cpu,
-    ]
-  }
-
-  group("runtime_libs") {
-    if (is_component_build) {
-      # Copy the VS runtime DLLs into the isolate so that they don't have to be
-      # preinstalled on the target machine. The debug runtimes have a "d" at
-      # the end.
-      if (is_debug) {
-        vcrt_suffix = "d"
-      } else {
-        vcrt_suffix = ""
-      }
-
-      # These runtime files are copied to the output directory by the
-      # vs_toolchain script that runs as part of toolchain configuration.
-      data = [
-        "$root_out_dir/msvcp140${vcrt_suffix}.dll",
-        "$root_out_dir/vccorlib140${vcrt_suffix}.dll",
-        "$root_out_dir/vcruntime140${vcrt_suffix}.dll",
-
-        # Universal Windows 10 CRT files
-        "$root_out_dir/api-ms-win-core-console-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-datetime-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-debug-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-errorhandling-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-file-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-file-l1-2-0.dll",
-        "$root_out_dir/api-ms-win-core-file-l2-1-0.dll",
-        "$root_out_dir/api-ms-win-core-handle-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-heap-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-interlocked-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-libraryloader-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-localization-l1-2-0.dll",
-        "$root_out_dir/api-ms-win-core-memory-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-namedpipe-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-processenvironment-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-processthreads-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-processthreads-l1-1-1.dll",
-        "$root_out_dir/api-ms-win-core-profile-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-rtlsupport-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-string-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-synch-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-synch-l1-2-0.dll",
-        "$root_out_dir/api-ms-win-core-sysinfo-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-timezone-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-core-util-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-conio-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-convert-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-environment-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-filesystem-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-heap-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-locale-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-math-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-multibyte-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-private-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-process-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-runtime-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-stdio-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-string-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-time-l1-1-0.dll",
-        "$root_out_dir/api-ms-win-crt-utility-l1-1-0.dll",
-        "$root_out_dir/ucrtbase${vcrt_suffix}.dll",
-      ]
-      if (is_asan) {
-        if (current_cpu == "x64") {
-          data += [ "$clang_base_path/lib/clang/$clang_version/lib/windows/clang_rt.asan_dynamic-x86_64.dll" ]
-        } else {
-          data += [ "$clang_base_path/lib/clang/$clang_version/lib/windows/clang_rt.asan_dynamic-i386.dll" ]
-        }
-      }
-    }
-  }
-}
diff --git a/build/win/as_invoker.manifest b/build/win/as_invoker.manifest
deleted file mode 100644
index df046fd..0000000
--- a/build/win/as_invoker.manifest
+++ /dev/null
@@ -1,9 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
-<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
-  <security>
-    <requestedPrivileges>
-      <requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel>
-    </requestedPrivileges>
-  </security>
-</trustInfo></assembly>
diff --git a/build/win/chrome_win.croc b/build/win/chrome_win.croc
deleted file mode 100644
index e1e3bb7..0000000
--- a/build/win/chrome_win.croc
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- python -*-
-# Crocodile config file for Chromium windows
-
-{
-  # List of rules, applied in order
-  'rules' : [
-    # Specify inclusions before exclusions, since rules are in order.
-
-    # Don't include chromeos, posix, or linux specific files
-    {
-      'regexp' : '.*(_|/)(chromeos|linux|posix)(\\.|_)',
-      'include' : 0,
-    },
-    # Don't include ChromeOS dirs
-    {
-      'regexp' : '.*/chromeos/',
-      'include' : 0,
-    },
-
-    # Groups
-    {
-      'regexp' : '.*_test_win\\.',
-      'group' : 'test',
-    },
-  ],
-}
diff --git a/build/win/common_controls.manifest b/build/win/common_controls.manifest
deleted file mode 100644
index 1710196..0000000
--- a/build/win/common_controls.manifest
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version='1.0' encoding='UTF-8' standalone='yes'?>
-<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>
-  <dependency>
-    <dependentAssembly>
-      <assemblyIdentity type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='*' publicKeyToken='6595b64144ccf1df' language='*' />
-    </dependentAssembly>
-  </dependency>
-</assembly>
diff --git a/build/win/compatibility.manifest b/build/win/compatibility.manifest
deleted file mode 100644
index 10d10da..0000000
--- a/build/win/compatibility.manifest
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
-  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
-    <application>
-      <!--The ID below indicates application support for Windows Vista -->
-      <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
-      <!--The ID below indicates application support for Windows 7 -->
-      <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
-      <!--The ID below indicates application support for Windows 8 -->
-      <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
-      <!--The ID below indicates application support for Windows 8.1 -->
-      <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
-      <!--The ID below indicates application support for Windows 10 -->
-      <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
-    </application>
-  </compatibility>
-</assembly>
diff --git a/build/win/copy_cdb_to_output.py b/build/win/copy_cdb_to_output.py
deleted file mode 100755
index 46d4294..0000000
--- a/build/win/copy_cdb_to_output.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import hashlib
-import os
-import shutil
-import sys
-
-script_dir = os.path.dirname(os.path.realpath(__file__))
-src_build_dir = os.path.abspath(os.path.join(script_dir, os.pardir))
-sys.path.insert(0, src_build_dir)
-
-import vs_toolchain
-
-
-def _HexDigest(file_name):
-  hasher = hashlib.sha256()
-  afile = open(file_name, 'rb')
-  blocksize = 65536
-  buf = afile.read(blocksize)
-  while len(buf) > 0:
-    hasher.update(buf)
-    buf = afile.read(blocksize)
-  afile.close()
-  return hasher.hexdigest()
-
-
-def _CopyImpl(file_name, target_dir, source_dir, verbose=False):
-  """Copy |source| to |target| if it doesn't already exist or if it
-  needs to be updated.
-  """
-  target = os.path.join(target_dir, file_name)
-  source = os.path.join(source_dir, file_name)
-  if (os.path.isdir(os.path.dirname(target)) and
-      ((not os.path.isfile(target)) or
-       _HexDigest(source) != _HexDigest(target))):
-    if verbose:
-      print 'Copying %s to %s...' % (source, target)
-    if os.path.exists(target):
-      os.unlink(target)
-    shutil.copy(source, target)
-
-
-def _ConditionalMkdir(output_dir):
-  if not os.path.isdir(output_dir):
-    os.makedirs(output_dir)
-
-
-def _CopyCDBToOutput(output_dir, target_arch):
-  """Copies the Windows debugging executable cdb.exe to the output
-  directory, which is created if it does not exist. The output
-  directory, and target architecture that should be copied, are
-  passed. Supported values for the target architecture are the GYP
-  values "ia32" and "x64" and the GN values "x86" and "x64".
-  """
-  _ConditionalMkdir(output_dir)
-  vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
-  # If WINDOWSSDKDIR is not set use the default SDK path. This will be the case
-  # when DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.
-  win_sdk_dir = os.path.normpath(
-      os.environ.get('WINDOWSSDKDIR',
-                     os.path.expandvars('%ProgramFiles(x86)%'
-                                        '\\Windows Kits\\10')))
-  if target_arch == 'ia32' or target_arch == 'x86':
-    src_arch = 'x86'
-  elif target_arch == 'x64':
-    src_arch = 'x64'
-  else:
-    print 'copy_cdb_to_output.py: unknown target_arch %s' % target_arch
-    sys.exit(1)
-  # We need to copy multiple files, so cache the computed source directory.
-  src_dir = os.path.join(win_sdk_dir, 'Debuggers', src_arch)
-  # We need to copy some helper DLLs to get access to the !uniqstack
-  # command to dump all threads' stacks.
-  src_winext_dir = os.path.join(src_dir, 'winext')
-  dst_winext_dir = os.path.join(output_dir, 'winext')
-  src_winxp_dir = os.path.join(src_dir, 'winxp')
-  dst_winxp_dir = os.path.join(output_dir, 'winxp')
-  src_crt_dir = os.path.join(win_sdk_dir, 'Redist', 'ucrt', 'DLLs', src_arch)
-  _ConditionalMkdir(dst_winext_dir)
-  _ConditionalMkdir(dst_winxp_dir)
-  # Note that the outputs from the "copy_cdb_to_output" target need to
-  # be kept in sync with this list.
-  _CopyImpl('cdb.exe', output_dir, src_dir)
-  _CopyImpl('dbgeng.dll', output_dir, src_dir)
-  _CopyImpl('dbghelp.dll', output_dir, src_dir)
-  _CopyImpl('dbgmodel.dll', output_dir, src_dir)
-  _CopyImpl('ext.dll', dst_winext_dir, src_winext_dir)
-  _CopyImpl('uext.dll', dst_winext_dir, src_winext_dir)
-  _CopyImpl('exts.dll', dst_winxp_dir, src_winxp_dir)
-  _CopyImpl('ntsdexts.dll', dst_winxp_dir, src_winxp_dir)
-  _CopyImpl('api-ms-win-eventing-provider-l1-1-0.dll', output_dir, src_dir)
-  for dll_path in glob.glob(os.path.join(src_crt_dir, 'api-ms-win-*.dll')):
-    _CopyImpl(os.path.split(dll_path)[1], output_dir, src_crt_dir)
-  _CopyImpl('ucrtbase.dll', output_dir, src_crt_dir)
-  return 0
-
-
-def main():
-  if len(sys.argv) < 2:
-    print >>sys.stderr, 'Usage: copy_cdb_to_output.py <output_dir> ' + \
-        '<target_arch>'
-    return 1
-  return _CopyCDBToOutput(sys.argv[1], sys.argv[2])
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/win/gn_meta_sln.py b/build/win/gn_meta_sln.py
deleted file mode 100644
index 9f72eda..0000000
--- a/build/win/gn_meta_sln.py
+++ /dev/null
@@ -1,212 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# gn_meta_sln.py
-#   Helper utility to combine GN-generated Visual Studio projects into
-#   a single meta-solution.
-
-import os
-import glob
-import re
-import sys
-from shutil import copyfile
-
-# Helpers
-def EnsureExists(path):
-    try:
-        os.makedirs(path)
-    except OSError:
-        pass
-
-def WriteLinesToFile(lines, file_name):
-    EnsureExists(os.path.dirname(file_name))
-    with open(file_name, "w") as f:
-        f.writelines(lines)
-
-def ExtractIdg(proj_file_name):
-    result = []
-    with open(proj_file_name) as proj_file:
-        lines = iter(proj_file)
-        for p_line in lines:
-            if "<ItemDefinitionGroup" in p_line:
-                while not "</ItemDefinitionGroup" in p_line:
-                    result.append(p_line)
-                    p_line = lines.next()
-                result.append(p_line)
-                return result
-
-# [ (name, solution_name, vs_version), ... ]
-configs = []
-
-def GetVSVersion(solution_file):
-    with open(solution_file) as f:
-        f.readline()
-        comment = f.readline().strip()
-        return comment[-4:]
-
-# Find all directories that can be used as configs (and record if they have VS
-# files present)
-for root, dirs, files in os.walk("out"):
-    for out_dir in dirs:
-        gn_file = os.path.join("out", out_dir, "build.ninja.d")
-        if os.path.exists(gn_file):
-            solutions = glob.glob(os.path.join("out", out_dir, "*.sln"))
-            for solution in solutions:
-                vs_version = GetVSVersion(solution)
-                configs.append((out_dir, os.path.basename(solution),
-                                vs_version))
-    break
-
-# Every project has a GUID that encodes the type. We only care about C++.
-cpp_type_guid = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
-
-# Work around MSBuild limitations by always using a fixed arch.
-hard_coded_arch = "x64"
-
-# name -> [ (config, pathToProject, GUID, arch), ... ]
-all_projects = {}
-project_pattern = (r'Project\("\{' + cpp_type_guid +
-                   r'\}"\) = "([^"]*)", "([^"]*)", "\{([^\}]*)\}"')
-
-# We need something to work with. Typically, this will fail if no GN folders
-# have IDE files
-if len(configs) == 0:
-    print("ERROR: At least one GN directory must have been built with --ide=vs")
-    sys.exit()
-
-# Filter out configs which don't match the name and vs version of the first.
-name = configs[0][1]
-vs_version = configs[0][2]
-
-for config in configs:
-    if config[1] != name or config[2] != vs_version:
-        continue
-
-    sln_lines = iter(open(os.path.join("out", config[0], config[1])))
-    for sln_line in sln_lines:
-        match_obj = re.match(project_pattern, sln_line)
-        if match_obj:
-            proj_name = match_obj.group(1)
-            if not all_projects.has_key(proj_name):
-                all_projects[proj_name] = []
-            all_projects[proj_name].append((config[0], match_obj.group(2),
-                                            match_obj.group(3)))
-
-# We need something to work with. Typically, this will fail if no GN folders
-# have IDE files
-if len(all_projects) == 0:
-    print("ERROR: At least one GN directory must have been built with --ide=vs")
-    sys.exit()
-
-# Create a new solution. We arbitrarily use the first config as the GUID source
-# (but we need to match that behavior later, when we copy/generate the project
-# files).
-new_sln_lines = []
-new_sln_lines.append(
-    'Microsoft Visual Studio Solution File, Format Version 12.00\n')
-new_sln_lines.append('# Visual Studio ' + vs_version + '\n')
-for proj_name, proj_configs in all_projects.items():
-    new_sln_lines.append('Project("{' + cpp_type_guid + '}") = "' + proj_name +
-                         '", "' + proj_configs[0][1] + '", "{' +
-                         proj_configs[0][2] + '}"\n')
-    new_sln_lines.append('EndProject\n')
-
-new_sln_lines.append('Global\n')
-new_sln_lines.append(
-    '\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n')
-for config in configs:
-    match = config[0] + '|' + hard_coded_arch
-    new_sln_lines.append('\t\t' + match + ' = ' + match + '\n')
-new_sln_lines.append('\tEndGlobalSection\n')
-new_sln_lines.append(
-    '\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n')
-for proj_name, proj_configs in all_projects.items():
-    proj_guid = proj_configs[0][2]
-    for config in configs:
-        match = config[0] + '|' + hard_coded_arch
-        new_sln_lines.append('\t\t{' + proj_guid + '}.' + match +
-                           '.ActiveCfg = ' + match + '\n')
-        new_sln_lines.append('\t\t{' + proj_guid + '}.' + match +
-                           '.Build.0 = ' + match + '\n')
-new_sln_lines.append('\tEndGlobalSection\n')
-new_sln_lines.append('\tGlobalSection(SolutionProperties) = preSolution\n')
-new_sln_lines.append('\t\tHideSolutionNode = FALSE\n')
-new_sln_lines.append('\tEndGlobalSection\n')
-new_sln_lines.append('\tGlobalSection(NestedProjects) = preSolution\n')
-new_sln_lines.append('\tEndGlobalSection\n')
-new_sln_lines.append('EndGlobal\n')
-
-# Write solution file
-WriteLinesToFile(new_sln_lines, 'out/sln/' + name)
-
-idg_hdr = "<ItemDefinitionGroup Condition=\"'$(Configuration)|$(Platform)'=='"
-
-configuration_template = """    <ProjectConfiguration Include="{config}|{arch}">
-      <Configuration>{config}</Configuration>
-      <Platform>{arch}</Platform>
-    </ProjectConfiguration>
-"""
-
-def FormatProjectConfig(config):
-    return configuration_template.format(
-        config = config[0], arch = hard_coded_arch)
-
-# Now, bring over the project files
-for proj_name, proj_configs in all_projects.items():
-    # Paths to project and filter file in src and dst locations
-    src_proj_path = os.path.join("out", proj_configs[0][0], proj_configs[0][1])
-    dst_proj_path = os.path.join("out", "sln", proj_configs[0][1])
-    src_filter_path = src_proj_path + ".filters"
-    dst_filter_path = dst_proj_path + ".filters"
-
-    # Copy the filter file unmodified
-    EnsureExists(os.path.dirname(dst_proj_path))
-    copyfile(src_filter_path, dst_filter_path)
-
-    preferred_tool_arch = None
-    config_arch = {}
-
-    # Bring over the project file, modified with extra configs
-    with open(src_proj_path) as src_proj_file:
-        proj_lines = iter(src_proj_file)
-        new_proj_lines = []
-        for line in proj_lines:
-            if "<ItemDefinitionGroup" in line:
-                # This is a large group that contains many settings. We need to
-                # replicate it, with conditions so it varies per configuration.
-                idg_lines = []
-                while not "</ItemDefinitionGroup" in line:
-                    idg_lines.append(line)
-                    line = proj_lines.next()
-                idg_lines.append(line)
-                for proj_config in proj_configs:
-                    config_idg_lines = ExtractIdg(os.path.join("out",
-                                                             proj_config[0],
-                                                             proj_config[1]))
-                    match = proj_config[0] + '|' + hard_coded_arch
-                    new_proj_lines.append(idg_hdr + match + "'\">\n")
-                    for idg_line in config_idg_lines[1:]:
-                        new_proj_lines.append(idg_line)
-            elif "ProjectConfigurations" in line:
-                new_proj_lines.append(line)
-                proj_lines.next()
-                proj_lines.next()
-                proj_lines.next()
-                proj_lines.next()
-                for config in configs:
-                    new_proj_lines.append(FormatProjectConfig(config))
-
-            elif "<OutDir" in line:
-                new_proj_lines.append(line.replace(proj_configs[0][0],
-                                                 "$(Configuration)"))
-            elif "<PreferredToolArchitecture" in line:
-                new_proj_lines.append("    <PreferredToolArchitecture>" +
-                                      hard_coded_arch +
-                                      "</PreferredToolArchitecture>\n")
-            else:
-                new_proj_lines.append(line)
-        with open(dst_proj_path, "w") as new_proj:
-            new_proj.writelines(new_proj_lines)
-
-print('Wrote meta solution to out/sln/' + name)
diff --git a/build/win/merge_pgc_files.py b/build/win/merge_pgc_files.py
deleted file mode 100755
index 804c4ea..0000000
--- a/build/win/merge_pgc_files.py
+++ /dev/null
@@ -1,144 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Merge the PGC files generated during the profiling step to the PGD database.
-
-This is required to work around a flakiness in pgomgr.exe where it can run out
-of address space while trying to merge all the PGC files at the same time.
-"""
-
-import glob
-import json
-import optparse
-import os
-import subprocess
-import sys
-
-
-script_dir = os.path.dirname(os.path.realpath(__file__))
-sys.path.insert(0, os.path.join(script_dir, os.pardir))
-
-import vs_toolchain
-
-
-# Number of PGC files that should be merged in each iteration. Merging all
-# the files one by one is really slow, but merging more than 10 at a time
-# doesn't really seem to impact the total time (when merging 180 files).
-#
-# Number of pgc merged per iteration  |  Time (in min)
-# 1                                   |  27.2
-# 10                                  |  12.8
-# 20                                  |  12.0
-# 30                                  |  11.5
-# 40                                  |  11.4
-# 50                                  |  11.5
-# 60                                  |  11.6
-# 70                                  |  11.6
-# 80                                  |  11.7
-#
-# TODO(sebmarchand): Measure the memory usage of pgomgr.exe to see how it gets
-#     affected by the number of pgc files.
-_BATCH_SIZE_DEFAULT = 10
-
-
-def find_pgomgr(chrome_checkout_dir):
-  """Find pgomgr.exe."""
-  win_toolchain_json_file = os.path.join(chrome_checkout_dir, 'build',
-      'win_toolchain.json')
-  if not os.path.exists(win_toolchain_json_file):
-    raise Exception('The toolchain JSON file is missing.')
-  with open(win_toolchain_json_file) as temp_f:
-    toolchain_data = json.load(temp_f)
-  if not os.path.isdir(toolchain_data['path']):
-    raise Exception('The toolchain JSON file is invalid.')
-
-  # Always use the x64 version of pgomgr (the x86 one doesn't work in the bot's
-  # environment).
-  pgomgr_dir = None
-  if toolchain_data['version'] == '2017':
-    vc_tools_root = vs_toolchain.FindVCToolsRoot()
-    pgomgr_dir = os.path.join(vc_tools_root, 'HostX64', 'x64')
-
-  pgomgr_path = os.path.join(pgomgr_dir, 'pgomgr.exe')
-  if not os.path.exists(pgomgr_path):
-    raise Exception('pgomgr.exe is missing from %s.' % pgomgr_dir)
-
-  return pgomgr_path
-
-
-def merge_pgc_files(pgomgr_path, files, pgd_path):
-  """Merge all the pgc_files in |files| to |pgd_path|."""
-  merge_command = [
-      pgomgr_path,
-      '/merge'
-  ]
-  merge_command.extend(files)
-  merge_command.append(pgd_path)
-  proc = subprocess.Popen(merge_command, stdout=subprocess.PIPE)
-  stdout, _ = proc.communicate()
-  print stdout
-  return proc.returncode
-
-
-def main():
-  parser = optparse.OptionParser(usage='%prog [options]')
-  parser.add_option('--checkout-dir', help='The Chrome checkout directory.')
-  parser.add_option('--target-cpu', help='[DEPRECATED] The target\'s bitness.')
-  parser.add_option('--build-dir', help='Chrome build directory.')
-  parser.add_option('--binary-name', help='The binary for which the PGC files '
-                    'should be merged, without extension.')
-  parser.add_option('--files-per-iter', help='The number of PGC files to merge '
-                    'in each iteration, defaults to %d.' % _BATCH_SIZE_DEFAULT,
-                    type='int', default=_BATCH_SIZE_DEFAULT)
-  options, _ = parser.parse_args()
-
-  if not options.checkout_dir:
-    parser.error('--checkout-dir is required')
-  if not options.build_dir:
-    parser.error('--build-dir is required')
-  if not options.binary_name:
-    parser.error('--binary-name is required')
-
-  # Starts by finding pgomgr.exe.
-  pgomgr_path = find_pgomgr(options.checkout_dir)
-
-  pgc_files = glob.glob(os.path.join(options.build_dir,
-                                     '%s*.pgc' % options.binary_name))
-  pgd_file = os.path.join(options.build_dir, '%s.pgd' % options.binary_name)
-
-  def _split_in_chunks(items, chunk_size):
-    """Split |items| in chunks of size |chunk_size|.
-
-    Source: http://stackoverflow.com/a/312464
-    """
-    for i in xrange(0, len(items), chunk_size):
-      yield items[i:i + chunk_size]
-  for chunk in _split_in_chunks(pgc_files, options.files_per_iter):
-    files_to_merge = []
-    for pgc_file in chunk:
-      files_to_merge.append(
-          os.path.join(options.build_dir, os.path.basename(pgc_file)))
-    ret = merge_pgc_files(pgomgr_path, files_to_merge, pgd_file)
-    # pgomgr.exe sometimes fails to merge too many files at the same time (it
-    # usually complains that a stream is missing, but if you try to merge this
-    # file individually it works); if that happens, try to merge all the PGCs
-    # from this batch one at a time instead. Don't fail the build if we can't
-    # merge a file.
-    # TODO(sebmarchand): Report this to Microsoft, check if this is still
-    # happening with VS2017.
-    if ret != 0:
-      print ('Error while trying to merge several PGC files at the same time, '
-             'trying to merge them one by one.')
-      for pgc_file in chunk:
-        ret = merge_pgc_files(
-            pgomgr_path,
-            [os.path.join(options.build_dir, os.path.basename(pgc_file))],
-            pgd_file
-        )
-        if ret != 0:
-          print 'Error while trying to merge %s, continuing.' % pgc_file
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/build/win/message_compiler.gni b/build/win/message_compiler.gni
deleted file mode 100644
index 814eb65..0000000
--- a/build/win/message_compiler.gni
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-assert(is_win, "This only runs on Windows.")
-
-# Runs mc.exe over a list of sources. The outputs (a header and rc file) are
-# placed in the target gen dir, and compiled.
-#
-# sources
-#   List of message files to process.
-#
-# user_mode_logging (optional bool)
-#   Generates user-mode logging code. Defaults to false (no logging code).
-#
-# compile_generated_code (optional, defaults to true)
-#   If unset or true, the generated code will be compiled and linked into
-#   targets that depend on it. If set to false, the .h and .rc files will only
-#   be generated.
-#
-# deps, public_deps, visibility
-#   Normal meaning.
-template("message_compiler") {
-  if (defined(invoker.compile_generated_code) &&
-      !invoker.compile_generated_code) {
-    compile_generated_code = false
-    action_name = target_name
-  } else {
-    compile_generated_code = true
-    action_name = "${target_name}_mc"
-    source_set_name = target_name
-  }
-
-  action_foreach(action_name) {
-    if (compile_generated_code) {
-      visibility = [ ":$source_set_name" ]
-    } else {
-      forward_variables_from(invoker, [ "visibility" ])
-    }
-
-    script = "//build/win/message_compiler.py"
-
-    outputs = [
-      "$target_gen_dir/{{source_name_part}}.h",
-      "$target_gen_dir/{{source_name_part}}.rc",
-    ]
-
-    args = [
-      # The first argument is the environment file saved to the build
-      # directory. This is required because the Windows toolchain setup saves
-      # the VC paths and such so that running "mc.exe" will work with the
-      # configured toolchain. This file is in the root build dir.
-      "environment.$current_cpu",
-
-      # Where to put the header.
-      "-h",
-      rebase_path(target_gen_dir, root_build_dir),
-
-      # Where to put the .rc file.
-      "-r",
-      rebase_path(target_gen_dir, root_build_dir),
-
-      # Input is Unicode.
-      "-u",
-    ]
-    if (defined(invoker.user_mode_logging) && invoker.user_mode_logging) {
-      args += [ "-um" ]
-    }
-    args += [ "{{source}}" ]
-
-    forward_variables_from(invoker,
-                           [
-                             "deps",
-                             "public_deps",
-                             "sources",
-                           ])
-  }
-
-  if (compile_generated_code) {
-    # Compile the generated rc file.
-    source_set(source_set_name) {
-      forward_variables_from(invoker, [ "visibility" ])
-      sources = get_target_outputs(":$action_name")
-      deps = [
-        ":$action_name",
-      ]
-    }
-  }
-}
diff --git a/build/win/message_compiler.py b/build/win/message_compiler.py
deleted file mode 100644
index 7c1902e..0000000
--- a/build/win/message_compiler.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Runs the Microsoft Message Compiler (mc.exe).
-#
-# Usage: message_compiler.py <environment_file> [<args to mc.exe>*]
-
-import difflib
-import distutils.dir_util
-import filecmp
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-
-def main():
-  env_file, rest = sys.argv[1], sys.argv[2:]
-
-  # Parse some argument flags.
-  header_dir = None
-  resource_dir = None
-  input_file = None
-  for i, arg in enumerate(rest):
-    if arg == '-h' and len(rest) > i + 1:
-      assert header_dir == None
-      header_dir = rest[i + 1]
-    elif arg == '-r' and len(rest) > i + 1:
-      assert resource_dir == None
-      resource_dir = rest[i + 1]
-    elif arg.endswith('.mc') or arg.endswith('.man'):
-      assert input_file == None
-      input_file = arg
-
-  # Copy checked-in outputs to final location.
-  THIS_DIR = os.path.abspath(os.path.dirname(__file__))
-  assert header_dir == resource_dir
-  source = os.path.join(THIS_DIR, "..", "..",
-      "third_party", "win_build_output",
-      re.sub(r'^(?:[^/]+/)?gen/', 'mc/', header_dir))
-  distutils.dir_util.copy_tree(source, header_dir, preserve_times=False)
-
-  # On non-Windows, that's all we can do.
-  if sys.platform != 'win32':
-    return
-
-  # On Windows, run mc.exe on the input and check that its outputs are
-  # identical to the checked-in outputs.
-
-  # Read the environment block from the file. This is stored in the format used
-  # by CreateProcess. Drop last 2 NULs, one for list terminator, one for
-  # trailing vs. separator.
-  env_pairs = open(env_file).read()[:-2].split('\0')
-  env_dict = dict([item.split('=', 1) for item in env_pairs])
-
-  extension = os.path.splitext(input_file)[1]
-  if extension in ['.man', '.mc']:
-    # For .man files, mc's output changed significantly from Version 10.0.15063
-    # to Version 10.0.16299.  We should always have the output of the current
-    # default SDK checked in and compare to that. Early out if a different SDK
-    # is active. This also happens with .mc files.
-    # TODO(thakis): Check in new baselines and compare to 16299 instead once
-    # we use the 2017 Fall Creator's Update by default.
-    mc_help = subprocess.check_output(['mc.exe', '/?'], env=env_dict,
-                                      stderr=subprocess.STDOUT, shell=True)
-    version = re.search(r'Message Compiler\s+Version (\S+)', mc_help).group(1)
-    if version != '10.0.15063':
-      return
-
-  # mc writes to stderr, so this explicitly redirects to stdout and eats it.
-  try:
-    tmp_dir = tempfile.mkdtemp()
-    delete_tmp_dir = True
-    if header_dir:
-      rest[rest.index('-h') + 1] = tmp_dir
-      header_dir = tmp_dir
-    if resource_dir:
-      rest[rest.index('-r') + 1] = tmp_dir
-      resource_dir = tmp_dir
-
-    # This needs shell=True to search the path in env_dict for the mc
-    # executable.
-    subprocess.check_output(['mc.exe'] + rest,
-                            env=env_dict,
-                            stderr=subprocess.STDOUT,
-                            shell=True)
-    # We require all source code (in particular, the header generated here) to
-    # be UTF-8. jinja can output the intermediate .mc file in UTF-8 or UTF-16LE.
-    # However, mc.exe only supports Unicode via the -u flag, and it assumes when
-    # that is specified that the input is UTF-16LE (and errors out on UTF-8
-    # files, assuming they're ANSI). Even with -u specified and UTF-16LE input,
-    # it generates an ANSI header, and includes broken versions of the message
-    # text in the comment before the value. To work around this, for any invalid
-    # // comment lines, we simply drop the line in the header after building it.
-    # Also, mc.exe apparently doesn't always write #define lines in
-    # deterministic order, so manually sort each block of #defines.
-    if header_dir:
-      header_file = os.path.join(
-          header_dir, os.path.splitext(os.path.basename(input_file))[0] + '.h')
-      header_contents = []
-      with open(header_file, 'rb') as f:
-        define_block = []  # The current contiguous block of #defines.
-        for line in f.readlines():
-          if line.startswith('//') and '?' in line:
-            continue
-          if line.startswith('#define '):
-            define_block.append(line)
-            continue
-          # On the first non-#define line, emit the sorted preceding #define
-          # block.
-          header_contents += sorted(define_block, key=lambda s: s.split()[-1])
-          define_block = []
-          header_contents.append(line)
-        # If the .h file ends with a #define block, flush the final block.
-        header_contents += sorted(define_block, key=lambda s: s.split()[-1])
-      with open(header_file, 'wb') as f:
-        f.write(''.join(header_contents))
-
-    # mc.exe invocation and post-processing are complete, now compare the output
-    # in tmp_dir to the checked-in outputs.
-    diff = filecmp.dircmp(tmp_dir, source)
-    if diff.diff_files or set(diff.left_list) != set(diff.right_list):
-      print 'mc.exe output different from files in %s, see %s' % (source,
-                                                                  tmp_dir)
-      diff.report()
-      for f in diff.diff_files:
-        if f.endswith('.bin'): continue
-        fromfile = os.path.join(source, f)
-        tofile = os.path.join(tmp_dir, f)
-        print ''.join(difflib.unified_diff(open(fromfile, 'U').readlines(),
-                                           open(tofile, 'U').readlines(),
-                                           fromfile, tofile))
-      delete_tmp_dir = False
-      sys.exit(1)
-  except subprocess.CalledProcessError as e:
-    print e.output
-    sys.exit(e.returncode)
-  finally:
-    if os.path.exists(tmp_dir) and delete_tmp_dir:
-      shutil.rmtree(tmp_dir)
-
-if __name__ == '__main__':
-  main()
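
The script above reads the toolchain environment file in the raw CreateProcess block format: NUL-separated KEY=VALUE pairs terminated by two extra NULs. A minimal sketch of that parsing step, assuming a hypothetical environment.x64 file in the build directory:

  def read_environment_block(path):
    # Drop the last two NULs: one list terminator, one trailing separator.
    pairs = open(path).read()[:-2].split('\0')
    return dict(item.split('=', 1) for item in pairs)

  env_dict = read_environment_block('environment.x64')  # hypothetical path
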
diff --git a/build/win/reorder-imports.py b/build/win/reorder-imports.py
deleted file mode 100755
index c4b294d..0000000
--- a/build/win/reorder-imports.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import optparse
-import os
-import shutil
-import subprocess
-import sys
-
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..',
-                                'third_party', 'pefile'))
-import pefile
-
-def reorder_imports(input_dir, output_dir, architecture):
-  """Swap chrome_elf.dll to be the first import of chrome.exe.
-  Also copy over any related files that might be needed
-  (pdbs, manifests etc.).
-  """
-  # TODO(thakis): See if there is a reliable way to write the
-  # correct executable in the first place, so that this script
-  # only needs to verify that and not write a whole new exe.
-
-  input_image = os.path.join(input_dir, 'chrome.exe')
-  output_image = os.path.join(output_dir, 'chrome.exe')
-
-  # pefile mmap()s the whole executable, and then parses parts of
-  # it into python data structures for ease of processing.
-  # To write the file again, only the mmap'd data is written back,
-  # so modifying the parsed python objects generally has no effect.
-  # However, parsed raw data ends up in pe.Structure instances,
-  # and these all get serialized back when the file gets written.
-  # So things that are in a Structure must have their data set
-  # through the Structure, while other data must be set through
-  # the set_bytes_*() methods.
-  pe = pefile.PE(input_image, fast_load=True)
-  if architecture == 'x64':
-    assert pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE_PLUS
-  else:
-    assert pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE
-
-  pe.parse_data_directories(directories=[
-      pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_IMPORT']])
-
-  found_elf = False
-  for i, peimport in enumerate(pe.DIRECTORY_ENTRY_IMPORT):
-    if peimport.dll.lower() == 'chrome_elf.dll':
-      assert not found_elf, 'only one chrome_elf.dll import expected'
-      found_elf = True
-      if i > 0:
-        swap = pe.DIRECTORY_ENTRY_IMPORT[0]
-
-        # Morally we want to swap peimport.struct and swap.struct here,
-        # but the pe module doesn't expose a public method on Structure
-        # to get all data of a Structure without explicitly listing all
-        # field names.
-        # NB: OriginalFirstThunk and Characteristics are a union, both at
-        # offset 0; handling just one of them is enough.
-        peimport.struct.OriginalFirstThunk, swap.struct.OriginalFirstThunk = \
-            swap.struct.OriginalFirstThunk, peimport.struct.OriginalFirstThunk
-        peimport.struct.TimeDateStamp, swap.struct.TimeDateStamp = \
-            swap.struct.TimeDateStamp, peimport.struct.TimeDateStamp
-        peimport.struct.ForwarderChain, swap.struct.ForwarderChain = \
-            swap.struct.ForwarderChain, peimport.struct.ForwarderChain
-        peimport.struct.Name, swap.struct.Name = \
-            swap.struct.Name, peimport.struct.Name
-        peimport.struct.FirstThunk, swap.struct.FirstThunk = \
-            swap.struct.FirstThunk, peimport.struct.FirstThunk
-  assert found_elf, 'chrome_elf.dll import not found'
-
-  pe.write(filename=output_image)
-
-  for fname in glob.iglob(os.path.join(input_dir, 'chrome.exe.*')):
-    shutil.copy(fname, os.path.join(output_dir, os.path.basename(fname)))
-  return 0
-
-
-def main(argv):
-  usage = 'reorder_imports.py -i <input_dir> -o <output_dir> -a <target_arch>'
-  parser = optparse.OptionParser(usage=usage)
-  parser.add_option('-i', '--input', help='reorder chrome.exe in DIR',
-      metavar='DIR')
-  parser.add_option('-o', '--output', help='write new chrome.exe to DIR',
-      metavar='DIR')
-  parser.add_option('-a', '--arch', help='architecture of build (optional)',
-      default='ia32')
-  opts, args = parser.parse_args()
-
-  if not opts.input or not opts.output:
-    parser.error('Please provide an input and output directory')
-  return reorder_imports(opts.input, opts.output, opts.arch)
-
-if __name__ == "__main__":
-  sys.exit(main(sys.argv[1:]))
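
A quick way to check the invariant the script enforced (chrome_elf.dll as the first import descriptor of chrome.exe) is a short pefile sketch mirroring the calls above; the output path is hypothetical, and entry.dll is bytes under Python 3:

  import pefile

  # Sketch only: verify the import order produced by reorder-imports.py.
  pe = pefile.PE('out/Release/chrome.exe', fast_load=True)  # hypothetical path
  pe.parse_data_directories(directories=[
      pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_IMPORT']])
  imports = [entry.dll.lower() for entry in pe.DIRECTORY_ENTRY_IMPORT]
  assert imports and imports[0] == b'chrome_elf.dll', imports
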
diff --git a/build/win/require_administrator.manifest b/build/win/require_administrator.manifest
deleted file mode 100644
index 4142e73..0000000
--- a/build/win/require_administrator.manifest
+++ /dev/null
@@ -1,9 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
-<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
-  <security>
-    <requestedPrivileges>
-      <requestedExecutionLevel level="requireAdministrator" uiAccess="false"></requestedExecutionLevel>
-    </requestedPrivileges>
-  </security>
-</trustInfo></assembly>
diff --git a/build/win/run_pgo_profiling_benchmarks.py b/build/win/run_pgo_profiling_benchmarks.py
deleted file mode 100644
index 163682a..0000000
--- a/build/win/run_pgo_profiling_benchmarks.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility script to run the benchmarks during the profiling step of a PGO
-build.
-"""
-
-import json
-import optparse
-import os
-import subprocess
-import sys
-
-# Make sure that we're running as admin; this is required to run the Telemetry
-# benchmarks.
-from win32com.shell import shell
-if not shell.IsUserAnAdmin():
-  raise Exception('This script has to be run as admin.')
-
-
-_SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-_CHROME_BUILD_DIR = os.path.dirname(_SCRIPT_DIR)
-_CHROME_SRC_DIR = os.path.dirname(_CHROME_BUILD_DIR)
-
-
-# List of the benchmarks that we run during the profiling step.
-_BENCHMARKS_TO_RUN = {
-  'blink_perf.bindings',
-  'blink_perf.canvas',
-  'blink_perf.css',
-  'blink_perf.dom',
-  'blink_perf.paint',
-  'blink_perf.svg',
-  'blink_style.top_25',
-  'dromaeo.cssqueryjquery',
-  'dromaeo.domcoreattr',
-  'dromaeo.domcoremodify',
-  'dromaeo.domcorequery',
-  'dromaeo.domcoretraverse',
-  'dromaeo.jslibattrprototype',
-  'dromaeo.jslibeventprototype',
-  'dromaeo.jslibmodifyprototype',
-  'dromaeo.jslibstyleprototype',
-  'dromaeo.jslibtraversejquery',
-  'dromaeo.jslibtraverseprototype',
-  'media.tough_video_cases',
-  'octane',
-  'smoothness.top_25_smooth',
-  'storage.indexeddb_endure_tracing',
-  'sunspider',
-}
-
-
-def RunBenchmarks(options):
-  """Run the benchmarks."""
-  # Find the run_benchmark script.
-  chrome_run_benchmark_script = os.path.join(_CHROME_SRC_DIR, 'tools',
-                                             'perf', 'run_benchmark')
-  if not os.path.exists(chrome_run_benchmark_script):
-    raise Exception('Unable to find the run_benchmark script '
-                    '(%s doesn\'t exist) ' % chrome_run_benchmark_script)
-
-  # Augment the PATH to make sure that the benchmarking script can find
-  # pgosweep.exe and its runtime libraries.
-  env = os.environ.copy()
-  env['PATH'] = str(os.pathsep.join([options.build_dir, os.environ['PATH']]))
-  env['PogoSafeMode'] = '1'
-  # Apply a scaling factor of 0.5 to the PGO profiling buffers for the 32-bit
-  # builds, without this the buffers will be too large and the process will
-  # fail to start. See crbug.com/632864#c22.
-  if options.target_cpu == 'x86':
-    env['VCPROFILE_ALLOC_SCALE'] = '0.5'
-
-  # Run all the benchmarks.
-  # TODO(sebmarchand): Make this run in parallel.
-  for benchmark in _BENCHMARKS_TO_RUN:
-    try:
-      benchmark_command = [
-          sys.executable,
-          chrome_run_benchmark_script,
-          '--browser', options.browser_type,
-        ]
-      # Automatically set the arguments to run this script on a local build.
-      if options.browser_type == 'exact':
-        benchmark_command += [
-          '--browser-executable', os.path.join(options.build_dir, 'chrome.exe')
-        ]
-      benchmark_command += [
-          '--profiler', 'win_pgo_profiler',
-          benchmark
-        ]
-      subprocess.check_call(benchmark_command, env=env)
-    except:
-      print ('Error while trying to run the %s benchmark, continuing.' %
-             benchmark)
-      continue
-
-  return 0
-
-
-def main():
-  parser = optparse.OptionParser(usage='%prog [options]')
-  parser.add_option(
-      '--browser-type', help='The browser type (to be passed to Telemetry\'s '
-                              'benchmark runner).')
-  # TODO(sebmarchand): Parse the args.gn file to automatically set this value.
-  parser.add_option('--target-cpu', help='The target\'s bitness.')
-  parser.add_option('--build-dir', help='Chrome build directory.')
-  options, _ = parser.parse_args()
-
-  if not options.target_cpu:
-    parser.error('--target-cpu is required')
-  if not options.build_dir:
-    parser.error('--build-dir is required')
-  if not options.browser_type:
-    options.browser_type = 'exact'
-
-  return RunBenchmarks(options)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
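
Most of the script above is a benchmark list; the part worth remembering is the environment it set up before each Telemetry run. A minimal sketch of those tweaks, with a hypothetical build directory:

  import os

  build_dir = r'out\Release'  # hypothetical build directory
  env = os.environ.copy()
  # Let the benchmarks find pgosweep.exe and its runtime libraries.
  env['PATH'] = os.pathsep.join([build_dir, env['PATH']])
  env['PogoSafeMode'] = '1'
  # For 32-bit builds only: halve the PGO profiling buffers so the profiled
  # process can start (see crbug.com/632864#c22).
  env['VCPROFILE_ALLOC_SCALE'] = '0.5'
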
diff --git a/build/win/use_ansi_codes.py b/build/win/use_ansi_codes.py
deleted file mode 100755
index cff5f43..0000000
--- a/build/win/use_ansi_codes.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Prints if the the terminal is likely to understand ANSI codes."""
-
-import os
-
-# Add more terminals here as needed.
-print 'ANSICON' in os.environ
diff --git a/build/win_is_xtree_patched.py b/build/win_is_xtree_patched.py
deleted file mode 100755
index 3f1994f..0000000
--- a/build/win_is_xtree_patched.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Determines if the VS xtree header has been patched to disable C4702."""
-
-import os
-
-
-def IsPatched():
-  # TODO(scottmg): For now, just return if we're using the packaged toolchain
-  # script (because we know it's patched). Another case could be added here to
-  # query the active VS installation and actually check the contents of xtree.
-  # http://crbug.com/346399.
-  return int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1)) == 1
-
-
-def DoMain(_):
-  """Hook to be called from gyp without starting a separate python
-  interpreter."""
-  return "1" if IsPatched() else "0"
-
-
-if __name__ == '__main__':
-  print DoMain([])
diff --git a/build/win_toolchain.json b/build/win_toolchain.json
deleted file mode 100644
index 585784e..0000000
--- a/build/win_toolchain.json
+++ /dev/null
@@ -1 +0,0 @@
-{"wdk": "/usr/local/google/work/cr/src/third_party/depot_tools/win_toolchain/vs_files/5454e45bf3764c03d3fc1024b3bf5bc41e3ab62c/wdk", "win_sdk": "/usr/local/google/work/cr/src/third_party/depot_tools/win_toolchain/vs_files/5454e45bf3764c03d3fc1024b3bf5bc41e3ab62c/win_sdk", "version": "2017", "win8sdk": "/usr/local/google/work/cr/src/third_party/depot_tools/win_toolchain/vs_files/5454e45bf3764c03d3fc1024b3bf5bc41e3ab62c/win_sdk", "path": "/usr/local/google/work/cr/src/third_party/depot_tools/win_toolchain/vs_files/5454e45bf3764c03d3fc1024b3bf5bc41e3ab62c", "runtime_dirs": ["/usr/local/google/work/cr/src/third_party/depot_tools/win_toolchain/vs_files/5454e45bf3764c03d3fc1024b3bf5bc41e3ab62c/sys64", "/usr/local/google/work/cr/src/third_party/depot_tools/win_toolchain/vs_files/5454e45bf3764c03d3fc1024b3bf5bc41e3ab62c/sys32"]}
\ No newline at end of file
diff --git a/build/write_buildflag_header.py b/build/write_buildflag_header.py
deleted file mode 100755
index d46cfc8..0000000
--- a/build/write_buildflag_header.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This writes headers for build flags. See buildflag_header.gni for usage of
-# this system as a whole.
-#
-# The parameters are passed in a response file so we don't have to worry
-# about command line lengths. The name of the response file is passed on the
-# command line.
-#
-# The format of the response file is:
-#    [--flags <list of one or more flag values>]
-
-import optparse
-import os
-import shlex
-import sys
-
-
-class Options:
-  def __init__(self, output, rulename, header_guard, flags):
-    self.output = output
-    self.rulename = rulename
-    self.header_guard = header_guard
-    self.flags = flags
-
-
-def GetOptions():
-  parser = optparse.OptionParser()
-  parser.add_option('--output', help="Output header name inside --gen-dir.")
-  parser.add_option('--rulename',
-                    help="Helpful name of build rule for including in the " +
-                         "comment at the top of the file.")
-  parser.add_option('--gen-dir',
-                    help="Path to root of generated file directory tree.")
-  parser.add_option('--definitions',
-                    help="Name of the response file containing the flags.")
-  cmdline_options, cmdline_flags = parser.parse_args()
-
-  # Compute header guard by replacing some chars with _ and upper-casing.
-  header_guard = cmdline_options.output.upper()
-  header_guard = \
-      header_guard.replace('/', '_').replace('\\', '_').replace('.', '_')
-  header_guard += '_'
-
-  # The actual output file is inside the gen dir.
-  output = os.path.join(cmdline_options.gen_dir, cmdline_options.output)
-
-  # Definition files in GYP are newline separated; in GN they are shell
-  # formatted. shlex can parse both of these.
-  with open(cmdline_options.definitions, 'r') as def_file:
-    defs = shlex.split(def_file.read())
-  flags_index = defs.index('--flags')
-
-  # Everything after --flags are flags. true/false are remapped to 1/0,
-  # everything else is passed through.
-  flags = []
-  for flag in defs[flags_index + 1 :]:
-    equals_index = flag.index('=')
-    key = flag[:equals_index]
-    value = flag[equals_index + 1:]
-
-    # Canonicalize and validate the value.
-    if value == 'true':
-      value = '1'
-    elif value == 'false':
-      value = '0'
-    flags.append((key, str(value)))
-
-  return Options(output=output,
-                 rulename=cmdline_options.rulename,
-                 header_guard=header_guard,
-                 flags=flags)
-
-
-def WriteHeader(options):
-  with open(options.output, 'w') as output_file:
-    output_file.write("// Generated by build/write_buildflag_header.py\n")
-    if options.rulename:
-      output_file.write('// From "' + options.rulename + '"\n')
-
-    output_file.write('\n#ifndef %s\n' % options.header_guard)
-    output_file.write('#define %s\n\n' % options.header_guard)
-    output_file.write('#include "build/buildflag.h"\n\n')
-
-    for pair in options.flags:
-      output_file.write('#define BUILDFLAG_INTERNAL_%s() (%s)\n' % pair)
-
-    output_file.write('\n#endif  // %s\n' % options.header_guard)
-
-
-options = GetOptions()
-WriteHeader(options)
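
The flag canonicalization above is small enough to sketch inline; with hypothetical flags ENABLE_DOOM_MELON=true and DOOM_MELON_MAX=5, the script would emit:

  # Sketch only: mirrors the true/false -> 1/0 remapping and the
  # BUILDFLAG_INTERNAL_*() lines written above. Flag names are made up.
  flags = ['ENABLE_DOOM_MELON=true', 'DOOM_MELON_MAX=5']
  for flag in flags:
    key, value = flag.split('=', 1)
    value = {'true': '1', 'false': '0'}.get(value, value)
    print('#define BUILDFLAG_INTERNAL_%s() (%s)' % (key, value))
  # -> #define BUILDFLAG_INTERNAL_ENABLE_DOOM_MELON() (1)
  # -> #define BUILDFLAG_INTERNAL_DOOM_MELON_MAX() (5)
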
diff --git a/src/build_config.h b/src/build_config.h
new file mode 100644
index 0000000..89d0ff7
--- /dev/null
+++ b/src/build_config.h
@@ -0,0 +1,207 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file adds defines about the platform we're currently building on.
+//  Operating System:
+//    OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX) /
+//    OS_NACL (NACL_SFI or NACL_NONSFI) / OS_NACL_SFI / OS_NACL_NONSFI
+//    OS_CHROMEOS is set by the build system
+//  Compiler:
+//    COMPILER_MSVC / COMPILER_GCC
+//  Processor:
+//    ARCH_CPU_X86 / ARCH_CPU_X86_64 / ARCH_CPU_X86_FAMILY (X86 or X86_64)
+//    ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
+
+#ifndef BUILD_BUILD_CONFIG_H_
+#define BUILD_BUILD_CONFIG_H_
+
+// A set of macros to use for platform detection.
+#if defined(__native_client__)
+// __native_client__ must be first, so that other OS_ defines are not set.
+#define OS_NACL 1
+// OS_NACL comes in two sandboxing technology flavors, SFI or Non-SFI.
+// PNaCl toolchain defines __native_client_nonsfi__ macro in Non-SFI build
+// mode, while it does not in SFI build mode.
+#if defined(__native_client_nonsfi__)
+#define OS_NACL_NONSFI
+#else
+#define OS_NACL_SFI
+#endif
+#elif defined(ANDROID)
+#define OS_ANDROID 1
+#elif defined(__APPLE__)
+// Only include TargetConditionals.h after testing ANDROID, as some Android
+// builds on Mac don't have this header available and it's not needed unless
+// the target is really Mac/iOS.
+#include <TargetConditionals.h>
+#define OS_MACOSX 1
+#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#define OS_IOS 1
+#endif  // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#elif defined(__linux__)
+#define OS_LINUX 1
+// include a system header to pull in features.h for glibc/uclibc macros.
+#include <unistd.h>
+#if defined(__GLIBC__) && !defined(__UCLIBC__)
+// we really are using glibc, not uClibc pretending to be glibc
+#define LIBC_GLIBC 1
+#endif
+#elif defined(_WIN32)
+#define OS_WIN 1
+#elif defined(__Fuchsia__)
+#define OS_FUCHSIA 1
+#elif defined(__FreeBSD__)
+#define OS_FREEBSD 1
+#elif defined(__NetBSD__)
+#define OS_NETBSD 1
+#elif defined(__OpenBSD__)
+#define OS_OPENBSD 1
+#elif defined(__sun)
+#define OS_SOLARIS 1
+#elif defined(__QNXNTO__)
+#define OS_QNX 1
+#elif defined(_AIX)
+#define OS_AIX 1
+#elif defined(__asmjs__)
+#define OS_ASMJS
+#else
+#error Please add support for your platform in build_config.h
+#endif
+// NOTE: Adding a new port? Please follow
+// https://chromium.googlesource.com/chromium/src/+/master/docs/new_port_policy.md
+
+// For access to standard BSD features, use OS_BSD instead of a
+// more specific macro.
+#if defined(OS_FREEBSD) || defined(OS_NETBSD) || defined(OS_OPENBSD)
+#define OS_BSD 1
+#endif
+
+// For access to standard POSIXish features, use OS_POSIX instead of a
+// more specific macro.
+#if defined(OS_AIX) || defined(OS_ANDROID) || defined(OS_ASMJS) ||    \
+    defined(OS_FREEBSD) || defined(OS_LINUX) || defined(OS_MACOSX) || \
+    defined(OS_NACL) || defined(OS_NETBSD) || defined(OS_OPENBSD) ||  \
+    defined(OS_QNX) || defined(OS_SOLARIS)
+#define OS_POSIX 1
+#endif
+
+// Use tcmalloc
+#if (defined(OS_WIN) || defined(OS_LINUX) || defined(OS_ANDROID)) && \
+    !defined(NO_TCMALLOC)
+#define USE_TCMALLOC 1
+#endif
+
+// Compiler detection.
+#if defined(__GNUC__)
+#define COMPILER_GCC 1
+#elif defined(_MSC_VER)
+#define COMPILER_MSVC 1
+#else
+#error Please add support for your compiler in build_config.h
+#endif
+
+// Processor architecture detection.  For more info on what's defined, see:
+//   http://msdn.microsoft.com/en-us/library/b0084kay.aspx
+//   http://www.agner.org/optimize/calling_conventions.pdf
+//   or with gcc, run: "echo | gcc -E -dM -"
+#if defined(_M_X64) || defined(__x86_64__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86_64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(_M_IX86) || defined(__i386__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__s390x__)
+#define ARCH_CPU_S390_FAMILY 1
+#define ARCH_CPU_S390X 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#elif defined(__s390__)
+#define ARCH_CPU_S390_FAMILY 1
+#define ARCH_CPU_S390 1
+#define ARCH_CPU_31_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#elif (defined(__PPC64__) || defined(__PPC__)) && defined(__BIG_ENDIAN__)
+#define ARCH_CPU_PPC64_FAMILY 1
+#define ARCH_CPU_PPC64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#elif defined(__PPC64__)
+#define ARCH_CPU_PPC64_FAMILY 1
+#define ARCH_CPU_PPC64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__ARMEL__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARMEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__aarch64__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARM64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__pnacl__) || defined(__asmjs__)
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__MIPSEL__)
+#if defined(__LP64__)
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPS64EL 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#else
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPSEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#endif
+#elif defined(__MIPSEB__)
+#if defined(__LP64__)
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPS64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#else
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPS 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#endif
+#else
+#error Please add support for your architecture in build_config.h
+#endif
+
+// Type detection for wchar_t.
+#if defined(OS_WIN)
+#define WCHAR_T_IS_UTF16
+#elif defined(OS_FUCHSIA)
+#define WCHAR_T_IS_UTF32
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff)
+#define WCHAR_T_IS_UTF32
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff)
+// On Posix, we'll detect short wchar_t, but projects aren't guaranteed to
+// compile in this mode (in particular, Chrome doesn't). This is intended for
+// other projects using base that manage their own dependencies and make sure
+// short wchar works for them.
+#define WCHAR_T_IS_UTF16
+#else
+#error Please add support for your compiler in build_config.h
+#endif
+
+#if defined(OS_ANDROID)
+// The compiler thinks std::string::const_iterator and "const char*" are
+// equivalent types.
+#define STD_STRING_ITERATOR_IS_CHAR_POINTER
+// The compiler thinks base::string16::const_iterator and "char16*" are
+// equivalent types.
+#define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER
+#endif
+
+#endif  // BUILD_BUILD_CONFIG_H_
diff --git a/testing/libfuzzer/libfuzzer_exports.h b/testing/libfuzzer/libfuzzer_exports.h
index 0612512..3e0cf98 100644
--- a/testing/libfuzzer/libfuzzer_exports.h
+++ b/testing/libfuzzer/libfuzzer_exports.h
@@ -5,7 +5,7 @@
 #ifndef TESTING_LIBFUZZER_LIBFUZZER_EXPORTS_H_
 #define TESTING_LIBFUZZER_LIBFUZZER_EXPORTS_H_
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 // On macOS, the linker may strip symbols for functions that are not reachable
 // by the program entrypoint. Several libFuzzer functions are resolved via
diff --git a/third_party/googletest/custom/gtest/internal/custom/gtest.h b/third_party/googletest/custom/gtest/internal/custom/gtest.h
index c7fe788..702251f 100644
--- a/third_party/googletest/custom/gtest/internal/custom/gtest.h
+++ b/third_party/googletest/custom/gtest/internal/custom/gtest.h
@@ -5,7 +5,7 @@
 #ifndef THIRD_PARTY_GOOGLETEST_CUSTOM_GTEST_INTERNAL_CUSTOM_GTEST_H_
 #define THIRD_PARTY_GOOGLETEST_CUSTOM_GTEST_INTERNAL_CUSTOM_GTEST_H_
 
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if !defined(GTEST_DISABLE_PRINT_STACK_TRACE)
 #include "third_party/googletest/custom/gtest/internal/custom/stack_trace_getter.h"
diff --git a/tools/gn/args.cc b/tools/gn/args.cc
index a53eb49..a9adfb8 100644
--- a/tools/gn/args.cc
+++ b/tools/gn/args.cc
@@ -5,7 +5,7 @@
 #include "tools/gn/args.h"
 
 #include "base/sys_info.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/source_file.h"
 #include "tools/gn/string_utils.h"
 #include "tools/gn/variables.h"
diff --git a/tools/gn/bootstrap/bootstrap.py b/tools/gn/bootstrap/bootstrap.py
deleted file mode 100755
index 00bfa56..0000000
--- a/tools/gn/bootstrap/bootstrap.py
+++ /dev/null
@@ -1,945 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file isn't officially supported by the Chromium project. It's maintained
-# on a best-effort basis by volunteers, so some things may be broken from time
-# to time. If you encounter errors, it's most often due to files in base that
-# have been added or moved since somebody last tried this script. Generally
-# such errors are easy to diagnose.
-
-"""Bootstraps gn.
-
-This is done by first building gn manually in a temporary directory, then
-building it with its own BUILD.gn into the final destination.
-"""
-
-import contextlib
-import errno
-import logging
-import optparse
-import os
-import platform
-import shutil
-import subprocess
-import sys
-import tempfile
-
-BOOTSTRAP_DIR = os.path.dirname(os.path.abspath(__file__))
-GN_ROOT = os.path.dirname(BOOTSTRAP_DIR)
-SRC_ROOT = os.path.dirname(os.path.dirname(GN_ROOT))
-
-is_win = sys.platform.startswith('win')
-is_linux = sys.platform.startswith('linux')
-is_mac = sys.platform.startswith('darwin')
-is_aix = sys.platform.startswith('aix')
-is_posix = is_linux or is_mac or is_aix
-
-def check_call(cmd, **kwargs):
-  logging.debug('Running: %s', ' '.join(cmd))
-
-  subprocess.check_call(cmd, cwd=GN_ROOT, **kwargs)
-
-def check_output(cmd, cwd=GN_ROOT, **kwargs):
-  logging.debug('Running: %s', ' '.join(cmd))
-
-  return subprocess.check_output(cmd, cwd=cwd, **kwargs)
-
-def mkdir_p(path):
-  try:
-    os.makedirs(path)
-  except OSError as e:
-    if e.errno == errno.EEXIST and os.path.isdir(path):
-      pass
-    else: raise
-
-@contextlib.contextmanager
-def scoped_tempdir():
-  path = tempfile.mkdtemp()
-  try:
-    yield path
-  finally:
-    shutil.rmtree(path)
-
-
-def run_build(tempdir, options):
-  if options.build_path:
-    build_rel = options.build_path
-  elif options.debug:
-    build_rel = os.path.join('out', 'Debug')
-  else:
-    build_rel = os.path.join('out', 'Release')
-  build_root = os.path.join(SRC_ROOT, build_rel)
-
-  windows_x64_toolchain = None
-  if is_win:
-    windows_x64_toolchain = windows_prepare_toolchain(tempdir)
-    os.environ["PATH"] = windows_x64_toolchain["paths"]
-
-  print 'Building gn manually in a temporary directory for bootstrapping...'
-  build_gn_with_ninja_manually(tempdir, options, windows_x64_toolchain)
-  temp_gn = os.path.join(tempdir, 'gn')
-  out_gn = os.path.join(build_root, 'gn')
-
-  if is_win:
-    temp_gn += '.exe'
-    out_gn += '.exe'
-
-  if options.no_rebuild:
-    mkdir_p(build_root)
-    shutil.copy2(temp_gn, out_gn)
-  else:
-    print 'Building gn using itself to %s...' % build_rel
-    build_gn_with_gn(temp_gn, build_root, options)
-
-  if options.output:
-    # Preserve the executable permission bit.
-    shutil.copy2(out_gn, options.output)
-
-def windows_target_build_arch():
-    # Target build architecture set by vcvarsall.bat
-    target_arch = os.environ.get('Platform')
-    if target_arch in ['x64', 'x86']: return target_arch
-
-    if platform.machine().lower() in ['x86_64', 'amd64']: return 'x64'
-    return 'x86'
-
-def windows_prepare_toolchain(tempdir):
-
-  def CallPythonScopeScript(command, **kwargs):
-    response = check_output(command, **kwargs)
-
-    _globals = {"__builtins__":None}
-    _locals = {}
-    exec(response, _globals, _locals)
-
-    return _locals
-
-  toolchain_paths = CallPythonScopeScript(
-      [sys.executable,
-       os.path.join(SRC_ROOT, "build", "vs_toolchain.py"),
-      "get_toolchain_dir"],
-      cwd=tempdir)
-
-  windows_x64_toolchain =  CallPythonScopeScript(
-      [sys.executable,
-       os.path.join(SRC_ROOT, "build", "toolchain",
-                    "win", "setup_toolchain.py"),
-       toolchain_paths["vs_path"],
-       toolchain_paths["sdk_path"],
-       toolchain_paths["runtime_dirs"],
-       "win",
-       "x64",
-       "environment.x64",
-       "true"
-      ],
-      cwd=tempdir)
-
-  return windows_x64_toolchain
-
-def main(argv):
-  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
-  parser.add_option('-d', '--debug', action='store_true',
-                    help='Do a debug build. Defaults to release build.')
-  parser.add_option('-o', '--output',
-                    help='place output in PATH', metavar='PATH')
-  parser.add_option('-s', '--no-rebuild', action='store_true',
-                    help='Do not rebuild GN with GN.')
-  parser.add_option('--no-clean', action='store_true',
-                    help='Re-use the build directory instead of using a new '
-                         'temporary location each time.')
-  parser.add_option('--gn-gen-args', help='Args to pass to gn gen --args')
-  parser.add_option('--build-path', help='The directory in which to build gn, '
-                    'relative to the src directory (e.g. out/Release). '
-                    'In no-clean mode an absolute path will also force '
-                    'the out_bootstrap to be located in the parent directory.')
-  parser.add_option('-v', '--verbose', action='store_true',
-                    help='Log more details')
-  options, args = parser.parse_args(argv)
-
-  if args:
-    parser.error('Unrecognized command line arguments: %s.' % ', '.join(args))
-
-  options.no_rebuild = True
-
-  logging.basicConfig(level=logging.DEBUG if options.verbose else logging.ERROR)
-
-  try:
-    if options.no_clean:
-      out_bootstrap_dir = SRC_ROOT
-      if options.build_path and os.path.isabs(options.build_path):
-        out_bootstrap_dir = os.path.dirname(options.build_path)
-      build_dir = os.path.join(out_bootstrap_dir, 'out_bootstrap')
-      if not os.path.exists(build_dir):
-        os.makedirs(build_dir)
-      return run_build(build_dir, options)
-    else:
-      with scoped_tempdir() as tempdir:
-        return run_build(tempdir, options)
-  except subprocess.CalledProcessError as e:
-    print >> sys.stderr, str(e)
-    return 1
-  return 0
-
-def write_compiled_message(root_gen_dir, source):
-  path = os.path.join(root_gen_dir, os.path.dirname(source))
-  mkdir_p(path)
-  check_call([
-      'mc.exe',
-      '-r', path, '-h', path,
-      '-u', '-um',
-      os.path.join(SRC_ROOT, source),
-  ])
-
-def build_gn_with_ninja_manually(tempdir, options, windows_x64_toolchain):
-  root_gen_dir = os.path.join(tempdir, 'gen')
-  mkdir_p(root_gen_dir)
-
-  if is_win:
-    write_compiled_message(root_gen_dir,
-        'base/trace_event/etw_manifest/chrome_events_win.man')
-
-  write_gn_ninja(os.path.join(tempdir, 'build.ninja'),
-                 root_gen_dir, options, windows_x64_toolchain)
-  cmd = ['ninja', '-C', tempdir, '-w', 'dupbuild=err']
-  if options.verbose:
-    cmd.append('-v')
-
-  if is_win:
-    cmd.append('gn.exe')
-  else:
-    cmd.append('gn')
-
-  check_call(cmd)
-
-def write_generic_ninja(path, static_libraries, executables,
-                        cc, cxx, ar, ld,
-                        cflags=[], cflags_cc=[], ldflags=[],
-                        include_dirs=[], solibs=[]):
-  ninja_header_lines = [
-    'cc = ' + cc,
-    'cxx = ' + cxx,
-    'ar = ' + ar,
-    'ld = ' + ld,
-    '',
-  ]
-
-  if is_win:
-    template_filename = 'build_vs.ninja.template'
-  elif is_mac:
-    template_filename = 'build_mac.ninja.template'
-  elif is_aix:
-    template_filename = 'build_aix.ninja.template'
-  else:
-    template_filename = 'build.ninja.template'
-
-  with open(os.path.join(GN_ROOT, 'bootstrap', template_filename)) as f:
-    ninja_template = f.read()
-
-  if is_win:
-    executable_ext = '.exe'
-    library_ext = '.lib'
-    object_ext = '.obj'
-  else:
-    executable_ext = ''
-    library_ext = '.a'
-    object_ext = '.o'
-
-  def escape_path_ninja(path):
-      return path.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:')
-
-  def src_to_obj(path):
-    return escape_path_ninja('%s' % os.path.splitext(path)[0] + object_ext)
-
-  def library_to_a(library):
-    return '%s%s' % (library, library_ext)
-
-  ninja_lines = []
-  def build_source(src_file, settings):
-    ninja_lines.extend([
-        'build %s: %s %s' % (src_to_obj(src_file),
-                             settings['tool'],
-                             escape_path_ninja(
-                                 os.path.join(SRC_ROOT, src_file))),
-        '  includes = %s' % ' '.join(
-            ['-I' + escape_path_ninja(dirname) for dirname in
-             include_dirs + settings.get('include_dirs', [])]),
-        '  cflags = %s' % ' '.join(cflags + settings.get('cflags', [])),
-        '  cflags_cc = %s' %
-            ' '.join(cflags_cc + settings.get('cflags_cc', [])),
-    ])
-
-  for library, settings in static_libraries.iteritems():
-    for src_file in settings['sources']:
-      build_source(src_file, settings)
-
-    ninja_lines.append('build %s: alink_thin %s' % (
-        library_to_a(library),
-        ' '.join([src_to_obj(src_file) for src_file in settings['sources']])))
-
-  for executable, settings in executables.iteritems():
-    for src_file in settings['sources']:
-      build_source(src_file, settings)
-
-    ninja_lines.extend([
-      'build %s%s: link %s | %s' % (
-          executable, executable_ext,
-          ' '.join([src_to_obj(src_file) for src_file in settings['sources']]),
-          ' '.join([library_to_a(library) for library in settings['libs']])),
-      '  ldflags = %s' % ' '.join(ldflags),
-      '  solibs = %s' % ' '.join(solibs),
-      '  libs = %s' % ' '.join(
-          [library_to_a(library) for library in settings['libs']]),
-    ])
-
-  ninja_lines.append('')  # Make sure the file ends with a newline.
-
-  with open(path, 'w') as f:
-    f.write('\n'.join(ninja_header_lines))
-    f.write(ninja_template)
-    f.write('\n'.join(ninja_lines))
-
-def write_gn_ninja(path, root_gen_dir, options, windows_x64_toolchain):
-  if is_win:
-    CCPATH = windows_x64_toolchain["vc_bin_dir"]
-
-    cc = os.environ.get('CC', os.path.join(CCPATH, 'cl.exe'))
-    cxx = os.environ.get('CXX', os.path.join(CCPATH, 'cl.exe'))
-    ld = os.environ.get('LD', os.path.join(CCPATH, 'link.exe'))
-    ar = os.environ.get('AR', os.path.join(CCPATH, 'lib.exe'))
-  elif is_aix:
-    cc = os.environ.get('CC', 'gcc')
-    cxx = os.environ.get('CXX', 'c++')
-    ld = os.environ.get('LD', cxx)
-    ar = os.environ.get('AR', 'ar -X64')
-  else:
-    cc = os.environ.get('CC', 'cc')
-    cxx = os.environ.get('CXX', 'c++')
-    ld = cxx
-    ar = os.environ.get('AR', 'ar')
-
-  cflags = os.environ.get('CFLAGS', '').split()
-  cflags_cc = os.environ.get('CXXFLAGS', '').split()
-  ldflags = os.environ.get('LDFLAGS', '').split()
-  include_dirs = [root_gen_dir, SRC_ROOT]
-  libs = []
-
-  # //base/allocator/allocator_extension.cc needs this macro defined;
-  # otherwise there would be link errors.
-  cflags.extend(['-DNO_TCMALLOC', '-D__STDC_FORMAT_MACROS'])
-
-  if is_posix:
-    if options.debug:
-      cflags.extend(['-O0', '-g'])
-    else:
-      # The linux::ppc64 BE binary doesn't "work" when
-      # optimization level is set to 2 (0 works fine).
-      # Note that the current bootstrap script has no way to detect host_cpu.
-      # This can be easily fixed once we start building using a GN binary,
-      # as the optimization flag can then just be set using the
-      # logic inside //build/toolchain.
-      cflags.extend(['-O2', '-g0'])
-
-    cflags.extend([
-        '-D_FILE_OFFSET_BITS=64',
-        '-D__STDC_CONSTANT_MACROS', '-D__STDC_FORMAT_MACROS',
-        '-pthread',
-        '-pipe',
-        '-fno-exceptions'
-    ])
-    cflags_cc.extend(['-std=c++14', '-Wno-c++11-narrowing'])
-    if is_aix:
-     cflags.extend(['-maix64'])
-     ldflags.extend([ '-maix64 -Wl,-bbigtoc' ])
-  elif is_win:
-    if not options.debug:
-      cflags.extend(['/Ox', '/DNDEBUG', '/GL'])
-      ldflags.extend(['/LTCG', '/OPT:REF', '/OPT:ICF'])
-
-    cflags.extend([
-        '/FS',
-        '/Gy',
-        '/W3', '/wd4244',
-        '/Zi',
-        '/DWIN32_LEAN_AND_MEAN', '/DNOMINMAX',
-        '/D_CRT_SECURE_NO_DEPRECATE', '/D_SCL_SECURE_NO_DEPRECATE',
-        '/D_WIN32_WINNT=0x0A00', '/DWINVER=0x0A00',
-        '/DUNICODE', '/D_UNICODE',
-    ])
-    cflags_cc.extend([
-        '/GR-',
-        '/D_HAS_EXCEPTIONS=0',
-    ])
-
-    target_arch = windows_target_build_arch()
-    if target_arch == 'x64':
-        ldflags.extend(['/MACHINE:x64'])
-    else:
-        ldflags.extend(['/MACHINE:x86'])
-
-  static_libraries = {
-      'base': {'sources': [], 'tool': 'cxx', 'include_dirs': []},
-      'dynamic_annotations': {'sources': [], 'tool': 'cc', 'include_dirs': []},
-      'gn_lib': {'sources': [], 'tool': 'cxx', 'include_dirs': []},
-  }
-
-  executables = {
-      'gn': {'sources': ['tools/gn/gn_main.cc'],
-             'tool': 'cxx', 'include_dirs': [], 'libs': []},
-  }
-
-  for name in os.listdir(GN_ROOT):
-    if not name.endswith('.cc'):
-      continue
-    if name.endswith('_unittest.cc'):
-      continue
-    if name == 'run_all_unittests.cc':
-      continue
-    if name == 'test_with_scheduler.cc':
-      continue
-    if name == 'gn_main.cc':
-      continue
-    full_path = os.path.join(GN_ROOT, name)
-    static_libraries['gn_lib']['sources'].append(
-        os.path.relpath(full_path, SRC_ROOT))
-
-  static_libraries['dynamic_annotations']['sources'].extend([
-      'base/third_party/dynamic_annotations/dynamic_annotations.c',
-      'base/third_party/superfasthash/superfasthash.c',
-  ])
-  static_libraries['base']['sources'].extend([
-      'base/allocator/allocator_check.cc',
-      'base/allocator/allocator_extension.cc',
-      'base/at_exit.cc',
-      'base/base_paths.cc',
-      'base/base_switches.cc',
-      'base/callback_helpers.cc',
-      'base/callback_internal.cc',
-      'base/command_line.cc',
-      'base/debug/activity_tracker.cc',
-      'base/debug/alias.cc',
-      'base/debug/crash_logging.cc',
-      'base/debug/dump_without_crashing.cc',
-      'base/debug/stack_trace.cc',
-      'base/debug/task_annotator.cc',
-      'base/debug/thread_heap_usage_tracker.cc',
-      'base/environment.cc',
-      'base/feature_list.cc',
-      'base/files/file.cc',
-      'base/files/file_enumerator.cc',
-      'base/files/file_path.cc',
-      'base/files/file_path_constants.cc',
-      'base/files/file_tracing.cc',
-      'base/files/file_util.cc',
-      'base/files/important_file_writer.cc',
-      'base/files/memory_mapped_file.cc',
-      'base/files/scoped_file.cc',
-      'base/hash.cc',
-      'base/json/json_parser.cc',
-      'base/json/json_reader.cc',
-      'base/json/json_string_value_serializer.cc',
-      'base/json/json_writer.cc',
-      'base/json/string_escape.cc',
-      'base/lazy_instance_helpers.cc',
-      'base/location.cc',
-      'base/logging.cc',
-      'base/md5.cc',
-      'base/memory/platform_shared_memory_region.cc',
-      'base/memory/read_only_shared_memory_region.cc',
-      'base/memory/ref_counted.cc',
-      'base/memory/ref_counted_memory.cc',
-      'base/memory/shared_memory_mapping.cc',
-      'base/memory/shared_memory_handle.cc',
-      'base/memory/shared_memory_tracker.cc',
-      'base/memory/weak_ptr.cc',
-      'base/message_loop/incoming_task_queue.cc',
-      'base/message_loop/message_loop.cc',
-      'base/message_loop/message_loop_current.cc',
-      'base/message_loop/message_loop_task_runner.cc',
-      'base/message_loop/message_pump.cc',
-      'base/message_loop/message_pump_default.cc',
-      'base/message_loop/watchable_io_message_pump_posix.cc',
-      'base/metrics/bucket_ranges.cc',
-      'base/metrics/dummy_histogram.cc',
-      'base/metrics/field_trial.cc',
-      'base/metrics/field_trial_param_associator.cc',
-      'base/metrics/field_trial_params.cc',
-      'base/metrics/histogram.cc',
-      'base/metrics/histogram_base.cc',
-      'base/metrics/histogram_functions.cc',
-      'base/metrics/histogram_samples.cc',
-      'base/metrics/histogram_snapshot_manager.cc',
-      'base/metrics/metrics_hashes.cc',
-      'base/metrics/persistent_histogram_allocator.cc',
-      'base/metrics/persistent_memory_allocator.cc',
-      'base/metrics/persistent_sample_map.cc',
-      'base/metrics/sample_map.cc',
-      'base/metrics/sample_vector.cc',
-      'base/metrics/sparse_histogram.cc',
-      'base/metrics/statistics_recorder.cc',
-      'base/observer_list_threadsafe.cc',
-      'base/path_service.cc',
-      'base/pending_task.cc',
-      'base/pickle.cc',
-      'base/process/kill.cc',
-      'base/process/memory.cc',
-      'base/process/process_handle.cc',
-      'base/process/process_iterator.cc',
-      'base/process/process_metrics.cc',
-      'base/rand_util.cc',
-      'base/run_loop.cc',
-      'base/sequence_token.cc',
-      'base/sequence_checker_impl.cc',
-      'base/sequenced_task_runner.cc',
-      'base/sha1.cc',
-      'base/strings/pattern.cc',
-      'base/strings/string_number_conversions.cc',
-      'base/strings/string_piece.cc',
-      'base/strings/string_split.cc',
-      'base/strings/string_util.cc',
-      'base/strings/string_util_constants.cc',
-      'base/strings/stringprintf.cc',
-      'base/strings/utf_string_conversion_utils.cc',
-      'base/strings/utf_string_conversions.cc',
-      'base/synchronization/atomic_flag.cc',
-      'base/synchronization/lock.cc',
-      'base/sys_info.cc',
-      'base/task_runner.cc',
-      'base/task_scheduler/delayed_task_manager.cc',
-      'base/task_scheduler/environment_config.cc',
-      'base/task_scheduler/post_task.cc',
-      'base/task_scheduler/priority_queue.cc',
-      'base/task_scheduler/scheduler_lock_impl.cc',
-      'base/task_scheduler/scheduler_single_thread_task_runner_manager.cc',
-      'base/task_scheduler/scheduler_worker.cc',
-      'base/task_scheduler/scheduler_worker_pool.cc',
-      'base/task_scheduler/scheduler_worker_pool_impl.cc',
-      'base/task_scheduler/scheduler_worker_pool_params.cc',
-      'base/task_scheduler/scheduler_worker_stack.cc',
-      'base/task_scheduler/scoped_set_task_priority_for_current_thread.cc',
-      'base/task_scheduler/sequence.cc',
-      'base/task_scheduler/sequence_sort_key.cc',
-      'base/task_scheduler/service_thread.cc',
-      'base/task_scheduler/task.cc',
-      'base/task_scheduler/task_scheduler.cc',
-      'base/task_scheduler/task_scheduler_impl.cc',
-      'base/task_scheduler/task_tracker.cc',
-      'base/task_scheduler/task_traits.cc',
-      'base/third_party/dmg_fp/dtoa_wrapper.cc',
-      'base/third_party/dmg_fp/g_fmt.cc',
-      'base/third_party/icu/icu_utf.cc',
-      'base/third_party/nspr/prtime.cc',
-      'base/threading/post_task_and_reply_impl.cc',
-      'base/threading/scoped_blocking_call.cc',
-      'base/threading/sequence_local_storage_map.cc',
-      'base/threading/sequenced_task_runner_handle.cc',
-      'base/threading/simple_thread.cc',
-      'base/threading/thread.cc',
-      'base/threading/thread_checker_impl.cc',
-      'base/threading/thread_collision_warner.cc',
-      'base/threading/thread_id_name_manager.cc',
-      'base/threading/thread_local_storage.cc',
-      'base/threading/thread_restrictions.cc',
-      'base/threading/thread_task_runner_handle.cc',
-      'base/time/clock.cc',
-      'base/time/default_clock.cc',
-      'base/time/default_tick_clock.cc',
-      'base/time/tick_clock.cc',
-      'base/time/time.cc',
-      'base/timer/elapsed_timer.cc',
-      'base/timer/timer.cc',
-      'base/trace_event/category_registry.cc',
-      'base/trace_event/event_name_filter.cc',
-      'base/trace_event/heap_profiler_allocation_context.cc',
-      'base/trace_event/heap_profiler_allocation_context_tracker.cc',
-      'base/trace_event/heap_profiler_event_filter.cc',
-      'base/trace_event/heap_profiler_heap_dump_writer.cc',
-      'base/trace_event/heap_profiler_serialization_state.cc',
-      'base/trace_event/heap_profiler_stack_frame_deduplicator.cc',
-      'base/trace_event/heap_profiler_type_name_deduplicator.cc',
-      'base/trace_event/malloc_dump_provider.cc',
-      'base/trace_event/memory_allocator_dump.cc',
-      'base/trace_event/memory_allocator_dump_guid.cc',
-      'base/trace_event/memory_dump_manager.cc',
-      'base/trace_event/memory_dump_provider_info.cc',
-      'base/trace_event/memory_dump_request_args.cc',
-      'base/trace_event/memory_dump_scheduler.cc',
-      'base/trace_event/memory_infra_background_whitelist.cc',
-      'base/trace_event/memory_peak_detector.cc',
-      'base/trace_event/memory_usage_estimator.cc',
-      'base/trace_event/process_memory_dump.cc',
-      'base/trace_event/trace_buffer.cc',
-      'base/trace_event/trace_config.cc',
-      'base/trace_event/trace_config_category_filter.cc',
-      'base/trace_event/trace_event_argument.cc',
-      'base/trace_event/trace_event_filter.cc',
-      'base/trace_event/trace_event_impl.cc',
-      'base/trace_event/trace_event_memory_overhead.cc',
-      'base/trace_event/trace_log.cc',
-      'base/trace_event/trace_log_constants.cc',
-      'base/trace_event/tracing_agent.cc',
-      'base/unguessable_token.cc',
-      'base/value_iterators.cc',
-      'base/values.cc',
-      'base/vlog.cc',
-  ])
-
-  if is_win:
-    static_libraries['base']['sources'].extend([
-        'base/memory/platform_shared_memory_region_win.cc'
-    ])
-  elif is_mac:
-    static_libraries['base']['sources'].extend([
-        'base/memory/platform_shared_memory_region_mac.cc'
-    ])
-  elif is_posix:
-    static_libraries['base']['sources'].extend([
-        'base/memory/platform_shared_memory_region_posix.cc'
-    ])
-
-  if is_posix:
-    static_libraries['base']['sources'].extend([
-        'base/base_paths_posix.cc',
-        'base/debug/debugger_posix.cc',
-        'base/debug/stack_trace_posix.cc',
-        'base/files/file_enumerator_posix.cc',
-        'base/files/file_descriptor_watcher_posix.cc',
-        'base/files/file_posix.cc',
-        'base/files/file_util_posix.cc',
-        'base/files/memory_mapped_file_posix.cc',
-        'base/memory/shared_memory_helper.cc',
-        'base/message_loop/message_pump_libevent.cc',
-        'base/posix/file_descriptor_shuffle.cc',
-        'base/posix/global_descriptors.cc',
-        'base/posix/safe_strerror.cc',
-        'base/process/kill_posix.cc',
-        'base/process/process_handle_posix.cc',
-        'base/process/process_metrics_posix.cc',
-        'base/process/process_posix.cc',
-        'base/rand_util_posix.cc',
-        'base/strings/string16.cc',
-        'base/synchronization/condition_variable_posix.cc',
-        'base/synchronization/lock_impl_posix.cc',
-        'base/sys_info_posix.cc',
-        'base/task_scheduler/task_tracker_posix.cc',
-        'base/threading/platform_thread_internal_posix.cc',
-        'base/threading/platform_thread_posix.cc',
-        'base/threading/thread_local_storage_posix.cc',
-        'base/time/time_conversion_posix.cc',
-    ])
-    static_libraries['libevent'] = {
-        'sources': [
-            'base/third_party/libevent/buffer.c',
-            'base/third_party/libevent/evbuffer.c',
-            'base/third_party/libevent/evdns.c',
-            'base/third_party/libevent/event.c',
-            'base/third_party/libevent/event_tagging.c',
-            'base/third_party/libevent/evrpc.c',
-            'base/third_party/libevent/evutil.c',
-            'base/third_party/libevent/http.c',
-            'base/third_party/libevent/log.c',
-            'base/third_party/libevent/poll.c',
-            'base/third_party/libevent/select.c',
-            'base/third_party/libevent/signal.c',
-            'base/third_party/libevent/strlcpy.c',
-        ],
-        'tool': 'cc',
-        'include_dirs': [],
-        'cflags': cflags + ['-DHAVE_CONFIG_H'],
-    }
-
-  if is_linux or is_aix:
-    static_libraries['xdg_user_dirs'] = {
-        'sources': [
-            'base/third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
-        ],
-        'tool': 'cxx',
-    }
-    static_libraries['base']['sources'].extend([
-        'base/memory/shared_memory_handle_posix.cc',
-        'base/memory/shared_memory_posix.cc',
-        'base/nix/xdg_util.cc',
-        'base/process/internal_linux.cc',
-        'base/process/memory_linux.cc',
-        'base/process/process_handle_linux.cc',
-        'base/process/process_info_linux.cc',
-        'base/process/process_iterator_linux.cc',
-        'base/process/process_linux.cc',
-        'base/process/process_metrics_linux.cc',
-        'base/strings/sys_string_conversions_posix.cc',
-        'base/synchronization/waitable_event_posix.cc',
-        'base/sys_info_linux.cc',
-        'base/time/time_exploded_posix.cc',
-        'base/time/time_now_posix.cc',
-        'base/threading/platform_thread_linux.cc',
-    ])
-    if is_linux:
-      libcxx_root = SRC_ROOT + '/buildtools/third_party/libc++/trunk'
-      libcxxabi_root = SRC_ROOT + '/buildtools/third_party/libc++abi/trunk'
-      cflags_cc.extend([
-          '-nostdinc++',
-          '-isystem' + libcxx_root + '/include',
-          '-isystem' + libcxxabi_root + '/include',
-      ])
-      ldflags.extend(['-nodefaultlibs'])
-      libs.extend([
-          '-lc',
-          '-lgcc_s',
-          '-lm',
-          '-lpthread',
-      ])
-      static_libraries['libc++'] = {
-          'sources': [
-              libcxx_root + '/src/algorithm.cpp',
-              libcxx_root + '/src/any.cpp',
-              libcxx_root + '/src/bind.cpp',
-              libcxx_root + '/src/chrono.cpp',
-              libcxx_root + '/src/condition_variable.cpp',
-              libcxx_root + '/src/debug.cpp',
-              libcxx_root + '/src/exception.cpp',
-              libcxx_root + '/src/functional.cpp',
-              libcxx_root + '/src/future.cpp',
-              libcxx_root + '/src/hash.cpp',
-              libcxx_root + '/src/ios.cpp',
-              libcxx_root + '/src/iostream.cpp',
-              libcxx_root + '/src/locale.cpp',
-              libcxx_root + '/src/memory.cpp',
-              libcxx_root + '/src/mutex.cpp',
-              libcxx_root + '/src/new.cpp',
-              libcxx_root + '/src/optional.cpp',
-              libcxx_root + '/src/random.cpp',
-              libcxx_root + '/src/regex.cpp',
-              libcxx_root + '/src/shared_mutex.cpp',
-              libcxx_root + '/src/stdexcept.cpp',
-              libcxx_root + '/src/string.cpp',
-              libcxx_root + '/src/strstream.cpp',
-              libcxx_root + '/src/system_error.cpp',
-              libcxx_root + '/src/thread.cpp',
-              libcxx_root + '/src/typeinfo.cpp',
-              libcxx_root + '/src/utility.cpp',
-              libcxx_root + '/src/valarray.cpp',
-              libcxx_root + '/src/variant.cpp',
-              libcxx_root + '/src/vector.cpp',
-          ],
-          'tool': 'cxx',
-          'cflags': cflags + [
-              '-D_LIBCPP_NO_EXCEPTIONS',
-              '-D_LIBCPP_BUILDING_LIBRARY',
-              '-DLIBCXX_BUILDING_LIBCXXABI',
-          ]
-      }
-      static_libraries['libc++abi'] = {
-          'sources': [
-              libcxxabi_root + '/src/abort_message.cpp',
-              libcxxabi_root + '/src/cxa_aux_runtime.cpp',
-              libcxxabi_root + '/src/cxa_default_handlers.cpp',
-              libcxxabi_root + '/src/cxa_demangle.cpp',
-              libcxxabi_root + '/src/cxa_exception_storage.cpp',
-              libcxxabi_root + '/src/cxa_guard.cpp',
-              libcxxabi_root + '/src/cxa_handlers.cpp',
-              libcxxabi_root + '/src/cxa_noexception.cpp',
-              libcxxabi_root + '/src/cxa_unexpected.cpp',
-              libcxxabi_root + '/src/cxa_vector.cpp',
-              libcxxabi_root + '/src/cxa_virtual.cpp',
-              libcxxabi_root + '/src/fallback_malloc.cpp',
-              libcxxabi_root + '/src/private_typeinfo.cpp',
-              libcxxabi_root + '/src/stdlib_exception.cpp',
-              libcxxabi_root + '/src/stdlib_stdexcept.cpp',
-              libcxxabi_root + '/src/stdlib_typeinfo.cpp',
-          ],
-          'tool': 'cxx',
-          'cflags': cflags + [
-              '-DLIBCXXABI_SILENT_TERMINATE',
-              '-D_LIBCXXABI_NO_EXCEPTIONS',
-          ]
-      }
-      static_libraries['base']['sources'].extend([
-        'base/allocator/allocator_shim.cc',
-        'base/allocator/allocator_shim_default_dispatch_to_glibc.cc',
-      ])
-      libs.extend(['-lrt', '-latomic'])
-      static_libraries['libevent']['include_dirs'].extend([
-          os.path.join(SRC_ROOT, 'base', 'third_party', 'libevent', 'linux')
-      ])
-      static_libraries['libevent']['sources'].extend([
-         'base/third_party/libevent/epoll.c',
-      ])
-    else:
-      ldflags.extend(['-pthread'])
-      libs.extend(['-lrt'])
-      static_libraries['base']['sources'].extend([
-          'base/process/internal_aix.cc'
-      ])
-      static_libraries['libevent']['include_dirs'].extend([
-          os.path.join(SRC_ROOT, 'base', 'third_party', 'libevent', 'aix')
-      ])
-      static_libraries['libevent']['include_dirs'].extend([
-          os.path.join(SRC_ROOT, 'base', 'third_party', 'libevent', 'compat')
-      ])
-
-  if is_mac:
-    static_libraries['base']['sources'].extend([
-        'base/base_paths_mac.mm',
-        'base/files/file_util_mac.mm',
-        'base/mac/bundle_locations.mm',
-        'base/mac/call_with_eh_frame.cc',
-        'base/mac/call_with_eh_frame_asm.S',
-        'base/mac/foundation_util.mm',
-        'base/mac/mach_logging.cc',
-        'base/mac/scoped_mach_port.cc',
-        'base/mac/scoped_mach_vm.cc',
-        'base/mac/scoped_nsautorelease_pool.mm',
-        'base/memory/shared_memory_handle_mac.cc',
-        'base/memory/shared_memory_mac.cc',
-        'base/message_loop/message_pump_mac.mm',
-        'base/process/process_handle_mac.cc',
-        'base/process/process_info_mac.cc',
-        'base/process/process_iterator_mac.cc',
-        'base/process/process_metrics_mac.cc',
-        'base/strings/sys_string_conversions_mac.mm',
-        'base/synchronization/waitable_event_mac.cc',
-        'base/sys_info_mac.mm',
-        'base/time/time_exploded_posix.cc',
-        'base/time/time_mac.cc',
-        'base/threading/platform_thread_mac.mm',
-    ])
-    static_libraries['libevent']['include_dirs'].extend([
-        os.path.join(SRC_ROOT, 'base', 'third_party', 'libevent', 'mac')
-    ])
-    static_libraries['libevent']['sources'].extend([
-        'base/third_party/libevent/kqueue.c',
-    ])
-
-    libs.extend([
-        '-framework', 'AppKit',
-        '-framework', 'CoreFoundation',
-        '-framework', 'Foundation',
-        '-framework', 'Security',
-    ])
-
-  if is_win:
-    static_libraries['base']['sources'].extend([
-        "base/allocator/partition_allocator/address_space_randomization.cc",
-        'base/allocator/partition_allocator/page_allocator.cc',
-        "base/allocator/partition_allocator/spin_lock.cc",
-        'base/base_paths_win.cc',
-        'base/cpu.cc',
-        'base/debug/close_handle_hook_win.cc',
-        'base/debug/debugger.cc',
-        'base/debug/debugger_win.cc',
-        'base/debug/profiler.cc',
-        'base/debug/stack_trace_win.cc',
-        'base/file_version_info_win.cc',
-        'base/files/file_enumerator_win.cc',
-        'base/files/file_path_watcher_win.cc',
-        'base/files/file_util_win.cc',
-        'base/files/file_win.cc',
-        'base/files/memory_mapped_file_win.cc',
-        'base/guid.cc',
-        'base/logging_win.cc',
-        'base/memory/memory_pressure_monitor_win.cc',
-        'base/memory/shared_memory_handle_win.cc',
-        'base/memory/shared_memory_win.cc',
-        'base/message_loop/message_pump_win.cc',
-        'base/native_library_win.cc',
-        'base/power_monitor/power_monitor_device_source_win.cc',
-        'base/process/kill_win.cc',
-        'base/process/launch_win.cc',
-        'base/process/memory_win.cc',
-        'base/process/process_handle_win.cc',
-        'base/process/process_info_win.cc',
-        'base/process/process_iterator_win.cc',
-        'base/process/process_metrics_win.cc',
-        'base/process/process_win.cc',
-        'base/profiler/native_stack_sampler_win.cc',
-        'base/profiler/win32_stack_frame_unwinder.cc',
-        'base/rand_util_win.cc',
-        'base/strings/sys_string_conversions_win.cc',
-        'base/sync_socket_win.cc',
-        'base/synchronization/condition_variable_win.cc',
-        'base/synchronization/lock_impl_win.cc',
-        'base/synchronization/waitable_event_watcher_win.cc',
-        'base/synchronization/waitable_event_win.cc',
-        'base/sys_info_win.cc',
-        'base/threading/platform_thread_win.cc',
-        'base/threading/thread_local_storage_win.cc',
-        'base/time/time_win.cc',
-        'base/timer/hi_res_timer_manager_win.cc',
-        'base/trace_event/trace_event_etw_export_win.cc',
-        'base/win/core_winrt_util.cc',
-        'base/win/enum_variant.cc',
-        'base/win/event_trace_controller.cc',
-        'base/win/event_trace_provider.cc',
-        'base/win/i18n.cc',
-        'base/win/iat_patch_function.cc',
-        'base/win/iunknown_impl.cc',
-        'base/win/message_window.cc',
-        'base/win/object_watcher.cc',
-        'base/win/pe_image.cc',
-        'base/win/process_startup_helper.cc',
-        'base/win/registry.cc',
-        'base/win/resource_util.cc',
-        'base/win/scoped_bstr.cc',
-        'base/win/scoped_com_initializer.cc',
-        'base/win/scoped_handle.cc',
-        'base/win/scoped_handle_verifier.cc',
-        'base/win/scoped_process_information.cc',
-        'base/win/scoped_variant.cc',
-        'base/win/scoped_winrt_initializer.cc',
-        'base/win/shortcut.cc',
-        'base/win/startup_information.cc',
-        'base/win/wait_chain.cc',
-        'base/win/win_util.cc',
-        'base/win/windows_version.cc',
-        'base/win/wrapped_window_proc.cc',
-    ])
-
-    libs.extend([
-        'advapi32.lib',
-        'dbghelp.lib',
-        'kernel32.lib',
-        'ole32.lib',
-        'shell32.lib',
-        'user32.lib',
-        'userenv.lib',
-        'version.lib',
-        'winmm.lib',
-        'ws2_32.lib',
-        'Shlwapi.lib',
-    ])
-
-  # we just build static libraries that GN needs
-  executables['gn']['libs'].extend(static_libraries.keys())
-
-  write_generic_ninja(path, static_libraries, executables, cc, cxx, ar, ld,
-                      cflags, cflags_cc, ldflags, include_dirs, libs)
-
-def build_gn_with_gn(temp_gn, build_dir, options):
-  gn_gen_args = options.gn_gen_args or ''
-  if not options.debug:
-    gn_gen_args += ' is_debug=false'
-  cmd = [temp_gn, 'gen', build_dir, '--args=%s' % gn_gen_args,
-          "--root="+SRC_ROOT
-         ]
-  check_call(cmd)
-
-  cmd = ['ninja', '-C', build_dir, '-w', 'dupbuild=err']
-  if options.verbose:
-    cmd.append('-v')
-  cmd.append('gn')
-  check_call(cmd)
-
-  # build.ninja currently refers back to gn from the temporary directory.
-  # Regenerate the build files using the gn we just built so that the reference
-  # gets updated to "./gn".
-  cmd = [os.path.join(build_dir, 'gn'), 'gen', build_dir,
-         '--args=%s' % gn_gen_args]
-  check_call(cmd)
-
-  if not options.debug and not is_win:
-    check_call(['strip', os.path.join(build_dir, 'gn')])
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
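The tail of the removed script above wires the per-platform static libraries into a generated build.ninja and then rebuilds GN with itself, so the final build files reference ./gn rather than the temporary bootstrap binary. A minimal sketch of that two-pass flow, assuming subprocess.check_call and the same temp_gn/build_dir/SRC_ROOT conventions as the old script (illustrative only, not part of this change):

  import os
  import subprocess

  def bootstrap_gn(temp_gn, build_dir, src_root, gen_args=''):
    # Pass 1: generate build files with the temporary gn, then build gn itself.
    subprocess.check_call([temp_gn, 'gen', build_dir,
                           '--args=%s' % gen_args, '--root=' + src_root])
    subprocess.check_call(['ninja', '-C', build_dir, 'gn'])
    # Pass 2: regenerate with the gn that was just built so build.ninja
    # refers to "./gn" instead of the temporary binary.
    built_gn = os.path.join(build_dir, 'gn')
    subprocess.check_call([built_gn, 'gen', build_dir, '--args=%s' % gen_args])
    return built_gn
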
diff --git a/tools/gn/command_args.cc b/tools/gn/command_args.cc
index 9fbf67b..b3c6862 100644
--- a/tools/gn/command_args.cc
+++ b/tools/gn/command_args.cc
@@ -16,7 +16,7 @@
 #include "base/process/launch.h"
 #include "base/strings/string_number_conversions.h"
 #include "base/strings/string_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/commands.h"
 #include "tools/gn/filesystem_utils.h"
 #include "tools/gn/input_file.h"
diff --git a/tools/gn/commands.cc b/tools/gn/commands.cc
index 59d2391..38d672f 100644
--- a/tools/gn/commands.cc
+++ b/tools/gn/commands.cc
@@ -8,7 +8,7 @@
 #include "base/environment.h"
 #include "base/strings/string_split.h"
 #include "base/values.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/builder.h"
 #include "tools/gn/filesystem_utils.h"
 #include "tools/gn/item.h"
diff --git a/tools/gn/escape.cc b/tools/gn/escape.cc
index 685a100..1bcc1e7 100644
--- a/tools/gn/escape.cc
+++ b/tools/gn/escape.cc
@@ -7,7 +7,7 @@
 #include <stddef.h>
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 namespace {
 
diff --git a/tools/gn/exec_process.cc b/tools/gn/exec_process.cc
index 5453446..1009cec 100644
--- a/tools/gn/exec_process.cc
+++ b/tools/gn/exec_process.cc
@@ -14,7 +14,7 @@
 #include "base/process/kill.h"
 #include "base/process/launch.h"
 #include "base/process/process.h"
-#include "build/build_config.h"
+#include "build_config.h"
 
 #if defined(OS_WIN)
 #include <windows.h>
diff --git a/tools/gn/exec_process_unittest.cc b/tools/gn/exec_process_unittest.cc
index 51ce7b7..91b4d9f 100644
--- a/tools/gn/exec_process_unittest.cc
+++ b/tools/gn/exec_process_unittest.cc
@@ -7,7 +7,7 @@
 #include "base/command_line.h"
 #include "base/files/scoped_temp_dir.h"
 #include "base/strings/string_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 #if defined(OS_WIN)
diff --git a/tools/gn/filesystem_utils.cc b/tools/gn/filesystem_utils.cc
index f9f6794..37582a9 100644
--- a/tools/gn/filesystem_utils.cc
+++ b/tools/gn/filesystem_utils.cc
@@ -10,7 +10,7 @@
 #include "base/logging.h"
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/location.h"
 #include "tools/gn/settings.h"
 #include "tools/gn/source_dir.h"
diff --git a/tools/gn/filesystem_utils_unittest.cc b/tools/gn/filesystem_utils_unittest.cc
index dc2e512..e88cbc2 100644
--- a/tools/gn/filesystem_utils_unittest.cc
+++ b/tools/gn/filesystem_utils_unittest.cc
@@ -8,7 +8,7 @@
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
 #include "base/threading/platform_thread.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "tools/gn/filesystem_utils.h"
 #include "tools/gn/target.h"
diff --git a/tools/gn/function_exec_script.cc b/tools/gn/function_exec_script.cc
index 0f6397c..1c5c517 100644
--- a/tools/gn/function_exec_script.cc
+++ b/tools/gn/function_exec_script.cc
@@ -9,7 +9,7 @@
 #include "base/strings/utf_string_conversions.h"
 #include "base/threading/thread_restrictions.h"
 #include "base/time/time.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/err.h"
 #include "tools/gn/exec_process.h"
 #include "tools/gn/filesystem_utils.h"
diff --git a/tools/gn/function_get_path_info_unittest.cc b/tools/gn/function_get_path_info_unittest.cc
index 79020df..5b1b314 100644
--- a/tools/gn/function_get_path_info_unittest.cc
+++ b/tools/gn/function_get_path_info_unittest.cc
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "tools/gn/functions.h"
 #include "tools/gn/test_with_scope.h"
diff --git a/tools/gn/function_rebase_path_unittest.cc b/tools/gn/function_rebase_path_unittest.cc
index d89b56f..719511d 100644
--- a/tools/gn/function_rebase_path_unittest.cc
+++ b/tools/gn/function_rebase_path_unittest.cc
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "tools/gn/functions.h"
 #include "tools/gn/parse_tree.h"
diff --git a/tools/gn/function_write_file.cc b/tools/gn/function_write_file.cc
index bfa79b6..31bfc32 100644
--- a/tools/gn/function_write_file.cc
+++ b/tools/gn/function_write_file.cc
@@ -9,7 +9,7 @@
 #include "base/strings/string_split.h"
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/err.h"
 #include "tools/gn/filesystem_utils.h"
 #include "tools/gn/functions.h"
diff --git a/tools/gn/gn_main.cc b/tools/gn/gn_main.cc
index 4d51158..dac41f3 100644
--- a/tools/gn/gn_main.cc
+++ b/tools/gn/gn_main.cc
@@ -12,7 +12,7 @@
 #include "base/strings/utf_string_conversions.h"
 #include "base/sys_info.h"
 #include "base/task_scheduler/task_scheduler.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/commands.h"
 #include "tools/gn/err.h"
 #include "tools/gn/location.h"
diff --git a/tools/gn/label.cc b/tools/gn/label.cc
index 26e452f..5617a3a 100644
--- a/tools/gn/label.cc
+++ b/tools/gn/label.cc
@@ -6,7 +6,7 @@
 
 #include "base/logging.h"
 #include "base/strings/string_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/err.h"
 #include "tools/gn/filesystem_utils.h"
 #include "tools/gn/parse_tree.h"
diff --git a/tools/gn/label_pattern.cc b/tools/gn/label_pattern.cc
index b2568e7..5472be0 100644
--- a/tools/gn/label_pattern.cc
+++ b/tools/gn/label_pattern.cc
@@ -7,7 +7,7 @@
 #include <stddef.h>
 
 #include "base/strings/string_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/err.h"
 #include "tools/gn/filesystem_utils.h"
 #include "tools/gn/value.h"
diff --git a/tools/gn/label_unittest.cc b/tools/gn/label_unittest.cc
index 986aa9b..fbd8e40 100644
--- a/tools/gn/label_unittest.cc
+++ b/tools/gn/label_unittest.cc
@@ -5,7 +5,7 @@
 #include <stddef.h>
 
 #include "base/macros.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "tools/gn/err.h"
 #include "tools/gn/label.h"
diff --git a/tools/gn/ninja_action_target_writer_unittest.cc b/tools/gn/ninja_action_target_writer_unittest.cc
index 445a31a..d4f3d4a 100644
--- a/tools/gn/ninja_action_target_writer_unittest.cc
+++ b/tools/gn/ninja_action_target_writer_unittest.cc
@@ -5,7 +5,7 @@
 #include <algorithm>
 #include <sstream>
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "tools/gn/ninja_action_target_writer.h"
 #include "tools/gn/pool.h"
diff --git a/tools/gn/ninja_binary_target_writer_unittest.cc b/tools/gn/ninja_binary_target_writer_unittest.cc
index acbd88b..7224e04 100644
--- a/tools/gn/ninja_binary_target_writer_unittest.cc
+++ b/tools/gn/ninja_binary_target_writer_unittest.cc
@@ -8,7 +8,7 @@
 #include <sstream>
 #include <utility>
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "tools/gn/config.h"
 #include "tools/gn/scheduler.h"
diff --git a/tools/gn/ninja_build_writer.cc b/tools/gn/ninja_build_writer.cc
index 075592c..3af49dd 100644
--- a/tools/gn/ninja_build_writer.cc
+++ b/tools/gn/ninja_build_writer.cc
@@ -16,7 +16,7 @@
 #include "base/process/process_handle.h"
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/build_settings.h"
 #include "tools/gn/builder.h"
 #include "tools/gn/err.h"
diff --git a/tools/gn/path_output.cc b/tools/gn/path_output.cc
index 61c4ee0..9e1aef8 100644
--- a/tools/gn/path_output.cc
+++ b/tools/gn/path_output.cc
@@ -5,7 +5,7 @@
 #include "tools/gn/path_output.h"
 
 #include "base/strings/string_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/filesystem_utils.h"
 #include "tools/gn/output_file.h"
 #include "tools/gn/string_utils.h"
diff --git a/tools/gn/path_output_unittest.cc b/tools/gn/path_output_unittest.cc
index 6a7da19..f6051ed 100644
--- a/tools/gn/path_output_unittest.cc
+++ b/tools/gn/path_output_unittest.cc
@@ -5,7 +5,7 @@
 #include <sstream>
 
 #include "base/files/file_path.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "tools/gn/output_file.h"
 #include "tools/gn/path_output.h"
diff --git a/tools/gn/settings.cc b/tools/gn/settings.cc
index d206f5c..cf4aece 100644
--- a/tools/gn/settings.cc
+++ b/tools/gn/settings.cc
@@ -5,7 +5,7 @@
 #include "tools/gn/settings.h"
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/filesystem_utils.h"
 
 Settings::Settings(const BuildSettings* build_settings,
diff --git a/tools/gn/setup.cc b/tools/gn/setup.cc
index 36b6ec5..69def2e 100644
--- a/tools/gn/setup.cc
+++ b/tools/gn/setup.cc
@@ -22,7 +22,7 @@
 #include "base/strings/string_util.h"
 #include "base/strings/sys_string_conversions.h"
 #include "base/strings/utf_string_conversions.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/command_format.h"
 #include "tools/gn/commands.h"
 #include "tools/gn/filesystem_utils.h"
diff --git a/tools/gn/source_dir.cc b/tools/gn/source_dir.cc
index 17eeb61..31f9582 100644
--- a/tools/gn/source_dir.cc
+++ b/tools/gn/source_dir.cc
@@ -5,7 +5,7 @@
 #include "tools/gn/source_dir.h"
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/filesystem_utils.h"
 #include "tools/gn/source_file.h"
 
diff --git a/tools/gn/source_dir_unittest.cc b/tools/gn/source_dir_unittest.cc
index 80f9a5f..87d8e1b 100644
--- a/tools/gn/source_dir_unittest.cc
+++ b/tools/gn/source_dir_unittest.cc
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "tools/gn/err.h"
 #include "tools/gn/source_dir.h"
diff --git a/tools/gn/source_file.cc b/tools/gn/source_file.cc
index befc5a5..d8d6c02 100644
--- a/tools/gn/source_file.cc
+++ b/tools/gn/source_file.cc
@@ -5,7 +5,7 @@
 #include "tools/gn/source_file.h"
 
 #include "base/logging.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/filesystem_utils.h"
 #include "tools/gn/source_dir.h"
 
diff --git a/tools/gn/standard_out.cc b/tools/gn/standard_out.cc
index 8a6b7ef..f508e8c 100644
--- a/tools/gn/standard_out.cc
+++ b/tools/gn/standard_out.cc
@@ -13,7 +13,7 @@
 #include "base/strings/string_piece.h"
 #include "base/strings/string_split.h"
 #include "base/strings/string_util.h"
-#include "build/build_config.h"
+#include "build_config.h"
 #include "tools/gn/switches.h"
 
 #if defined(OS_WIN)
diff --git a/tools/gn/substitution_writer_unittest.cc b/tools/gn/substitution_writer_unittest.cc
index d98d4ce..9dc1781 100644
--- a/tools/gn/substitution_writer_unittest.cc
+++ b/tools/gn/substitution_writer_unittest.cc
@@ -4,7 +4,7 @@
 
 #include <sstream>
 
-#include "build/build_config.h"
+#include "build_config.h"
 #include "testing/gtest/include/gtest/gtest.h"
 #include "tools/gn/err.h"
 #include "tools/gn/escape.h"