├── checkdeps ├── DEPS ├── testdata │ ├── disallowed │ │ ├── allowed │ │ │ ├── DEPS │ │ │ ├── skipped │ │ │ │ └── test.h │ │ │ └── test.h │ │ ├── foo_unittest.cc │ │ └── test.h │ ├── checkdeps_test │ │ ├── disallowed │ │ │ ├── allowed │ │ │ │ ├── DEPS │ │ │ │ ├── skipped │ │ │ │ │ └── test.h │ │ │ │ └── test.h │ │ │ └── test.h │ │ ├── DEPS │ │ └── allowed │ │ │ ├── not_a_test.cc │ │ │ ├── foo_unittest.cc │ │ │ ├── DEPS │ │ │ └── test.h │ ├── noparent │ │ ├── DEPS │ │ └── test.h │ ├── DEPS │ └── allowed │ │ ├── not_a_test.cc │ │ ├── foo_unittest.cc │ │ ├── DEPS │ │ └── test.h ├── PRESUBMIT.py ├── README.md ├── proto_checker.py ├── cpp_checker.py ├── results.py ├── rules.py ├── java_checker.py ├── checkdeps_test.py ├── checkdeps.py ├── builddeps.py └── graphdeps.py ├── clang_format ├── OWNERS ├── README.chromium └── README.txt ├── linux64 └── clang-format.sha1 ├── mac ├── clang-format.arm64.sha1 └── clang-format.x64.sha1 ├── reclient_cfgs ├── OWNERS ├── .gitignore ├── README.md ├── rewrapper_chroot_compile.cfg ├── rewrapper_linux.cfg └── fetch_reclient_cfgs.py ├── win └── clang-format.exe.sha1 ├── android └── doclava.tar.gz.sha1 ├── third_party ├── libc++ │ ├── OWNERS │ ├── README.chromium │ ├── __config_site │ └── BUILD.gn ├── eu-strip │ ├── OWNERS │ ├── bin │ │ └── eu-strip │ ├── README.chromium │ ├── build.sh │ └── fix-elf-size.patch ├── libc++abi │ ├── OWNERS │ ├── README.chromium │ ├── cxa_demangle_stub.cc │ └── BUILD.gn └── libunwind │ ├── OWNERS │ ├── README.chromium │ └── BUILD.gn ├── DIR_METADATA ├── OWNERS ├── codereview.settings ├── .gitignore ├── deps_revisions.gni ├── README.txt └── LICENSE /checkdeps/DEPS: -------------------------------------------------------------------------------- 1 | skip_child_includes = [ 2 | "testdata", 3 | ] 4 | -------------------------------------------------------------------------------- /clang_format/OWNERS: -------------------------------------------------------------------------------- 1 | nick@chromium.org 2 | 
thakis@chromium.org 3 | -------------------------------------------------------------------------------- /linux64/clang-format.sha1: -------------------------------------------------------------------------------- 1 | dd736afb28430c9782750fc0fd5f0ed497399263 -------------------------------------------------------------------------------- /mac/clang-format.arm64.sha1: -------------------------------------------------------------------------------- 1 | f1424c44ee758922823d6b37de43705955c99d7e -------------------------------------------------------------------------------- /mac/clang-format.x64.sha1: -------------------------------------------------------------------------------- 1 | a1b33be85faf2578f3101d7806e443e1c0949498 -------------------------------------------------------------------------------- /reclient_cfgs/OWNERS: -------------------------------------------------------------------------------- 1 | tikuta@chromium.org 2 | ukai@google.com 3 | -------------------------------------------------------------------------------- /win/clang-format.exe.sha1: -------------------------------------------------------------------------------- 1 | 66882fadbf9e99cc00b8677d8c1e7e8b3cfdf4fe -------------------------------------------------------------------------------- /android/doclava.tar.gz.sha1: -------------------------------------------------------------------------------- 1 | 1931becb8a8e21685f39c62854e9e814d64ccf1a 2 | -------------------------------------------------------------------------------- /third_party/libc++/OWNERS: -------------------------------------------------------------------------------- 1 | thakis@chromium.org 2 | thomasanderson@chromium.org 3 | -------------------------------------------------------------------------------- /third_party/eu-strip/OWNERS: -------------------------------------------------------------------------------- 1 | thestig@chromium.org 2 | thomasanderson@chromium.org 3 | 
-------------------------------------------------------------------------------- /third_party/libc++abi/OWNERS: -------------------------------------------------------------------------------- 1 | thakis@chromium.org 2 | thomasanderson@chromium.org 3 | -------------------------------------------------------------------------------- /checkdeps/testdata/disallowed/allowed/DEPS: -------------------------------------------------------------------------------- 1 | skip_child_includes = [ 2 | "skipped", 3 | ] 4 | -------------------------------------------------------------------------------- /DIR_METADATA: -------------------------------------------------------------------------------- 1 | monorail { 2 | component: "Build" 3 | } 4 | 5 | team_email: "build@chromium.org" 6 | -------------------------------------------------------------------------------- /checkdeps/testdata/checkdeps_test/disallowed/allowed/DEPS: -------------------------------------------------------------------------------- 1 | skip_child_includes = [ 2 | "skipped", 3 | ] 4 | -------------------------------------------------------------------------------- /reclient_cfgs/.gitignore: -------------------------------------------------------------------------------- 1 | /chromium-browser-clang/ 2 | /nacl/ 3 | /python/ 4 | /win-cross-experiments/ 5 | reproxy.cfg 6 | -------------------------------------------------------------------------------- /reclient_cfgs/README.md: -------------------------------------------------------------------------------- 1 | This directory contains the config files accepted by re-client's rewrapper command in place of inline flags. 
2 | -------------------------------------------------------------------------------- /third_party/eu-strip/bin/eu-strip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/denoland/chromium_buildtools/upstream/third_party/eu-strip/bin/eu-strip -------------------------------------------------------------------------------- /OWNERS: -------------------------------------------------------------------------------- 1 | set noparent 2 | thakis@chromium.org 3 | thomasanderson@chromium.org 4 | 5 | # For the libc++ autoroller. 6 | per-file deps_revisions.gni=* 7 | -------------------------------------------------------------------------------- /checkdeps/testdata/checkdeps_test/DEPS: -------------------------------------------------------------------------------- 1 | include_rules = [ 2 | "-disallowed", 3 | "+allowed", 4 | "-third_party/explicitly_disallowed", 5 | ] 6 | -------------------------------------------------------------------------------- /third_party/libunwind/OWNERS: -------------------------------------------------------------------------------- 1 | # You may only be an owner of libunwind if your username begins with 'th'. 2 | thakis@chromium.org 3 | thomasanderson@chromium.org 4 | -------------------------------------------------------------------------------- /checkdeps/testdata/noparent/DEPS: -------------------------------------------------------------------------------- 1 | # Removes the rules inherited from ../DEPS. In particular, 2 | # checkdeps/testdata/allowed is no longer allowed. 3 | noparent = True 4 | -------------------------------------------------------------------------------- /codereview.settings: -------------------------------------------------------------------------------- 1 | # This file is used by git-cl to get repository specific information. 
2 | GERRIT_HOST: True 3 | CODE_REVIEW_SERVER: codereview.chromium.org 4 | PROJECT: buildtools 5 | -------------------------------------------------------------------------------- /checkdeps/testdata/DEPS: -------------------------------------------------------------------------------- 1 | include_rules = [ 2 | "-buildtools/checkdeps/testdata/disallowed", 3 | "+buildtools/checkdeps/testdata/allowed", 4 | "-third_party/explicitly_disallowed", 5 | ] 6 | skip_child_includes = [ 7 | "checkdeps_test", 8 | ] 9 | -------------------------------------------------------------------------------- /checkdeps/testdata/disallowed/allowed/skipped/test.h: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #include "whatever/whocares/ok.h" 6 | -------------------------------------------------------------------------------- /checkdeps/testdata/checkdeps_test/allowed/not_a_test.cc: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #include "disallowed/teststuff/bad.h" 6 | -------------------------------------------------------------------------------- /checkdeps/testdata/checkdeps_test/allowed/foo_unittest.cc: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | #include "disallowed/teststuff/good.h" 6 | -------------------------------------------------------------------------------- /checkdeps/testdata/checkdeps_test/disallowed/allowed/skipped/test.h: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #include "whatever/whocares/ok.h" 6 | -------------------------------------------------------------------------------- /checkdeps/testdata/allowed/not_a_test.cc: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #include "buildtools/checkdeps/testdata/disallowed/teststuff/bad.h" 6 | -------------------------------------------------------------------------------- /checkdeps/testdata/allowed/foo_unittest.cc: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 
4 | 5 | #include "buildtools/checkdeps/testdata/disallowed/teststuff/good.h" 6 | -------------------------------------------------------------------------------- /checkdeps/testdata/checkdeps_test/allowed/DEPS: -------------------------------------------------------------------------------- 1 | include_rules = [ 2 | "+disallowed/allowed", 3 | "!disallowed/temporarily_allowed.h", 4 | "+third_party/allowed_may_use", 5 | ] 6 | 7 | specific_include_rules = { 8 | ".*_unittest\.cc": [ 9 | "+disallowed/teststuff", 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /third_party/libc++/README.chromium: -------------------------------------------------------------------------------- 1 | Name: libcxx 2 | Short Name: libc++ 3 | URL: http://libcxx.llvm.org/ 4 | Version: 1.0 5 | License: MIT, University of Illinois/NCSA Open Source License 6 | License File: trunk/LICENSE.TXT 7 | Security Critical: yes 8 | 9 | Description: 10 | 11 | libc++ for Chromium. 12 | -------------------------------------------------------------------------------- /third_party/eu-strip/README.chromium: -------------------------------------------------------------------------------- 1 | Name: eu-strip 2 | URL: https://sourceware.org/elfutils/ 3 | Version: 0.158 4 | Security Critical: no 5 | License: LGPL 3 6 | License File: NOT_SHIPPED 7 | 8 | Description: 9 | 10 | Patched eu-strip from elfutils. 11 | 12 | To build (on Trusty): ./build.sh in this directory. 13 | -------------------------------------------------------------------------------- /third_party/libc++abi/README.chromium: -------------------------------------------------------------------------------- 1 | Name: libcxxabi 2 | Short Name: libc++abi 3 | URL: http://libcxxabi.llvm.org/ 4 | Version: 1.0 5 | License: MIT, University of Illinois/NCSA Open Source License 6 | License File: trunk/LICENSE.TXT 7 | Security Critical: yes 8 | 9 | Description: 10 | 11 | libc++abi for Chromium. 
12 | -------------------------------------------------------------------------------- /reclient_cfgs/rewrapper_chroot_compile.cfg: -------------------------------------------------------------------------------- 1 | platform=container-image=docker://gcr.io/cloud-marketplace/google/rbe-ubuntu16-04@sha256:f6568d8168b14aafd1b707019927a63c2d37113a03bcee188218f99bd0327ea1,dockerChrootPath=.,dockerRuntime=runsc 2 | server_address=unix:///tmp/reproxy.sock 3 | labels=type=compile,compiler=clang,lang=cpp 4 | exec_root=/ 5 | env_var_allowlist=PATH -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | android/doclava.tar.gz 3 | android/doclava/ 4 | clang_format/script/ 5 | linux64/.versions 6 | linux64/clang-format 7 | linux64/gn 8 | mac/.versions 9 | mac/clang-format 10 | mac/gn 11 | reclient 12 | third_party/libc++/trunk 13 | third_party/libc++abi/trunk 14 | third_party/libunwind/trunk 15 | win/.versions 16 | win/clang-format.exe 17 | win/gn.exe 18 | -------------------------------------------------------------------------------- /deps_revisions.gni: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The Chromium Authors 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | declare_args() { 6 | # Used to cause full rebuilds on libc++ rolls. This should be kept in sync 7 | # with the libcxx_revision vars in //DEPS. 
8 | libcxx_revision = "52399655fdafdd14ade17ab12ddc9e955423aa5a" 9 | } 10 | -------------------------------------------------------------------------------- /checkdeps/testdata/allowed/DEPS: -------------------------------------------------------------------------------- 1 | include_rules = [ 2 | "+buildtools/checkdeps/testdata/disallowed/allowed", 3 | "!buildtools/checkdeps/testdata/disallowed/temporarily_allowed.h", 4 | "+third_party/allowed_may_use", 5 | ] 6 | 7 | specific_include_rules = { 8 | ".*_unittest\.cc": [ 9 | "+buildtools/checkdeps/testdata/disallowed/teststuff", 10 | "!buildtools/checkdeps/testdata/bongo/temp_allowed_for_tests.h", 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /checkdeps/testdata/noparent/test.h: -------------------------------------------------------------------------------- 1 | // Copyright 2018 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | // Disallowed because noparent removes the +allowed from the parent dir. 6 | #include "buildtools/checkdeps/testdata/allowed/bad.h" 7 | 8 | // Same-directory includes are still allowed. 9 | #include "buildtools/checkdeps/testdata/noparent/self.h" 10 | -------------------------------------------------------------------------------- /clang_format/README.chromium: -------------------------------------------------------------------------------- 1 | Name: clang-format 2 | Short Name: clang-format 3 | URL: https://github.com/llvm/llvm-project/tree/main/clang/tools/clang-format 4 | Version: See DEPS 5 | Date: 26 January 2021 6 | Revision: See DEPS 7 | License: University of Illinois/NCSA Open Source License 8 | License File: NOT_SHIPPED 9 | Security Critical: No 10 | 11 | Description: 12 | A tool for formatting C++ code to style. 
13 | 14 | Local Modifications: 15 | None 16 | -------------------------------------------------------------------------------- /checkdeps/testdata/checkdeps_test/allowed/test.h: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #include "allowed/good.h" 6 | #include "disallowed/bad.h" 7 | #include "disallowed/allowed/good.h" 8 | #include "disallowed/temporarily_allowed.h" 9 | #include "third_party/explicitly_disallowed/bad.h" 10 | #include "third_party/allowed_may_use/good.h" 11 | #include "third_party/no_rule/bad.h" 12 | -------------------------------------------------------------------------------- /checkdeps/testdata/checkdeps_test/disallowed/allowed/test.h: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #include "allowed/good.h" 6 | // Always allowed to include self and parents. 7 | #include "disallowed/good.h" 8 | #include "disallowed/allowed/good.h" 9 | #include "third_party/explicitly_disallowed/bad.h" 10 | #include "third_party/allowed_may_use/bad.h" 11 | #include "third_party/no_rule/bad.h" 12 | -------------------------------------------------------------------------------- /checkdeps/testdata/checkdeps_test/disallowed/test.h: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #include "allowed/good.h" 6 | // Always allowed to include self. 
7 | #include "disallowed/good.h" 8 | #include "disallowed/allowed/good.h" 9 | #include "third_party/explicitly_disallowed/bad.h" 10 | // Only allowed for code under allowed/. 11 | #include "third_party/allowed_may_use/bad.h" 12 | #include "third_party/no_rule/bad.h" 13 | -------------------------------------------------------------------------------- /third_party/libunwind/README.chromium: -------------------------------------------------------------------------------- 1 | Name: libunwind 2 | URL: https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git 3 | Version: ToT (Top of the Tree, the mirror is kept in sync with the upstream project) 4 | License: MIT, University of Illinois/NCSA Open Source License 5 | License File: trunk/LICENSE.TXT 6 | Security Critical: yes 7 | 8 | Description: 9 | 10 | libunwind for Chromium. This provides stack unwind functionality on Fuchsia and 11 | Android builds, and is additionally a dependency of libc++abi on Arm builds. 12 | -------------------------------------------------------------------------------- /checkdeps/testdata/disallowed/allowed/test.h: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #include "buildtools/checkdeps/testdata/allowed/good.h" 6 | // Always allowed to include self and parents. 
7 | #include "buildtools/checkdeps/testdata/disallowed/good.h" 8 | #include "buildtools/checkdeps/testdata/disallowed/allowed/good.h" 9 | #include "third_party/explicitly_disallowed/bad.h" 10 | #include "third_party/allowed_may_use/bad.h" 11 | #include "third_party/no_rule/bad.h" 12 | -------------------------------------------------------------------------------- /checkdeps/testdata/allowed/test.h: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #include "buildtools/checkdeps/testdata/allowed/good.h" 6 | #include "buildtools/checkdeps/testdata/disallowed/bad.h" 7 | #include "buildtools/checkdeps/testdata/disallowed/allowed/good.h" 8 | #include "buildtools/checkdeps/testdata/disallowed/temporarily_allowed.h" 9 | #include "third_party/explicitly_disallowed/bad.h" 10 | #include "third_party/allowed_may_use/good.h" 11 | #include "third_party/no_rule/bad.h" 12 | -------------------------------------------------------------------------------- /checkdeps/testdata/disallowed/foo_unittest.cc: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | // Not allowed for code under disallowed/ but temporarily allowed 6 | // specifically for test code under allowed/. This regression tests a 7 | // bug where we were taking shallow copies of rules when generating 8 | // rules for subdirectories, so all rule objects were getting the same 9 | // dictionary for specific rules. 
10 | #include "buildtools/checkdeps/testdata/disallowed/temp_allowed_for_tests.h" 11 | -------------------------------------------------------------------------------- /checkdeps/testdata/disallowed/test.h: -------------------------------------------------------------------------------- 1 | // Copyright 2012 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #include "buildtools/checkdeps/testdata/allowed/good.h" 6 | // Always allowed to include self. 7 | #include "buildtools/checkdeps/testdata/disallowed/good.h" 8 | #include "buildtools/checkdeps/testdata/disallowed/allowed/good.h" 9 | #include "third_party/explicitly_disallowed/bad.h" 10 | // Only allowed for code under allowed/. 11 | #include "third_party/allowed_may_use/bad.h" 12 | #include "third_party/no_rule/bad.h" 13 | -------------------------------------------------------------------------------- /third_party/eu-strip/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -xe 2 | 3 | rm -rf elfutils 4 | git clone git://sourceware.org/git/elfutils.git 5 | cd elfutils 6 | git checkout elfutils-0.170 7 | autoheader 8 | aclocal 9 | autoconf 10 | automake --add-missing 11 | patch -p1 < ../fix-elf-size.patch 12 | mkdir build 13 | cd build 14 | ../configure --enable-maintainer-mode 15 | make -j40 16 | gcc -std=gnu99 -Wall -Wshadow -Wunused -Wextra -fgnu89-inline \ 17 | -Wformat=2 -Werror -g -O2 -Wl,-rpath-link,libelf:libdw -Wl,--build-id=none -o eu-strip \ 18 | src/strip.o libebl/libebl.a libelf/libelf.a lib/libeu.a libdw/libdw.a -ldl -lz 19 | ./eu-strip -o ../../bin/eu-strip eu-strip 20 | -------------------------------------------------------------------------------- /reclient_cfgs/rewrapper_linux.cfg: -------------------------------------------------------------------------------- 1 | #TODO(crbug.com/1245895): remove this when remote_toolchain_inputs is provided 2 | # 
with the toolchain. This file is used by 3 | # cros_chrome_sdk to generate rewrapper.cfg for 4 | # ChromeOS (simple chrome). 5 | platform=container-image=docker://gcr.io/cloud-marketplace/google/rbe-ubuntu16-04@sha256:b4dad0bfc4951d619229ab15343a311f2415a16ef83bcaa55b44f4e2bf1cf635 6 | server_address=unix:///tmp/reproxy.sock 7 | labels=type=compile,compiler=clang,lang=cpp 8 | exec_strategy=remote_local_fallback 9 | inputs=third_party/llvm-build/Release+Asserts/lib/libstdc++.so.6 10 | -------------------------------------------------------------------------------- /checkdeps/PRESUBMIT.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 The Chromium Authors 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | """Presubmit script for checkdeps tool. 6 | """ 7 | 8 | USE_PYTHON3 = True 9 | 10 | 11 | def CheckChange(input_api, output_api): 12 | return input_api.canned_checks.RunUnitTests( 13 | input_api, output_api, 14 | [input_api.os_path.join(input_api.PresubmitLocalPath(), 15 | 'checkdeps_test.py')], 16 | # `run_on_python3` defaults to `True`. 17 | run_on_python2=not USE_PYTHON3) 18 | 19 | 20 | # Mandatory entrypoint. 21 | def CheckChangeOnUpload(input_api, output_api): 22 | return CheckChange(input_api, output_api) 23 | 24 | 25 | # Mandatory entrypoint. 26 | def CheckChangeOnCommit(input_api, output_api): 27 | return CheckChange(input_api, output_api) 28 | -------------------------------------------------------------------------------- /third_party/libc++abi/cxa_demangle_stub.cc: -------------------------------------------------------------------------------- 1 | // Copyright 2018 The Chromium Authors 2 | // Use of this source code is governed by a BSD-style license that can be 3 | // found in the LICENSE file. 4 | 5 | #include 6 | 7 | #include <__cxxabi_config.h> 8 | 9 | extern "C" 10 | // LLVM's demangler is large, and we have no need of it. 
Overriding it with 11 | // our own stub version here stops a lot of code being pulled in from libc++. 12 | // More here: 13 | // https://llvm.org/svn/llvm-project/libcxxabi/trunk/src/cxa_demangle.cpp 14 | _LIBCXXABI_FUNC_VIS 15 | // This is a weak symbol to let android_crazy_linker override it in 16 | // //base/android/linker:chromium_android_linker. 17 | _LIBCXXABI_WEAK char* __cxa_demangle(const char* mangled_name, 18 | char* buf, 19 | size_t* n, 20 | int* status) { 21 | static const int kMemoryAllocFailure = -1; // LLVM's memory_alloc_failure. 22 | if (status) 23 | *status = kMemoryAllocFailure; 24 | return nullptr; 25 | } 26 | -------------------------------------------------------------------------------- /README.txt: -------------------------------------------------------------------------------- 1 | This directory contains hashes of build tools used by Chromium and related 2 | projects. The actual binaries are pulled from Google Storage, normally as part 3 | of a gclient hook. 4 | 5 | This directory also exists as a stand-alone git mirror at 6 | https://chromium.googlesource.com/chromium/src/buildtools/. 7 | That mirror exists so that the shared build tools can be shared between 8 | the various Chromium-related projects without each one needing to maintain 9 | their own versionining of each binary. 10 | 11 | ________________________ 12 | ADDING BINARIES MANUALLY 13 | 14 | One uploads new versions of the tools using the 'gsutil' binary from the 15 | Google Storage SDK: 16 | 17 | https://developers.google.com/storage/docs/gsutil 18 | 19 | There is a checked-in version of gsutil as part of depot_tools. 20 | 21 | To initialize gsutil's credentials: 22 | 23 | python ~/depot_tools/third_party/gsutil/gsutil config 24 | 25 | That will give a URL which you should log into with your web browser. 26 | 27 | Copy the code back to the command line util. Ignore the project ID (it's OK 28 | to just leave blank when prompted). 
29 | -------------------------------------------------------------------------------- /clang_format/README.txt: -------------------------------------------------------------------------------- 1 | This folder contains clang-format scripts. The binaries will be automatically 2 | downloaded from Google Storage by gclient runhooks for the current platform. 3 | 4 | For a walkthrough on how to maintain these binaries: 5 | https://chromium.googlesource.com/chromium/src/+/main/docs/updating_clang_format_binaries.md 6 | 7 | To upload a file: 8 | python ~/depot_tools/upload_to_google_storage.py -b chromium-clang-format 9 | 10 | On Linux and Mac, check that clang-format has its +x bit set before you run this 11 | upload command. Don't upload Linux and Mac binaries from Windows, since 12 | upload_to_google_storage.py will not set the +x bit on google storage when it's 13 | run from Windows. 14 | 15 | To download a file given a .sha1 file: 16 | python ~/depot_tools/download_from_google_storage.py -b chromium-clang-format -s .sha1 17 | 18 | List the contents of GN's Google Storage bucket: 19 | python ~/depot_tools/third_party/gsutil/gsutil ls gs://chromium-clang-format/ 20 | 21 | To initialize gsutil's credentials: 22 | python ~/depot_tools/third_party/gsutil/gsutil config 23 | 24 | That will give a URL which you should log into with your web browser. The 25 | username should be the one that is on the ACL for the "chromium-clang-format" 26 | bucket (probably your @google.com address). Contact the build team for help 27 | getting access if necessary. 28 | 29 | Copy the code back to the command line util. Ignore the project ID (it's OK 30 | to just leave blank when prompted). 
31 | 32 | gsutil documentation: 33 | https://developers.google.com/storage/docs/gsutil 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | // Copyright 2014 The Chromium Authors 2 | // 3 | // Redistribution and use in source and binary forms, with or without 4 | // modification, are permitted provided that the following conditions are 5 | // met: 6 | // 7 | // * Redistributions of source code must retain the above copyright 8 | // notice, this list of conditions and the following disclaimer. 9 | // * Redistributions in binary form must reproduce the above 10 | // copyright notice, this list of conditions and the following disclaimer 11 | // in the documentation and/or other materials provided with the 12 | // distribution. 13 | // * Neither the name of Google Inc. nor the names of its 14 | // contributors may be used to endorse or promote products derived from 15 | // this software without specific prior written permission. 16 | // 17 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 18 | // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 19 | // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 20 | // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 21 | // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 22 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 23 | // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 24 | // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 25 | // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
28 | -------------------------------------------------------------------------------- /third_party/libunwind/BUILD.gn: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The Chromium Authors 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | import("//build/config/c++/c++.gni") 6 | 7 | config("libunwind_config") { 8 | defines = [ "_LIBUNWIND_IS_NATIVE_ONLY" ] 9 | cflags = [ 10 | "-fstrict-aliasing", 11 | "-fPIC", 12 | 13 | # ValueAsBitPattern in Unwind-EHABI.cpp is only used on Debug builds. 14 | "-Wno-unused-function", 15 | 16 | # libunwind expects to be compiled with unwind tables so it can 17 | # unwind its own frames. 18 | "-funwind-tables", 19 | ] 20 | } 21 | 22 | source_set("libunwind") { 23 | visibility = [] 24 | if (is_fuchsia) { 25 | visibility += [ "//buildtools/third_party/libc++abi" ] 26 | } else if (is_android) { 27 | visibility += [ "//buildtools/third_party/libc++abi" ] 28 | visibility += [ "//services/tracing/public/cpp" ] 29 | } 30 | if (!is_component_build) { 31 | defines = [ "_LIBUNWIND_DISABLE_VISIBILITY_ANNOTATIONS" ] 32 | } 33 | include_dirs = [ "//buildtools/third_party/libunwind/trunk/include" ] 34 | sources = [ 35 | # C++ sources 36 | "trunk/src/Unwind-EHABI.cpp", 37 | "trunk/src/libunwind.cpp", 38 | 39 | # C sources 40 | "trunk/src/Unwind-sjlj.c", 41 | "trunk/src/UnwindLevel1-gcc-ext.c", 42 | "trunk/src/UnwindLevel1.c", 43 | 44 | # ASM sources 45 | "trunk/src/UnwindRegistersRestore.S", 46 | "trunk/src/UnwindRegistersSave.S", 47 | ] 48 | configs -= [ 49 | "//build/config/compiler:chromium_code", 50 | "//build/config/compiler:no_exceptions", 51 | "//build/config/compiler:no_rtti", 52 | "//build/config/coverage:default_coverage", 53 | "//build/config/compiler:default_optimization", 54 | ] 55 | configs += [ 56 | "//build/config/compiler:no_chromium_code", 57 | "//build/config/compiler:exceptions", 58 | 
"//build/config/compiler:rtti", 59 | "//build/config/compiler:optimize_speed", 60 | 61 | # Must be after no_chromium_code 62 | ":libunwind_config", 63 | ] 64 | } 65 | -------------------------------------------------------------------------------- /third_party/libc++/__config_site: -------------------------------------------------------------------------------- 1 | #ifndef _LIBCPP_CONFIG_SITE 2 | #define _LIBCPP_CONFIG_SITE 3 | 4 | // We set a custom _LIBCPP_ABI_NAMESPACE for the following reasons: 5 | // 6 | // 1. When libcxx_is_shared is true, symbols from libc++.so are exported for all 7 | // DSOs to use. If the system libc++ gets loaded (indirectly through a 8 | // a system library), then it will conflict with our libc++.so. 9 | // 2. The default value of _LIBCPP_ABI_NAMESPACE is the string 10 | // "_LIBCPP_ABI_NAMESPACE". This contributes to an increase in binary size; 11 | // on Windows, the increase is great enough that we go above the 4GB size 12 | // limit for PDBs (https://crbug.com/1327710#c5). To fix this, we set 13 | // _LIBCPP_ABI_NAMESPACE to a shorter value. 
14 | #define _LIBCPP_ABI_NAMESPACE Cr 15 | #define _LIBCPP_ABI_VERSION 2 16 | 17 | /* #undef _LIBCPP_ABI_FORCE_ITANIUM */ 18 | /* #undef _LIBCPP_ABI_FORCE_MICROSOFT */ 19 | /* #undef _LIBCPP_HAS_NO_THREADS */ 20 | /* #undef _LIBCPP_HAS_NO_MONOTONIC_CLOCK */ 21 | /* #undef _LIBCPP_HAS_MUSL_LIBC */ 22 | /* #undef _LIBCPP_HAS_THREAD_API_PTHREAD */ 23 | /* #undef _LIBCPP_HAS_THREAD_API_EXTERNAL */ 24 | /* #undef _LIBCPP_HAS_THREAD_API_WIN32 */ 25 | /* #undef _LIBCPP_HAS_THREAD_LIBRARY_EXTERNAL */ 26 | /* #undef _LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS */ 27 | #define _LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS 28 | /* #undef _LIBCPP_NO_VCRUNTIME */ 29 | /* #undef _LIBCPP_TYPEINFO_COMPARISON_IMPLEMENTATION */ 30 | /* #undef _LIBCPP_HAS_NO_FILESYSTEM_LIBRARY */ 31 | /* #undef _LIBCPP_HAS_PARALLEL_ALGORITHMS */ 32 | /* #undef _LIBCPP_HAS_NO_RANDOM_DEVICE */ 33 | /* #undef _LIBCPP_HAS_NO_LOCALIZATION */ 34 | /* #undef _LIBCPP_HAS_NO_WIDE_CHARACTERS */ 35 | 36 | // Settings below aren't part of __config_site upstream. 37 | // We set them here since we want them to take effect everywhere, 38 | // unconditionally. 39 | 40 | // Prevent libc++ from embedding linker flags to try to automatically link 41 | // against its runtime library. This is unnecessary with our build system, 42 | // and can also result in build failures if libc++'s name for a library 43 | // does not match ours. Only has an effect on Windows. 
44 | #define _LIBCPP_NO_AUTO_LINK 45 | 46 | #define _LIBCPP_REMOVE_TRANSITIVE_INCLUDES 47 | 48 | #endif // _LIBCPP_CONFIG_SITE 49 | -------------------------------------------------------------------------------- /third_party/eu-strip/fix-elf-size.patch: -------------------------------------------------------------------------------- 1 | diff --git a/libelf/elf32_updatenull.c b/libelf/elf32_updatenull.c 2 | index d83c0b3f..507e707b 100644 3 | --- a/libelf/elf32_updatenull.c 4 | +++ b/libelf/elf32_updatenull.c 5 | @@ -137,7 +137,7 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum) 6 | return -1; 7 | 8 | /* At least the ELF header is there. */ 9 | - off_t size = elf_typesize (LIBELFBITS, ELF_T_EHDR, 1); 10 | + ElfW2(LIBELFBITS,Off) size = elf_typesize (LIBELFBITS, ELF_T_EHDR, 1); 11 | 12 | /* Set the program header position. */ 13 | if (elf->state.ELFW(elf,LIBELFBITS).phdr == NULL) 14 | @@ -152,7 +152,7 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum) 15 | { 16 | /* The user is supposed to fill out e_phoff. Use it and 17 | e_phnum to determine the maximum extend. */ 18 | - size = MAX ((size_t) size, 19 | + size = MAX (size, 20 | ehdr->e_phoff 21 | + elf_typesize (LIBELFBITS, ELF_T_PHDR, phnum)); 22 | } 23 | @@ -330,7 +330,7 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum) 24 | 25 | if (elf->flags & ELF_F_LAYOUT) 26 | { 27 | - size = MAX ((GElf_Word) size, 28 | + size = MAX (size, 29 | (shdr->sh_type != SHT_NOBITS 30 | ? 
shdr->sh_offset + shdr->sh_size : 0)); 31 | 32 | @@ -352,9 +352,9 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum) 33 | update_if_changed (shdr->sh_addralign, sh_align, 34 | scn->shdr_flags); 35 | 36 | - size = (size + sh_align - 1) & ~(sh_align - 1); 37 | + size = (size + sh_align - 1) & ~(ElfW2(LIBELFBITS,Off))(sh_align - 1); 38 | int offset_changed = 0; 39 | - update_if_changed (shdr->sh_offset, (GElf_Word) size, 40 | + update_if_changed (shdr->sh_offset, size, 41 | offset_changed); 42 | changed |= offset_changed; 43 | 44 | @@ -416,7 +416,7 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum) 45 | /* The user is supposed to fill out e_shoff. Use it and 46 | e_shnum (or sh_size of the dummy, first section header) 47 | to determine the maximum extend. */ 48 | - size = MAX ((GElf_Word) size, 49 | + size = MAX (size, 50 | (ehdr->e_shoff 51 | + (elf_typesize (LIBELFBITS, ELF_T_SHDR, shnum)))); 52 | } 53 | @@ -430,7 +430,7 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum) 54 | #define SHDR_ALIGN sizeof (ElfW2(LIBELFBITS,Off)) 55 | size = (size + SHDR_ALIGN - 1) & ~(SHDR_ALIGN - 1); 56 | 57 | - update_if_changed (ehdr->e_shoff, (GElf_Word) size, elf->flags); 58 | + update_if_changed (ehdr->e_shoff, size, elf->flags); 59 | 60 | /* Account for the section header size. */ 61 | size += elf_typesize (LIBELFBITS, ELF_T_SHDR, shnum); 62 | -------------------------------------------------------------------------------- /third_party/libc++abi/BUILD.gn: -------------------------------------------------------------------------------- 1 | # Copyright 2015 The Chromium Authors 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 
# The C++ ABI runtime library: exception-handling machinery (cxa_exception*,
# cxa_personality), RTTI support (private_typeinfo) and name demangling,
# built from the upstream sources under trunk/.
source_set("libc++abi") {
  if (export_libcxxabi_from_executables) {
    visibility = [ "//build/config:executable_deps" ]
  } else {
    visibility = [ "//buildtools/third_party/libc++" ]
  }

  # Fuchsia builds don't link against any libraries that provide stack
  # unwinding symbols, unlike Linux does with glibc (same applies for Android).
  # Build and link against libunwind manually to get this functionality.
  if (is_fuchsia || is_android) {
    deps = [ "//buildtools/third_party/libunwind" ]
  }

  sources = [
    "trunk/src/abort_message.cpp",
    "trunk/src/cxa_aux_runtime.cpp",
    "trunk/src/cxa_default_handlers.cpp",
    "trunk/src/cxa_exception.cpp",
    "trunk/src/cxa_exception_storage.cpp",
    "trunk/src/cxa_handlers.cpp",

    # This file is supposed to be used in fno-exception builds of
    # libc++abi. We build lib++/libc++abi with exceptions enabled.
    #"trunk/src/cxa_noexception.cpp",
    "trunk/src/cxa_personality.cpp",
    "trunk/src/cxa_vector.cpp",
    "trunk/src/cxa_virtual.cpp",
    "trunk/src/fallback_malloc.cpp",
    "trunk/src/private_typeinfo.cpp",
    "trunk/src/stdlib_exception.cpp",
    "trunk/src/stdlib_stdexcept.cpp",
    "trunk/src/stdlib_typeinfo.cpp",
  ]

  if (!is_tsan) {
    sources += [ "trunk/src/cxa_guard.cpp" ]
  }

  # See the comment in cxa_demangle_stub.cc for why we don't use LLVM's
  # demangler on android.
  # TODO(thakis): Switch to building with LIBCXXABI_NON_DEMANGLING_TERMINATE
  # defined instead.
  if (is_android) {
    sources += [ "cxa_demangle_stub.cc" ]
  } else {
    sources += [ "trunk/src/cxa_demangle.cpp" ]
  }

  if (is_fuchsia || (is_posix && !is_apple)) {
    sources += [ "trunk/src/cxa_thread_atexit.cpp" ]
  }

  # NOTE(review): presumably suppresses libc++abi's verbose (demangling)
  # terminate handler — confirm against upstream libc++abi build docs.
  defines = [ "LIBCXXABI_SILENT_TERMINATE" ]

  # Build as third-party code with exceptions and RTTI enabled; the default
  # Chromium compiler configs disable both, and this library needs them.
  configs -= [
    "//build/config/compiler:chromium_code",
    "//build/config/compiler:no_exceptions",
    "//build/config/compiler:no_rtti",
    "//build/config/coverage:default_coverage",
  ]
  configs += [
    "//build/config/compiler:no_chromium_code",
    "//build/config/compiler:exceptions",
    "//build/config/compiler:rtti",
    "//buildtools/third_party/libc++:config",
  ]

  # private_typeinfo.cpp implements __dynamic_cast(), which is the runtime
  # bit that implements dynamic_cast<>(). But ubsan's vptr check inserts
  # dynamic_cast<>()s, which leads to infinite recursion. So make sure we don't
  # pass -fsanitize=vptr.
  configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]
  configs +=
      [ "//build/config/sanitizers:default_sanitizer_flags_but_ubsan_vptr" ]

  if (export_libcxxabi_from_executables || libcxx_is_shared) {
    configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]
    configs += [ "//build/config/gcc:symbol_visibility_default" ]
  }

  # libc++abi depends on libc++ internals.
  include_dirs = [ "../libc++/trunk/src" ]
}
An example would be: 9 | 10 | ``` 11 | deps = { 12 | "base":"http://foo.bar/trunk/base" 13 | } 14 | ``` 15 | 16 | DEPS files not in the top-level of a module won't need this. Then you have any 17 | additional include rules. You can add (using `+`) or subtract (using `-`) from 18 | the previously specified rules (including module-level deps). You can also 19 | specify a path that is allowed for now but that we intend to remove, using `!`; 20 | this is treated the same as `+` when `check_deps` is run by our bots, but a 21 | presubmit step will show a warning if you add a new include of a file that is 22 | only allowed by `!`. 23 | 24 | Note that for .java files, there is currently no difference between `+` and 25 | `!`, even in the presubmit step. 26 | 27 | ``` 28 | include_rules = [ 29 | # Code should be able to use base (it's specified in the module-level 30 | # deps above), but nothing in "base/evil" because it's evil. 31 | "-base/evil", 32 | 33 | # But this one subdirectory of evil is OK. 34 | "+base/evil/not", 35 | 36 | # And it can include files from this other directory even though there is 37 | # no deps rule for it. 38 | "+tools/crime_fighter", 39 | 40 | # This dependency is allowed for now but work is ongoing to remove it, 41 | # so you shouldn't add further dependencies on it. 42 | "!base/evil/ok_for_now.h", 43 | ] 44 | ``` 45 | 46 | If you have certain include rules that should only be applied for some files 47 | within this directory and subdirectories, you can write a section named 48 | `specific_include_rules` that is a hash map of regular expressions to the list 49 | of rules that should apply to files matching them. Note that such rules will 50 | always be applied before the rules from `include_rules` have been applied, but 51 | the order in which rules associated with different regular expressions is 52 | applied is arbitrary. 
53 | 54 | ``` 55 | specific_include_rules = { 56 | ".*_(unit|browser|api)test\.cc": [ 57 | "+libraries/testsupport", 58 | ], 59 | } 60 | ``` 61 | 62 | To add different dependencies for Java instrumentation and unit tests, the 63 | following regular expressions may be useful: 64 | 65 | ``` 66 | specific_include_rules = { 67 | '.*UnitTest\.java': [ 68 | # Rules for unit tests. 69 | ], 70 | '.*(? self._MAX_UNINTERESTING_LINES: 99 | break 100 | 101 | line = line.strip() 102 | 103 | is_import, violation = self.CheckLine(rules, line, filepath) 104 | if is_import: 105 | last_import = line_num 106 | if violation: 107 | dependee_status.AddViolation(violation) 108 | 109 | return dependee_status 110 | 111 | @staticmethod 112 | def IsProtoFile(file_path): 113 | """Returns True iff the given path ends in one of the extensions 114 | handled by this checker. 115 | """ 116 | return os.path.splitext(file_path)[1] in ProtoChecker.EXTENSIONS 117 | 118 | def ShouldCheck(self, file_path): 119 | """Check if the new #include file path should be presubmit checked. 120 | 121 | Args: 122 | file_path: file path to be checked 123 | 124 | Return: 125 | bool: True if the file should be checked; False otherwise. 126 | """ 127 | return self.IsProtoFile(file_path) 128 | -------------------------------------------------------------------------------- /checkdeps/cpp_checker.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 The Chromium Authors 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 
class CppChecker(object):
  """Checks C++/Objective-C files' #include/#import lines against a rule set.

  This class only extracts include paths and collects violations; the rule
  evaluation itself is delegated to the |rules| object passed into
  CheckLine/CheckFile.
  """

  # File extensions handled by this checker.
  EXTENSIONS = [
      '.h',
      '.cc',
      '.cpp',
      '.m',
      '.mm',
  ]

  # The maximum number of non-include lines we can see before giving up.
  _MAX_UNINTERESTING_LINES = 50

  # The maximum line length, this is to be efficient in the case of very long
  # lines (which can't be #includes).
  _MAX_LINE_LENGTH = 128

  # This regular expression will be used to extract filenames from include
  # statements.
  _EXTRACT_INCLUDE_PATH = re.compile(
      r'[ \t]*#[ \t]*(?:include|import)[ \t]*"(.*)"')

  def __init__(self, verbose, resolve_dotdot=False, root_dir=''):
    """Args:
      verbose: If True, print progress and warnings to stdout.
      resolve_dotdot: If True, '../' components in include paths are resolved
          against the dependee's directory and |root_dir| before rule lookup.
      root_dir: Repository root; only used when |resolve_dotdot| is set.
    """
    self._verbose = verbose
    self._resolve_dotdot = resolve_dotdot
    self._root_dir = root_dir

  def CheckLine(self, rules, line, dependee_path, fail_on_temp_allow=False):
    """Checks the given line with the given rule set.

    Returns a tuple (is_include, dependency_violation) where
    is_include is True only if the line is an #include or #import
    statement, and dependency_violation is an instance of
    results.DependencyViolation if the line violates a rule, or None
    if it does not.
    """
    found_item = self._EXTRACT_INCLUDE_PATH.match(line)
    if not found_item:
      return False, None  # Not a match

    include_path = found_item.group(1)

    if '\\' in include_path:
      return True, results.DependencyViolation(
          include_path,
          MessageRule('Include paths may not include backslashes.'),
          rules)

    if '/' not in include_path:
      # Don't fail when no directory is specified. We may want to be more
      # strict about this in the future.
      if self._verbose:
        print(' WARNING: include specified with no directory: ' + include_path)
      return True, None

    if self._resolve_dotdot and '../' in include_path:
      dependee_dir = os.path.dirname(dependee_path)
      include_path = os.path.join(dependee_dir, include_path)
      include_path = os.path.relpath(include_path, self._root_dir)

    rule = rules.RuleApplyingTo(include_path, dependee_path)
    if (rule.allow == Rule.DISALLOW or
        (fail_on_temp_allow and rule.allow == Rule.TEMP_ALLOW)):
      return True, results.DependencyViolation(include_path, rule, rules)
    return True, None

  def CheckFile(self, rules, filepath):
    """Checks the include block at the top of |filepath|.

    Returns a results.DependeeStatus holding one DependencyViolation per
    disallowed include found.
    """
    if self._verbose:
      print('Checking: ' + filepath)

    dependee_status = results.DependeeStatus(filepath)
    # Fix: removed the dead local `ret_val = ''` that was assigned and
    # never read (leftover from an earlier error-reporting scheme).
    last_include = 0
    with codecs.open(filepath, encoding='utf-8') as f:
      in_if0 = 0
      for line_num, line in enumerate(f):
        # Includes are expected near the top of the file; stop scanning once
        # we are well past the last one seen.
        if line_num - last_include > self._MAX_UNINTERESTING_LINES:
          break

        line = line.strip()

        # Check to see if we're at / inside an #if 0 block
        if line.startswith('#if 0'):
          in_if0 += 1
          continue
        if in_if0 > 0:
          if line.startswith('#if'):
            in_if0 += 1
          elif line.startswith('#endif'):
            in_if0 -= 1
          continue

        is_include, violation = self.CheckLine(rules, line, filepath)
        if is_include:
          last_include = line_num
        if violation:
          dependee_status.AddViolation(violation)

    return dependee_status

  @staticmethod
  def IsCppFile(file_path):
    """Returns True iff the given path ends in one of the extensions
    handled by this checker.
    """
    return os.path.splitext(file_path)[1] in CppChecker.EXTENSIONS

  def ShouldCheck(self, file_path):
    """Check if the new #include file path should be presubmit checked.

    Args:
      file_path: file path to be checked

    Return:
      bool: True if the file should be checked; False otherwise.
    """
    return self.IsCppFile(file_path)
class ResultsFormatter(object):
  """Interface for objects that accumulate and render checkdeps results."""

  def AddError(self, dependee_status):
    """Add a formatted result to |self.results| for |dependee_status|,
    which is guaranteed to return True for
    |dependee_status.HasViolations|.
    """
    raise NotImplementedError()

  def GetResults(self):
    """Returns the results. May be overridden e.g. to process the
    results that have been accumulated.
    """
    raise NotImplementedError()

  def PrintResults(self):
    """Prints the results to stdout."""
    raise NotImplementedError()


class NormalResultsFormatter(ResultsFormatter):
  """Produces the classical, detailed, human-readable checkdeps report."""

  def __init__(self, verbose):
    self.verbose = verbose
    self.results = []

  def AddError(self, dependee_status):
    report = ['\nERROR in %s' % dependee_status.dependee_path]
    report.extend(
        self.FormatViolation(v, self.verbose)
        for v in dependee_status.violations)
    self.results.append('\n'.join(report))

  @staticmethod
  def FormatViolation(violation, verbose=False):
    pieces = []
    if verbose:
      pieces.append(' For %s' % violation.rules)
    pieces.append(' Illegal include: "%s"\n Because of %s' %
                  (violation.include_path, str(violation.violated_rule)))
    return '\n'.join(pieces)

  def GetResults(self):
    return self.results

  def PrintResults(self):
    for entry in self.results:
      print(entry)
    if self.results:
      print('\nFAILED\n')


class JSONResultsFormatter(ResultsFormatter):
  """Writes results to a file as JSON, optionally forwarding each error to a
  wrapped formatter as well.
  """

  def __init__(self, output_path, wrapped_formatter=None):
    self.output_path = output_path
    self.wrapped_formatter = wrapped_formatter
    self.results = []

  def AddError(self, dependee_status):
    violations = [{
        'include_path': v.include_path,
        'violated_rule': v.violated_rule.AsDependencyTuple(),
    } for v in dependee_status.violations]
    self.results.append({
        'dependee_path': dependee_status.dependee_path,
        'violations': violations,
    })

    if self.wrapped_formatter:
      self.wrapped_formatter.AddError(dependee_status)

  def GetResults(self):
    # Writing happens here (not in PrintResults) so the JSON file is produced
    # even when results are consumed programmatically.
    with open(self.output_path, 'w') as f:
      f.write(json.dumps(self.results))
    return self.results

  def PrintResults(self):
    if not self.wrapped_formatter:
      print(self.results)
      return
    self.wrapped_formatter.PrintResults()
class TemporaryRulesFormatter(ResultsFormatter):
  """Emits one line per unique nonconforming include. The combined output
  is suitable for pasting directly into a DEPS file as a list of
  temporary-allow ("!") rules.
  """

  def __init__(self):
    self.violations = set()

  def AddError(self, dependee_status):
    self.violations.update(
        v.include_path for v in dependee_status.violations)

  def GetResults(self):
    return [' "!%s",' % path for path in sorted(self.violations)]

  def PrintResults(self):
    for line in self.GetResults():
      print(line)


class CountViolationsFormatter(ResultsFormatter):
  """Reports a single number: how many #include statements violate the
  dependency rules.

  Note that you normally want to instantiate DepsChecker with
  ignore_temp_rules=True when you use this formatter.
  """

  def __init__(self):
    self.count = 0

  def AddError(self, dependee_status):
    self.count += len(dependee_status.violations)

  def GetResults(self):
    return '%d' % self.count

  def PrintResults(self):
    print(self.count)
class Rule(object):
  """A single include rule, one of ALLOW, DISALLOW or TEMP_ALLOW."""

  # The prefix characters that introduce each kind of rule in a DEPS file.
  # The same character is stored in self.allow to tag the rule's kind.
  ALLOW = '+'
  DISALLOW = '-'
  TEMP_ALLOW = '!'

  def __init__(self, allow, directory, dependent_directory, source):
    self.allow = allow
    self._dir = directory
    self._dependent_dir = dependent_directory
    self._source = source

  def __str__(self):
    return '"%s%s" from %s.' % (self.allow, self._dir, self._source)

  def AsDependencyTuple(self):
    """Returns a tuple (allow, dependent dir, dependee dir) for this rule,
    which is fully self-sufficient to answer the question whether the dependent
    is allowed to depend on the dependee, without knowing the external
    context."""
    return self.allow, self._dependent_dir or '.', self._dir or '.'

  def ParentOrMatch(self, other):
    """Returns true if the input string is an exact match or is a parent
    of the current rule. For example, the input "foo" would match "foo/bar"."""
    if self._dir == other:
      return True
    return self._dir.startswith(other + '/')

  def ChildOrMatch(self, other):
    """Returns true if the input string would be covered by this rule. For
    example, the input "foo/bar" would match the rule "foo"."""
    if other == self._dir:
      return True
    return other.startswith(self._dir + '/')


class MessageRule(Rule):
  """A DISALLOW rule whose failure reason is a plain message, unrelated to
  any directory or source.
  """

  def __init__(self, reason):
    super(MessageRule, self).__init__(Rule.DISALLOW, '', '', '')
    self._reason = reason

  def __str__(self):
    return self._reason
def ParseRuleString(rule_string, source):
  """Returns a tuple of a character indicating what type of rule this
  is, and a string holding the path the rule applies to.

  Args:
    rule_string: A rule such as "+base/foo" from an include_rules list.
    source: A human-readable description of where the string came from
        (e.g. a DEPS file path); used only in error messages.

  Raises:
    Exception: If the rule string is empty, does not start with one of the
        recognized prefixes, or ends with a disallowed trailing slash.
  """
  if not rule_string:
    raise Exception('The rule string "%s" is empty\nin %s' %
                    (rule_string, source))

  if not rule_string[0] in [Rule.ALLOW, Rule.DISALLOW, Rule.TEMP_ALLOW]:
    # Fix: report |source| here too (previously only the empty-string error
    # did), so broken DEPS files are easy to locate.
    raise Exception(
        'The rule string "%s" does not begin with a "+", "-" or "!".'
        '\nin %s' % (rule_string, source))

  # If a directory is specified in a DEPS file with a trailing slash, then it
  # will not match as a parent directory in Rule's [Parent|Child]OrMatch above.
  # Ban them.
  if rule_string[-1] == '/':
    raise Exception(
        'The rule string "%s" ends with a "/" which is not allowed.'
        ' Please remove the trailing "/".'
        '\nin %s' % (rule_string, source))

  return rule_string[0], rule_string[1:]
class Rules(object):
  """Sets of rules for files in a directory.

  By default, rules are added to the set of rules applicable to all
  dependee files in the directory. Rules may also be added that apply
  only to dependee files whose filename (last component of their path)
  matches a given regular expression; hence there is one additional
  set of rules per unique regular expression.
  """

  def __init__(self):
    """Initializes the current rules with an empty rule list for all
    files.
    """
    # General rules live outside the specific-rules dictionary because they
    # must always be evaluated after the specific ones.
    self._general_rules = []
    # Maps a regexp string to the list of rules applying to dependee files
    # whose basename matches it. Specific rules are applied before general
    # ones, but their internal ordering is arbitrary.
    self._specific_rules = {}

  def __str__(self):
    sections = ['Rules = {\n (apply to all files): [\n%s\n ],' %
                '\n'.join(' %s' % rule for rule in self._general_rules)]
    for regexp, rules in list(self._specific_rules.items()):
      sections.append(' (limited to files matching %s): [\n%s\n ]' %
                      (regexp, '\n'.join(' %s' % rule for rule in rules)))
    sections.append(' }')
    return '\n'.join(sections)

  def AsDependencyTuples(self, include_general_rules, include_specific_rules):
    """Returns a list of tuples (allow, dependent dir, dependee dir) for the
    specified rules (general/specific). Currently only general rules are
    supported."""
    def _Accumulate(deps, rules, extra_dependent_suffix=""):
      # One tuple per rule; specific rules get the regexp appended to the
      # dependent directory so they stay distinguishable.
      for rule in rules:
        allow, dependent, dependee = rule.AsDependencyTuple()
        deps.add((allow, dependent + extra_dependent_suffix, dependee))

    deps = set()
    if include_general_rules:
      _Accumulate(deps, self._general_rules)
    if include_specific_rules:
      for regexp, rules in list(self._specific_rules.items()):
        _Accumulate(deps, rules, "/" + regexp)
    return deps

  def AddRule(self, rule_string, dependent_dir, source, dependee_regexp=None):
    """Adds a rule for the given rule string.

    Args:
      rule_string: The include_rule string read from the DEPS file to apply.
      source: A string representing the location of that string (filename, etc.)
          so that we can give meaningful errors.
      dependent_dir: The directory to which this rule applies.
      dependee_regexp: The rule will only be applied to dependee files
          whose filename (last component of their path) matches the
          expression. None to match all dependee files.
    """
    rule_type, rule_dir = ParseRuleString(rule_string, source)

    if not dependee_regexp:
      existing = self._general_rules
    else:
      existing = self._specific_rules.get(dependee_regexp, [])

    # Drop any rule the new one supersedes: adding "foo" replaces "foo" and
    # "foo/bar", but not "foobar". The new rule goes first so it wins lookups.
    updated = [rule for rule in existing if not rule.ParentOrMatch(rule_dir)]
    updated.insert(0, Rule(rule_type, rule_dir, dependent_dir, source))

    if not dependee_regexp:
      self._general_rules = updated
    else:
      self._specific_rules[dependee_regexp] = updated

  def RuleApplyingTo(self, include_path, dependee_path):
    """Returns the rule that applies to |include_path| for a dependee
    file located at |dependee_path|.

    Specific rules (keyed on the dependee's basename) are consulted before
    general rules; within a list, the first matching rule wins.
    """
    dependee_filename = os.path.basename(dependee_path)
    for regexp, specific_rules in list(self._specific_rules.items()):
      if not re.match(regexp, dependee_filename):
        continue
      for rule in specific_rules:
        if rule.ChildOrMatch(include_path):
          return rule
    for rule in self._general_rules:
      if rule.ChildOrMatch(include_path):
        return rule
    return MessageRule('no rule applying.')
10 | config("config") { 11 | cflags = [ "-fstrict-aliasing" ] 12 | if (is_win) { 13 | cflags += [ 14 | # libc++ wants to redefine the macros WIN32_LEAN_AND_MEAN and _CRT_RAND_S 15 | # in its implementation. 16 | "-Wno-macro-redefined", 17 | ] 18 | 19 | cflags_cc = [ 20 | # We want to use a uniform C++ version across all of chromium, but 21 | # upstream libc++ requires C++20 so we have to make an exception here. 22 | # No other target should override the default -std= flag. 23 | "-std:c++20", 24 | ] 25 | } else { 26 | cflags += [ "-fPIC" ] 27 | cflags_cc = [ "-std=c++20" ] 28 | } 29 | 30 | defines = [ "_LIBCPP_BUILDING_LIBRARY" ] 31 | } 32 | 33 | # Explicitly set version macros to Windows 7 to prevent libc++ from adding a 34 | # hard dependency on GetSystemTimePreciseAsFileTime, which was introduced in 35 | # Windows 8. 36 | config("winver") { 37 | defines = [ 38 | "NTDDI_VERSION=NTDDI_WIN7", 39 | "_WIN32_WINNT=_WIN32_WINNT_WIN7", 40 | "WINVER=_WIN32_WINNT_WIN7", 41 | ] 42 | } 43 | 44 | if (libcxx_is_shared) { 45 | _libcxx_target_type = "shared_library" 46 | } else { 47 | _libcxx_target_type = "source_set" 48 | } 49 | target(_libcxx_target_type, "libc++") { 50 | # Most things that need to depend on libc++ should do so via the implicit 51 | # 'common_deps' dependency below. Some targets that package libc++.so may 52 | # need to explicitly depend on libc++. 53 | visibility = [ 54 | "//build/config:common_deps", 55 | "//third_party/catapult/devil:devil", 56 | ] 57 | if (is_linux) { 58 | # This target packages libc++.so, so must have an explicit dependency on 59 | # libc++. 
60 | visibility += 61 | [ "//remoting/host/linux:remoting_me2me_host_copy_user_session" ] 62 | } 63 | if (libcxx_is_shared) { 64 | no_default_deps = true 65 | } 66 | 67 | if (is_linux && !is_clang) { 68 | libs = [ "atomic" ] 69 | } 70 | 71 | inputs = [ "__config_site" ] 72 | 73 | sources = [ 74 | "trunk/src/algorithm.cpp", 75 | "trunk/src/any.cpp", 76 | "trunk/src/atomic.cpp", 77 | "trunk/src/barrier.cpp", 78 | "trunk/src/bind.cpp", 79 | "trunk/src/charconv.cpp", 80 | "trunk/src/chrono.cpp", 81 | "trunk/src/condition_variable.cpp", 82 | "trunk/src/condition_variable_destructor.cpp", 83 | "trunk/src/exception.cpp", 84 | "trunk/src/format.cpp", 85 | "trunk/src/functional.cpp", 86 | "trunk/src/future.cpp", 87 | "trunk/src/hash.cpp", 88 | "trunk/src/ios.cpp", 89 | "trunk/src/ios.instantiations.cpp", 90 | "trunk/src/iostream.cpp", 91 | "trunk/src/legacy_pointer_safety.cpp", 92 | "trunk/src/locale.cpp", 93 | "trunk/src/memory.cpp", 94 | "trunk/src/mutex.cpp", 95 | "trunk/src/mutex_destructor.cpp", 96 | "trunk/src/new.cpp", 97 | "trunk/src/optional.cpp", 98 | "trunk/src/random.cpp", 99 | "trunk/src/random_shuffle.cpp", 100 | "trunk/src/regex.cpp", 101 | "trunk/src/ryu/d2fixed.cpp", 102 | "trunk/src/ryu/d2s.cpp", 103 | "trunk/src/ryu/f2s.cpp", 104 | "trunk/src/shared_mutex.cpp", 105 | "trunk/src/stdexcept.cpp", 106 | "trunk/src/string.cpp", 107 | "trunk/src/strstream.cpp", 108 | "trunk/src/system_error.cpp", 109 | "trunk/src/thread.cpp", 110 | "trunk/src/typeinfo.cpp", 111 | "trunk/src/utility.cpp", 112 | "trunk/src/valarray.cpp", 113 | "trunk/src/variant.cpp", 114 | "trunk/src/vector.cpp", 115 | "trunk/src/verbose_abort.cpp", 116 | ] 117 | 118 | if (enable_iterator_debugging) { 119 | sources += [ 120 | "trunk/src/debug.cpp", 121 | "trunk/src/legacy_debug_handler.cpp", 122 | ] 123 | } 124 | 125 | include_dirs = [ "trunk/src" ] 126 | if (is_win) { 127 | sources += [ 128 | "trunk/src/support/win32/locale_win32.cpp", 129 | "trunk/src/support/win32/support.cpp", 130 | 
"trunk/src/support/win32/thread_win32.cpp", 131 | ] 132 | configs -= [ "//build/config/win:winver" ] 133 | configs += [ ":winver" ] 134 | if (libcxx_natvis_include) { 135 | inputs += [ 136 | # libc++.natvis listed as an input here instead of in 137 | # //build/config/c++:runtime_library to prevent unnecessary size 138 | # increase in generated build files. 139 | "//build/config/c++/libc++.natvis", 140 | ] 141 | } 142 | } 143 | configs -= [ 144 | "//build/config/compiler:chromium_code", 145 | "//build/config/compiler:no_exceptions", 146 | "//build/config/compiler:no_rtti", 147 | "//build/config/coverage:default_coverage", 148 | ] 149 | if ((is_android || is_apple) && libcxx_is_shared) { 150 | # Use libc++_chrome to avoid conflicting with system libc++ 151 | output_name = "libc++_chrome" 152 | if (is_android) { 153 | # See crbug.com/1076244#c11 for more detail. 154 | configs -= [ "//build/config/android:hide_all_but_jni_onload" ] 155 | } 156 | } 157 | configs += [ 158 | ":config", 159 | "//build/config/compiler:no_chromium_code", 160 | "//build/config/compiler:exceptions", 161 | "//build/config/compiler:rtti", 162 | ] 163 | 164 | if (libcxx_is_shared && !is_win) { 165 | configs -= [ "//build/config/gcc:symbol_visibility_hidden" ] 166 | configs += [ "//build/config/gcc:symbol_visibility_default" ] 167 | } 168 | 169 | defines = [] 170 | 171 | if (!libcxx_is_shared) { 172 | if (is_apple && is_clang) { 173 | # We want operator new/delete to be private on Mac, but these functions 174 | # are implicitly created by the compiler for each translation unit, as 175 | # specified in the C++ spec 3.7.4p2, which makes them always have default 176 | # visibility. This option is needed to force hidden visibility since 177 | # -fvisibility=hidden doesn't have the desired effect. 
178 | cflags = [ "-fvisibility-global-new-delete-hidden" ] 179 | } else { 180 | defines += [ 181 | # This resets the visibility to default only for the various 182 | # flavors of operator new and operator delete. These symbols 183 | # are weak and get overriden by Chromium-provided ones, but if 184 | # these symbols had hidden visibility, this would make the 185 | # Chromium symbols hidden too because elf visibility rules 186 | # require that linkers use the least visible form when merging, 187 | # and if this is hidden, then when we merge it with tcmalloc's 188 | # operator new, hidden visibility would win. However, tcmalloc 189 | # needs a visible operator new to also override operator new 190 | # references from system libraries. 191 | # TODO(lld): Ask lld for a --force-public-visibility flag or 192 | # similar to that overrides the default elf merging rules, and 193 | # make tcmalloc's gn config pass that to all its dependencies, 194 | # then remove this override here. 195 | "_LIBCPP_OVERRIDABLE_FUNC_VIS=__attribute__((__visibility__(\"default\")))", 196 | ] 197 | } 198 | } 199 | if (!is_apple && (is_asan || is_tsan || is_msan)) { 200 | # In {a,t,m}san configurations, operator new and operator delete will be 201 | # provided by the sanitizer runtime library. Since libc++ defines these 202 | # symbols with weak linkage, and the *san runtime uses strong linkage, it 203 | # should technically be OK to omit this, but it's added to be explicit. 
204 | defines += [ "_LIBCPP_DISABLE_NEW_DELETE_DEFINITIONS" ] 205 | } 206 | 207 | if (!is_win) { 208 | defines += [ "LIBCXX_BUILDING_LIBCXXABI" ] 209 | if (!export_libcxxabi_from_executables) { 210 | deps = [ "//buildtools/third_party/libc++abi" ] 211 | } 212 | } 213 | } 214 | -------------------------------------------------------------------------------- /checkdeps/java_checker.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 The Chromium Authors 2 | # Use of this source code is governed by a BSD-style license that can be 3 | # found in the LICENSE file. 4 | 5 | """Checks Java files for illegal imports.""" 6 | 7 | 8 | 9 | import codecs 10 | import os 11 | import re 12 | 13 | import results 14 | from rules import Rule 15 | 16 | 17 | class JavaChecker(object): 18 | """Import checker for Java files. 19 | 20 | The CheckFile method uses real filesystem paths, but Java imports work in 21 | terms of package names. To deal with this, we have an extra "prescan" pass 22 | that reads all the .java files and builds a mapping of class name -> filepath. 23 | In CheckFile, we convert each import statement into a real filepath, and check 24 | that against the rules in the DEPS files. 25 | 26 | Note that in Java you can always use classes in the same directory without an 27 | explicit import statement, so these imports can't be blocked with DEPS files. 28 | But that shouldn't be a problem, because same-package imports are pretty much 29 | always correct by definition. (If we find a case where this is *not* correct, 30 | it probably means the package is too big and needs to be split up.) 31 | 32 | Properties: 33 | _classmap: dict of fully-qualified Java class name -> filepath 34 | """ 35 | 36 | EXTENSIONS = ['.java'] 37 | 38 | # This regular expression will be used to extract filenames from import 39 | # statements. 
  # Matches "import foo.bar.Baz;" and "import static foo.bar.Baz;", capturing
  # the dotted name in group 1.
  _EXTRACT_IMPORT_PATH = re.compile(r'^import\s+(?:static\s+)?([\w\.]+)\s*;')

  def __init__(self, base_directory, verbose, added_imports=None,
               allow_multiple_definitions=None):
    self._base_directory = base_directory
    self._verbose = verbose
    # Maps fully-qualified Java class name -> filepath; filled by prescanning.
    self._classmap = {}
    self._allow_multiple_definitions = allow_multiple_definitions or []
    if added_imports:
      added_classset = self._PrescanImportFiles(added_imports)
      self._PrescanFiles(added_classset)

  def _GetClassFullName(self, filepath):
    """Get the full class name of a file with package name.

    Returns None when the file does not exist; returns None implicitly when
    no "package" declaration is found.
    """
    if not os.path.isfile(filepath):
      return None
    with codecs.open(filepath, encoding='utf-8') as f:
      short_class_name, _ = os.path.splitext(os.path.basename(filepath))
      for line in f:
        for package in re.findall(r'^package\s+([\w\.]+);', line):
          return package + '.' + short_class_name

  def _IgnoreDir(self, d):
    """Returns True for directory names that should not be prescanned."""
    # Skip hidden directories.
    if d.startswith('.'):
      return True
    # Skip the "out" directory, as dealing with generated files is awkward.
    # We don't want paths like "out/Release/lib.java" in our DEPS files.
    # TODO(husky): We need some way of determining the "real" path to
    # a generated file -- i.e., where it would be in source control if
    # it weren't generated.
    if d.startswith('out') or d in ('xcodebuild', 'AndroidStudioDefault',
                                    'libassistant',):
      return True
    # Skip third-party directories.
    if d in ('third_party', 'ThirdParty'):
      return True
    return False

  def _PrescanFiles(self, added_classset):
    """Walks the tree and prescans every .java file into the classmap."""
    for root, dirs, files in os.walk(self._base_directory):
      # Skip unwanted subdirectories. TODO(husky): it would be better to do
      # this via the skip_child_includes flag in DEPS files. Maybe hoist this
      # prescan logic into checkdeps.py itself?
      # Modify dirs in-place with slice assignment to avoid recursing into them.
      dirs[:] = [d for d in dirs if not self._IgnoreDir(d)]
      for f in files:
        if f.endswith('.java'):
          self._PrescanFile(os.path.join(root, f), added_classset)

  def _PrescanImportFiles(self, added_imports):
    """Build a set of fully-qualified class affected by this patch.

    Prescan imported files and build classset to collect full class names
    with package name. This includes both changed files as well as changed
    imports.

    Args:
      added_imports : ((file_path, (import_line, import_line, ...), ...)

    Return:
      A set of full class names with package name of imported files.
    """
    classset = set()
    for filepath, changed_lines in (added_imports or []):
      if not self.ShouldCheck(filepath):
        continue
      full_class_name = self._GetClassFullName(filepath)
      if full_class_name:
        classset.add(full_class_name)
      for line in changed_lines:
        found_item = self._EXTRACT_IMPORT_PATH.match(line)
        if found_item:
          classset.add(found_item.group(1))
    return classset

  def _PrescanFile(self, filepath, added_classset):
    """Records filepath in the classmap, warning on duplicate definitions."""
    if self._verbose:
      print('Prescanning: ' + filepath)
    full_class_name = self._GetClassFullName(filepath)
    if full_class_name:
      if full_class_name in self._classmap:
        if self._verbose or full_class_name in added_classset:
          if not any(re.match(i, filepath) for i in
                     self._allow_multiple_definitions):
            print('WARNING: multiple definitions of %s:' % full_class_name)
            print('    ' + filepath)
            print('    ' + self._classmap[full_class_name])
            print()
        # Prefer the public repo when multiple matches are found.
        if self._classmap[full_class_name].startswith(
            os.path.join(self._base_directory, 'clank')):
          self._classmap[full_class_name] = filepath
      else:
        self._classmap[full_class_name] = filepath
    elif self._verbose:
      print('WARNING: no package definition found in %s' % filepath)

  def CheckLine(self, rules, line, filepath, fail_on_temp_allow=False):
    """Checks the given line with the given rule set.

    Returns a tuple (is_import, dependency_violation) where
    is_import is True only if the line is an import
    statement, and dependency_violation is an instance of
    results.DependencyViolation if the line violates a rule, or None
    if it does not.
    """
    found_item = self._EXTRACT_IMPORT_PATH.match(line)
    if not found_item:
      return False, None  # Not a match
    clazz = found_item.group(1)
    if clazz not in self._classmap:
      # Importing a class from outside the Chromium tree. That's fine --
      # it's probably a Java or Android system class.
      return True, None
    import_path = os.path.relpath(
        self._classmap[clazz], self._base_directory)
    # Convert Windows paths to Unix style, as used in DEPS files.
    import_path = import_path.replace(os.path.sep, '/')
    rule = rules.RuleApplyingTo(import_path, filepath)
    if (rule.allow == Rule.DISALLOW or
        (fail_on_temp_allow and rule.allow == Rule.TEMP_ALLOW)):
      return True, results.DependencyViolation(import_path, rule, rules)
    return True, None

  def CheckFile(self, rules, filepath):
    """Checks every import line of |filepath| against |rules|.

    Returns a results.DependeeStatus holding any violations found.
    """
    if self._verbose:
      print('Checking: ' + filepath)

    dependee_status = results.DependeeStatus(filepath)
    with codecs.open(filepath, encoding='utf-8') as f:
      for line in f:
        is_import, violation = self.CheckLine(rules, line, filepath)
        if violation:
          dependee_status.AddViolation(violation)
        if '{' in line:
          # This is code, so we're finished reading imports for this file.
          break

    return dependee_status

  @staticmethod
  def IsJavaFile(filepath):
    """Returns True if the given path ends in the extensions
    handled by this checker.
    """
    return os.path.splitext(filepath)[1] in JavaChecker.EXTENSIONS

  def ShouldCheck(self, file_path):
    """Check if the new import file path should be presubmit checked.

    Args:
      file_path: file path to be checked

    Return:
      bool: True if the file should be checked; False otherwise.
    """
    return self.IsJavaFile(file_path)
--------------------------------------------------------------------------------
/checkdeps/checkdeps_test.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# Copyright 2012 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Tests for checkdeps.
7 | """ 8 | 9 | import os 10 | import unittest 11 | 12 | 13 | import builddeps 14 | import checkdeps 15 | import results 16 | 17 | 18 | class CheckDepsTest(unittest.TestCase): 19 | 20 | def setUp(self): 21 | self.deps_checker = checkdeps.DepsChecker( 22 | being_tested=True, 23 | base_directory=os.path.join(os.path.dirname(__file__), '..', '..')) 24 | 25 | def ImplTestRegularCheckDepsRun(self, ignore_temp_rules, skip_tests): 26 | self.deps_checker._ignore_temp_rules = ignore_temp_rules 27 | self.deps_checker._skip_tests = skip_tests 28 | self.deps_checker.CheckDirectory( 29 | os.path.join(self.deps_checker.base_directory, 30 | 'buildtools/checkdeps/testdata')) 31 | 32 | problems = self.deps_checker.results_formatter.GetResults() 33 | if skip_tests: 34 | self.assertEqual(4, len(problems)) 35 | else: 36 | self.assertEqual(5, len(problems)) 37 | 38 | def VerifySubstringsInProblems(key_path, substrings_in_sequence): 39 | """Finds the problem in |problems| that contains |key_path|, 40 | then verifies that each of |substrings_in_sequence| occurs in 41 | that problem, in the order they appear in 42 | |substrings_in_sequence|. 
43 | """ 44 | found = False 45 | key_path = os.path.normpath(key_path) 46 | for problem in problems: 47 | index = problem.find(key_path) 48 | if index != -1: 49 | for substring in substrings_in_sequence: 50 | index = problem.find(substring, index + 1) 51 | self.assertTrue(index != -1, '%s in %s' % (substring, problem)) 52 | found = True 53 | break 54 | if not found: 55 | self.fail('Found no problem for file %s' % key_path) 56 | 57 | if ignore_temp_rules: 58 | VerifySubstringsInProblems('testdata/allowed/test.h', 59 | ['-buildtools/checkdeps/testdata/disallowed', 60 | 'temporarily_allowed.h', 61 | '-third_party/explicitly_disallowed', 62 | 'Because of no rule applying']) 63 | else: 64 | VerifySubstringsInProblems('testdata/allowed/test.h', 65 | ['-buildtools/checkdeps/testdata/disallowed', 66 | '-third_party/explicitly_disallowed', 67 | 'Because of no rule applying']) 68 | 69 | VerifySubstringsInProblems('testdata/disallowed/test.h', 70 | ['-third_party/explicitly_disallowed', 71 | 'Because of no rule applying', 72 | 'Because of no rule applying']) 73 | VerifySubstringsInProblems('disallowed/allowed/test.h', 74 | ['-third_party/explicitly_disallowed', 75 | 'Because of no rule applying', 76 | 'Because of no rule applying']) 77 | VerifySubstringsInProblems('testdata/noparent/test.h', 78 | ['allowed/bad.h', 79 | 'Because of no rule applying']) 80 | 81 | if not skip_tests: 82 | VerifySubstringsInProblems('allowed/not_a_test.cc', 83 | ['-buildtools/checkdeps/testdata/disallowed']) 84 | 85 | def testRegularCheckDepsRun(self): 86 | self.ImplTestRegularCheckDepsRun(False, False) 87 | 88 | def testRegularCheckDepsRunIgnoringTempRules(self): 89 | self.ImplTestRegularCheckDepsRun(True, False) 90 | 91 | def testRegularCheckDepsRunSkipTests(self): 92 | self.ImplTestRegularCheckDepsRun(False, True) 93 | 94 | def testRegularCheckDepsRunIgnoringTempRulesSkipTests(self): 95 | self.ImplTestRegularCheckDepsRun(True, True) 96 | 97 | def CountViolations(self, ignore_temp_rules): 98 | 
self.deps_checker._ignore_temp_rules = ignore_temp_rules 99 | self.deps_checker.results_formatter = results.CountViolationsFormatter() 100 | self.deps_checker.CheckDirectory( 101 | os.path.join(self.deps_checker.base_directory, 102 | 'buildtools/checkdeps/testdata')) 103 | return self.deps_checker.results_formatter.GetResults() 104 | 105 | def testCountViolations(self): 106 | self.assertEqual('11', self.CountViolations(False)) 107 | 108 | def testCountViolationsIgnoringTempRules(self): 109 | self.assertEqual('12', self.CountViolations(True)) 110 | 111 | def testCountViolationsWithRelativePath(self): 112 | self.deps_checker.results_formatter = results.CountViolationsFormatter() 113 | self.deps_checker.CheckDirectory( 114 | os.path.join('buildtools', 'checkdeps', 'testdata', 'allowed')) 115 | self.assertEqual('4', self.deps_checker.results_formatter.GetResults()) 116 | 117 | def testTempRulesGenerator(self): 118 | self.deps_checker.results_formatter = results.TemporaryRulesFormatter() 119 | self.deps_checker.CheckDirectory( 120 | os.path.join(self.deps_checker.base_directory, 121 | 'buildtools/checkdeps/testdata/allowed')) 122 | temp_rules = self.deps_checker.results_formatter.GetResults() 123 | expected = [' "!buildtools/checkdeps/testdata/disallowed/bad.h",', 124 | ' "!buildtools/checkdeps/testdata/disallowed/teststuff/bad.h",', 125 | ' "!third_party/explicitly_disallowed/bad.h",', 126 | ' "!third_party/no_rule/bad.h",'] 127 | self.assertEqual(expected, temp_rules) 128 | 129 | def testBadBaseDirectoryNotCheckoutRoot(self): 130 | # This assumes git. It's not a valid test if buildtools is fetched via svn. 
131 | with self.assertRaises(builddeps.DepsBuilderError): 132 | checkdeps.DepsChecker(being_tested=True, 133 | base_directory=os.path.dirname(__file__)) 134 | 135 | def testCheckAddedIncludesAllGood(self): 136 | problems = self.deps_checker.CheckAddedCppIncludes( 137 | [['buildtools/checkdeps/testdata/allowed/test.cc', 138 | ['#include "buildtools/checkdeps/testdata/allowed/good.h"', 139 | '#include "buildtools/checkdeps/testdata/disallowed/allowed/good.h"'] 140 | ]]) 141 | self.assertFalse(problems) 142 | 143 | def testCheckAddedIncludesManyGarbageLines(self): 144 | garbage_lines = ["My name is Sam%d\n" % num for num in range(50)] 145 | problems = self.deps_checker.CheckAddedCppIncludes( 146 | [['buildtools/checkdeps/testdata/allowed/test.cc', garbage_lines]]) 147 | self.assertFalse(problems) 148 | 149 | def testCheckAddedIncludesNoRule(self): 150 | problems = self.deps_checker.CheckAddedCppIncludes( 151 | [['buildtools/checkdeps/testdata/allowed/test.cc', 152 | ['#include "no_rule_for_this/nogood.h"'] 153 | ]]) 154 | self.assertTrue(problems) 155 | 156 | def testCheckAddedIncludesSkippedDirectory(self): 157 | problems = self.deps_checker.CheckAddedCppIncludes( 158 | [['buildtools/checkdeps/testdata/disallowed/allowed/skipped/test.cc', 159 | ['#include "whatever/whocares.h"'] 160 | ]]) 161 | self.assertFalse(problems) 162 | 163 | def testCheckAddedIncludesTempAllowed(self): 164 | problems = self.deps_checker.CheckAddedCppIncludes( 165 | [['buildtools/checkdeps/testdata/allowed/test.cc', 166 | ['#include "buildtools/checkdeps/testdata/disallowed/temporarily_allowed.h"'] 167 | ]]) 168 | self.assertTrue(problems) 169 | 170 | def testCopyIsDeep(self): 171 | # Regression test for a bug where we were making shallow copies of 172 | # Rules objects and therefore all Rules objects shared the same 173 | # dictionary for specific rules. 
174 | # 175 | # The first pair should bring in a rule from testdata/allowed/DEPS 176 | # into that global dictionary that allows the 177 | # temp_allowed_for_tests.h file to be included in files ending 178 | # with _unittest.cc, and the second pair should completely fail 179 | # once the bug is fixed, but succeed (with a temporary allowance) 180 | # if the bug is in place. 181 | problems = self.deps_checker.CheckAddedCppIncludes( 182 | [['buildtools/checkdeps/testdata/allowed/test.cc', 183 | ['#include "buildtools/checkdeps/testdata/disallowed/temporarily_allowed.h"'] 184 | ], 185 | ['buildtools/checkdeps/testdata/disallowed/foo_unittest.cc', 186 | ['#include "buildtools/checkdeps/testdata/bongo/temp_allowed_for_tests.h"'] 187 | ]]) 188 | # With the bug in place, there would be two problems reported, and 189 | # the second would be for foo_unittest.cc. 190 | self.assertTrue(len(problems) == 1) 191 | self.assertTrue(problems[0][0].endswith('/test.cc')) 192 | 193 | def testTraversalIsOrdered(self): 194 | dirs_traversed = [] 195 | for rules, filenames in self.deps_checker.GetAllRulesAndFiles(dir_name='buildtools'): 196 | self.assertEqual(type(filenames), list) 197 | self.assertEqual(filenames, sorted(filenames)) 198 | if filenames: 199 | dir_names = set(os.path.dirname(file) for file in filenames) 200 | self.assertEqual(1, len(dir_names)) 201 | dirs_traversed.append(dir_names.pop()) 202 | self.assertEqual(dirs_traversed, sorted(dirs_traversed)) 203 | 204 | def testCheckPartialImportsAreAllowed(self): 205 | problems = self.deps_checker.CheckAddedProtoImports( 206 | [['buildtools/checkdeps/testdata/test.proto', 207 | ['import "no_rule_for_this/nogood.proto"'] 208 | ]]) 209 | self.assertFalse(problems) 210 | 211 | def testCheckAddedFullPathImportsAllowed(self): 212 | problems = self.deps_checker.CheckAddedProtoImports( 213 | [['buildtools/checkdeps/testdata/test.proto', 214 | ['import "buildtools/checkdeps/testdata/allowed/good.proto"', 215 | 'import 
"buildtools/checkdeps/testdata/disallowed/sub_folder/good.proto"'] 216 | ]]) 217 | self.assertFalse(problems) 218 | 219 | def testCheckAddedFullPathImportsDisallowed(self): 220 | problems = self.deps_checker.CheckAddedProtoImports( 221 | [['buildtools/checkdeps/testdata/test.proto', 222 | ['import "buildtools/checkdeps/testdata/disallowed/bad.proto"'] 223 | ]]) 224 | self.assertTrue(problems) 225 | 226 | def testCheckAddedFullPathImportsManyGarbageLines(self): 227 | garbage_lines = ["My name is Sam%d\n" % num for num in range(50)] 228 | problems = self.deps_checker.CheckAddedProtoImports( 229 | [['buildtools/checkdeps/testdata/test.proto', 230 | garbage_lines]]) 231 | self.assertFalse(problems) 232 | 233 | def testCheckAddedIncludesNoRuleFullPath(self): 234 | problems = self.deps_checker.CheckAddedProtoImports( 235 | [['buildtools/checkdeps/testdata/test.proto', 236 | ['import "tools/some.proto"'] 237 | ]]) 238 | self.assertTrue(problems) 239 | 240 | if __name__ == '__main__': 241 | unittest.main() 242 | -------------------------------------------------------------------------------- /checkdeps/checkdeps.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Copyright 2012 The Chromium Authors 3 | # Use of this source code is governed by a BSD-style license that can be 4 | # found in the LICENSE file. 5 | 6 | """Makes sure that files include headers from allowed directories. 7 | 8 | Checks DEPS files in the source tree for rules, and applies those rules to 9 | "#include" and "import" directives in the .cpp, .java, and .proto source files. 10 | Any source file including something not permitted by the DEPS files will fail. 11 | 12 | See README.md for a detailed description of the DEPS format. 
13 | """ 14 | 15 | 16 | 17 | import os 18 | import optparse 19 | import re 20 | import sys 21 | 22 | import proto_checker 23 | import cpp_checker 24 | import java_checker 25 | import results 26 | 27 | from builddeps import DepsBuilder 28 | from rules import Rule, Rules 29 | 30 | 31 | def _IsTestFile(filename): 32 | """Does a rudimentary check to try to skip test files; this could be 33 | improved but is good enough for now. 34 | """ 35 | return re.match(r'(test|mock|dummy)_.*|.*_[a-z]*test\.(cc|mm|java)', filename) 36 | 37 | 38 | class DepsChecker(DepsBuilder): 39 | """Parses include_rules from DEPS files and verifies files in the 40 | source tree against them. 41 | """ 42 | 43 | def __init__(self, 44 | base_directory=None, 45 | extra_repos=[], 46 | verbose=False, 47 | being_tested=False, 48 | ignore_temp_rules=False, 49 | skip_tests=False, 50 | resolve_dotdot=True): 51 | """Creates a new DepsChecker. 52 | 53 | Args: 54 | base_directory: OS-compatible path to root of checkout, e.g. C:\chr\src. 55 | verbose: Set to true for debug output. 56 | being_tested: Set to true to ignore the DEPS file at 57 | buildtools/checkdeps/DEPS. 58 | ignore_temp_rules: Ignore rules that start with Rule.TEMP_ALLOW ("!"). 59 | """ 60 | DepsBuilder.__init__( 61 | self, base_directory, extra_repos, verbose, being_tested, 62 | ignore_temp_rules) 63 | 64 | self._skip_tests = skip_tests 65 | self._resolve_dotdot = resolve_dotdot 66 | self.results_formatter = results.NormalResultsFormatter(verbose) 67 | 68 | def Report(self): 69 | """Prints a report of results, and returns an exit code for the process.""" 70 | if self.results_formatter.GetResults(): 71 | self.results_formatter.PrintResults() 72 | return 1 73 | print('\nSUCCESS\n') 74 | return 0 75 | 76 | def CheckDirectory(self, start_dir): 77 | """Checks all relevant source files in the specified directory and 78 | its subdirectories for compliance with DEPS rules throughout the 79 | tree (starting at |self.base_directory|). 
|start_dir| must be a 80 | subdirectory of |self.base_directory|. 81 | 82 | On completion, self.results_formatter has the results of 83 | processing, and calling Report() will print a report of results. 84 | """ 85 | java = java_checker.JavaChecker(self.base_directory, self.verbose) 86 | cpp = cpp_checker.CppChecker( 87 | self.verbose, self._resolve_dotdot, self.base_directory) 88 | proto = proto_checker.ProtoChecker( 89 | self.verbose, self._resolve_dotdot, self.base_directory) 90 | checkers = dict( 91 | (extension, checker) 92 | for checker in [java, cpp, proto] for extension in checker.EXTENSIONS) 93 | 94 | for rules, file_paths in self.GetAllRulesAndFiles(start_dir): 95 | for full_name in file_paths: 96 | if self._skip_tests and _IsTestFile(os.path.basename(full_name)): 97 | continue 98 | file_extension = os.path.splitext(full_name)[1] 99 | if not file_extension in checkers: 100 | continue 101 | checker = checkers[file_extension] 102 | file_status = checker.CheckFile(rules, full_name) 103 | if file_status.HasViolations(): 104 | self.results_formatter.AddError(file_status) 105 | 106 | def CheckIncludesAndImports(self, added_lines, checker): 107 | """Check new import/#include statements added in the change 108 | being presubmit checked. 109 | 110 | Args: 111 | added_lines: ((file_path, (changed_line, changed_line, ...), ...) 112 | checker: CppChecker/JavaChecker/ProtoChecker checker instance 113 | 114 | Return: 115 | A list of tuples, (bad_file_path, rule_type, rule_description) 116 | where rule_type is one of Rule.DISALLOW or Rule.TEMP_ALLOW and 117 | rule_description is human-readable. Empty if no problems. 
118 | """ 119 | problems = [] 120 | for file_path, changed_lines in added_lines: 121 | if not checker.ShouldCheck(file_path): 122 | continue 123 | rules_for_file = self.GetDirectoryRules(os.path.dirname(file_path)) 124 | if not rules_for_file: 125 | continue 126 | for line in changed_lines: 127 | is_include, violation = checker.CheckLine( 128 | rules_for_file, line, file_path, True) 129 | if not violation: 130 | continue 131 | rule_type = violation.violated_rule.allow 132 | if rule_type == Rule.ALLOW: 133 | continue 134 | violation_text = results.NormalResultsFormatter.FormatViolation( 135 | violation, self.verbose) 136 | problems.append((file_path, rule_type, violation_text)) 137 | return problems 138 | 139 | def CheckAddedCppIncludes(self, added_includes): 140 | """This is used from PRESUBMIT.py to check new #include statements added in 141 | the change being presubmit checked. 142 | 143 | Args: 144 | added_includes: ((file_path, (include_line, include_line, ...), ...) 145 | 146 | Return: 147 | A list of tuples, (bad_file_path, rule_type, rule_description) 148 | where rule_type is one of Rule.DISALLOW or Rule.TEMP_ALLOW and 149 | rule_description is human-readable. Empty if no problems. 150 | """ 151 | return self.CheckIncludesAndImports( 152 | added_includes, cpp_checker.CppChecker(self.verbose)) 153 | 154 | def CheckAddedJavaImports(self, added_imports, 155 | allow_multiple_definitions=None): 156 | """This is used from PRESUBMIT.py to check new import statements added in 157 | the change being presubmit checked. 158 | 159 | Args: 160 | added_imports: ((file_path, (import_line, import_line, ...), ...) 161 | allow_multiple_definitions: [file_name, file_name, ...]. List of java 162 | file names allowing multiple definitions in 163 | presubmit check. 164 | 165 | Return: 166 | A list of tuples, (bad_file_path, rule_type, rule_description) 167 | where rule_type is one of Rule.DISALLOW or Rule.TEMP_ALLOW and 168 | rule_description is human-readable. 
    Empty if no problems.
    """
    return self.CheckIncludesAndImports(
        added_imports,
        java_checker.JavaChecker(self.base_directory, self.verbose,
                                 added_imports, allow_multiple_definitions))

  def CheckAddedProtoImports(self, added_imports):
    """This is used from PRESUBMIT.py to check new #import statements added in
    the change being presubmit checked.

    Args:
      added_imports : ((file_path, (import_line, import_line, ...), ...)

    Return:
      A list of tuples, (bad_file_path, rule_type, rule_description)
      where rule_type is one of Rule.DISALLOW or Rule.TEMP_ALLOW and
      rule_description is human-readable. Empty if no problems.
    """
    # Delegate to the generic checker, plugging in a proto-specific checker.
    return self.CheckIncludesAndImports(
        added_imports, proto_checker.ProtoChecker(
            verbose=self.verbose, root_dir=self.base_directory))


def PrintUsage():
  # Short command-line help for the common flags; --help shows the full set.
  print("""Usage: python checkdeps.py [--root ] [tocheck]

  --root ROOT Specifies the repository root. This defaults to "../../.."
              relative to the script file. This will be correct given the
              normal location of the script in "/buildtools/checkdeps".

  --(others) There are a few lesser-used options; run with --help to show them.

  tocheck Specifies the directory, relative to root, to check. This defaults
          to "." so it checks everything.

Examples:
  python checkdeps.py
  python checkdeps.py --root c:\\source chrome""")


def main():
  # Entry point: parse flags, build a DepsChecker, run it over the requested
  # subtree and report violations via the selected results formatter.
  option_parser = optparse.OptionParser()
  option_parser.add_option(
      '', '--root',
      default='', dest='base_directory',
      help='Specifies the repository root. This defaults '
           'to "../../.." relative to the script file, which '
           'will normally be the repository root.')
  option_parser.add_option(
      '', '--extra-repos',
      action='append', dest='extra_repos', default=[],
      help='Specifies extra repositories relative to root repository.')
  option_parser.add_option(
      '', '--ignore-temp-rules',
      action='store_true', dest='ignore_temp_rules', default=False,
      help='Ignore !-prefixed (temporary) rules.')
  option_parser.add_option(
      '', '--generate-temp-rules',
      action='store_true', dest='generate_temp_rules', default=False,
      help='Print rules to temporarily allow files that fail '
           'dependency checking.')
  option_parser.add_option(
      '', '--count-violations',
      action='store_true', dest='count_violations', default=False,
      help='Count #includes in violation of intended rules.')
  option_parser.add_option(
      '', '--skip-tests',
      action='store_true', dest='skip_tests', default=False,
      help='Skip checking test files (best effort).')
  option_parser.add_option(
      '-v', '--verbose',
      action='store_true', default=False,
      help='Print debug logging')
  option_parser.add_option(
      '', '--json',
      help='Path to JSON output file')
  option_parser.add_option(
      '', '--no-resolve-dotdot',
      action='store_false', dest='resolve_dotdot', default=True,
      help='resolve leading ../ in include directive paths relative '
           'to the file perfoming the inclusion.')

  options, args = option_parser.parse_args()

  deps_checker = DepsChecker(options.base_directory,
                             extra_repos=options.extra_repos,
                             verbose=options.verbose,
                             ignore_temp_rules=options.ignore_temp_rules,
                             skip_tests=options.skip_tests,
                             resolve_dotdot=options.resolve_dotdot)
  base_directory = deps_checker.base_directory  # Default if needed, normalized

  # Figure out which directory we have to check.
  start_dir = base_directory
  if len(args) == 1:
    # Directory specified. Start here. It's supposed to be relative to the
    # base directory.
    start_dir = os.path.abspath(os.path.join(base_directory, args[0]))
  elif len(args) >= 2 or (options.generate_temp_rules and
                          options.count_violations):
    # More than one argument, or incompatible flags, we don't handle this.
    PrintUsage()
    return 1

  # NOTE(review): plain string-prefix test; a sibling dir whose name extends
  # the base directory's (e.g. "/src2" vs "/src") would pass. In practice
  # start_dir is built by joining onto base_directory above, so this holds.
  if not start_dir.startswith(deps_checker.base_directory):
    print('Directory to check must be a subdirectory of the base directory,')
    print('but %s is not a subdirectory of %s' % (start_dir, base_directory))
    return 1

  print('Using base directory:', base_directory)
  print('Checking:', start_dir)

  # Select an alternate results formatter if requested; --json wraps
  # whichever formatter is active at that point.
  if options.generate_temp_rules:
    deps_checker.results_formatter = results.TemporaryRulesFormatter()
  elif options.count_violations:
    deps_checker.results_formatter = results.CountViolationsFormatter()

  if options.json:
    deps_checker.results_formatter = results.JSONResultsFormatter(
        options.json, deps_checker.results_formatter)

  deps_checker.CheckDirectory(start_dir)
  return deps_checker.Report()


if '__main__' == __name__:
  sys.exit(main())
--------------------------------------------------------------------------------
/checkdeps/builddeps.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Traverses the source tree, parses all found DEPS files, and constructs
a dependency rule table to be used by subclasses.

See README.md for the format of the deps file.
"""



import copy
import os.path
import posixpath
import subprocess

from rules import Rule, Rules


# Variable name used in the DEPS file to add or subtract include files from
# the module-level deps.
INCLUDE_RULES_VAR_NAME = 'include_rules'

# Variable name used in the DEPS file to add or subtract include files
# from module-level deps specific to files whose basename (last
# component of path) matches a given regular expression.
SPECIFIC_INCLUDE_RULES_VAR_NAME = 'specific_include_rules'

# Optionally present in the DEPS file to list subdirectories which should not
# be checked. This allows us to skip third party code, for example.
SKIP_SUBDIRS_VAR_NAME = 'skip_child_includes'

# Optionally discard rules from parent directories, similar to "noparent" in
# OWNERS files. For example, if //ash/components has "noparent = True" then
# it will not inherit rules from //ash/DEPS, forcing each //ash/component/foo
# to declare all its dependencies.
NOPARENT_VAR_NAME = 'noparent'


class DepsBuilderError(Exception):
  """Base class for exceptions in this module."""
  pass


def NormalizePath(path):
  """Returns a path normalized to how we write DEPS rules and compare paths.

  Normalized means case-folded via os.path.normcase and using '/' as the
  path separator regardless of platform.
  """
  return os.path.normcase(path).replace(os.path.sep, posixpath.sep)


def _GitSourceDirectories(base_directory):
  """Returns set of normalized paths to subdirectories containing sources
  managed by git.

  Args:
    base_directory: Local absolute path of the git checkout to enumerate.

  Returns:
    A set of normalized directory paths (see NormalizePath) covering every
    directory holding at least one git-tracked file, plus all ancestors of
    those directories up to and including |base_directory|.
  """
  base_dir_norm = NormalizePath(base_directory)
  git_source_directories = set([base_dir_norm])

  # On Windows 'git' is typically a batch file wrapper.
  git_cmd = 'git.bat' if os.name == 'nt' else 'git'
  git_ls_files_cmd = [git_cmd, 'ls-files']
  # Popen is used as a context manager (supported since Python 3.2); __exit__
  # closes the stdout pipe and waits for the child, which replaces the old
  # nested try/finally bookkeeping (and resolves the FIXME that asked for it).
  with subprocess.Popen(git_ls_files_cmd,
                        stdout=subprocess.PIPE,
                        cwd=base_directory) as popen:
    for line in popen.stdout.read().decode('utf-8').splitlines():
      dir_path = os.path.join(base_directory, os.path.dirname(line))
      dir_path_norm = NormalizePath(dir_path)
      # Add the directory as well as all the parent directories,
      # stopping once we reach an already-listed directory.
      while dir_path_norm not in git_source_directories:
        git_source_directories.add(dir_path_norm)
        dir_path_norm = posixpath.dirname(dir_path_norm)

  return git_source_directories


class DepsBuilder(object):
  """Parses include_rules from DEPS files."""

  def __init__(self,
               base_directory=None,
               extra_repos=None,
               verbose=False,
               being_tested=False,
               ignore_temp_rules=False,
               ignore_specific_rules=False):
    """Creates a new DepsBuilder.

    Args:
      base_directory: local path to root of checkout, e.g. C:\\chr\\src.
      extra_repos: Extra repository paths, relative to |base_directory|.
          Defaults to no extra repositories.
      verbose: Set to True for debug output.
      being_tested: Set to True to ignore the DEPS file at
          buildtools/checkdeps/DEPS.
      ignore_temp_rules: Ignore rules that start with Rule.TEMP_ALLOW ("!").
      ignore_specific_rules: Ignore specific_include_rules sections.

    Raises:
      DepsBuilderError: If |base_directory| is not a git or svn checkout root.
    """
    base_directory = (base_directory or
                      os.path.join(os.path.dirname(__file__),
                                   os.path.pardir, os.path.pardir))
    self.base_directory = os.path.abspath(base_directory)  # Local absolute path
    # The old default of extra_repos=[] was a mutable default argument shared
    # across calls; None with a fallback behaves identically for all callers.
    self.extra_repos = extra_repos or []
    self.verbose = verbose
    self._under_test = being_tested
    self._ignore_temp_rules = ignore_temp_rules
    self._ignore_specific_rules = ignore_specific_rules
    # Lazily populated on first walk by GetAllRulesAndFiles.
    self._git_source_directories = None

    if os.path.exists(os.path.join(base_directory, '.git')):
      self.is_git = True
    elif os.path.exists(os.path.join(base_directory, '.svn')):
      self.is_git = False
    else:
      raise DepsBuilderError("%s is not a repository root" % base_directory)

    # Map of normalized directory paths to rules to use for those
    # directories, or None for directories that should be skipped.
    # Normalized is: absolute, lowercase, / for separator.
    self.directory_rules = {}
    self._ApplyDirectoryRulesAndSkipSubdirs(Rules(), self.base_directory)

  def _ApplyRules(self, existing_rules, includes, specific_includes,
                  cur_dir_norm):
    """Applies the given include rules, returning the new rules.

    Args:
      existing_rules: A set of existing rules that will be combined.
      include: The list of rules from the "include_rules" section of DEPS.
      specific_includes: E.g. {'.*_unittest\\.cc': ['+foo', '-blat']} rules
                         from the "specific_include_rules" section of DEPS.
      cur_dir_norm: The current directory, normalized path. We will create an
                    implicit rule that allows inclusion from this directory.

    Returns: A new set of rules combining the existing_rules with the other
             arguments.
    """
    rules = copy.deepcopy(existing_rules)

    # First apply the implicit "allow" rule for the current directory.
    # Sanity check: every directory we process must live under the base
    # directory, since rule paths are expressed relative to it.
    base_dir_norm = NormalizePath(self.base_directory)
    if not cur_dir_norm.startswith(base_dir_norm):
      raise Exception(
          'Internal error: base directory is not at the beginning for\n'
          '  %s and base dir\n'
          '  %s' % (cur_dir_norm, base_dir_norm))
    relative_dir = posixpath.relpath(cur_dir_norm, base_dir_norm)

    # Make the help string a little more meaningful.
    source = relative_dir or 'top level'
    rules.AddRule('+' + relative_dir,
                  relative_dir,
                  'Default rule for ' + source)

    def ApplyOneRule(rule_str, dependee_regexp=None):
      """Deduces a sensible description for the rule being added, and
      adds the rule with its description to |rules|.

      If we are ignoring temporary rules, this function does nothing
      for rules beginning with the Rule.TEMP_ALLOW character.
      """
      if self._ignore_temp_rules and rule_str.startswith(Rule.TEMP_ALLOW):
        return

      rule_block_name = 'include_rules'
      if dependee_regexp:
        rule_block_name = 'specific_include_rules'
      if relative_dir:
        rule_description = relative_dir + "'s %s" % rule_block_name
      else:
        rule_description = 'the top level %s' % rule_block_name
      rules.AddRule(rule_str, relative_dir, rule_description, dependee_regexp)

    # Apply the additional explicit rules.
    for rule_str in includes:
      ApplyOneRule(rule_str)

    # Finally, apply the specific rules.
    if self._ignore_specific_rules:
      return rules

    for regexp, specific_rules in specific_includes.items():
      for rule_str in specific_rules:
        ApplyOneRule(rule_str, regexp)

    return rules

  def _ApplyDirectoryRules(self, existing_rules, dir_path_local_abs):
    """Combines rules from the existing rules and the new directory.

    Any directory can contain a DEPS file. Top-level DEPS files can contain
    module dependencies which are used by gclient. We use these, along with
    additional include rules and implicit rules for the given directory, to
    come up with a combined set of rules to apply for the directory.

    Args:
      existing_rules: The rules for the parent directory. We'll add-on to these.
      dir_path_local_abs: The directory path that the DEPS file may live in (if
                          it exists). This will also be used to generate the
                          implicit rules. This is a local path.

    Returns: A 2-tuple of:
      (1) the combined set of rules to apply to the sub-tree,
      (2) a list of all subdirectories that should NOT be checked, as specified
          in the DEPS file (if any).
          Subdirectories are single words, hence no OS dependence.
    """
    dir_path_norm = NormalizePath(dir_path_local_abs)

    # Check the DEPS file in this directory.
    if self.verbose:
      print('Applying rules from', dir_path_local_abs)
    # NOP stand-ins so DEPS files that use the gclient "From"/"File" syntax
    # can still be evaluated here without failing.
    def FromImpl(*_):
      pass  # NOP function so "From" doesn't fail.

    def FileImpl(_):
      pass  # NOP function so "File" doesn't fail.

    class _VarImpl:
      # Resolves Var('name') references against the DEPS file's own 'vars'
      # dictionary (captured via the shared |local_scope|).
      def __init__(self, local_scope):
        self._local_scope = local_scope

      def Lookup(self, var_name):
        """Implements the Var syntax."""
        try:
          return self._local_scope['vars'][var_name]
        except KeyError:
          raise Exception('Var is not defined: %s' % var_name)

    local_scope = {}
    global_scope = {
        'File': FileImpl,
        'From': FromImpl,
        'Var': _VarImpl(local_scope).Lookup,
        'Str': str,
    }
    deps_file_path = os.path.join(dir_path_local_abs, 'DEPS')

    # The second conditional here is to disregard the
    # buildtools/checkdeps/DEPS file while running tests. This DEPS file
    # has a skip_child_includes for 'testdata' which is necessary for
    # running production tests, since there are intentional DEPS
    # violations under the testdata directory. On the other hand when
    # running tests, we absolutely need to verify the contents of that
    # directory to trigger those intended violations and see that they
    # are handled correctly.
    if os.path.isfile(deps_file_path) and not (
        self._under_test and
        os.path.basename(dir_path_local_abs) == 'checkdeps'):
      try:
        # DEPS files are Python syntax; executing them (with the restricted
        # scopes above) is how both gclient and this tool evaluate them.
        # They are trusted checkout content, not external input.
        with open(deps_file_path) as file:
          exec(file.read(), global_scope, local_scope)
      except Exception as e:
        print('  Error reading %s: %s' % (deps_file_path, str(e)))
        raise
    elif self.verbose:
      print('  No deps file found in', dir_path_local_abs)

    # Even if a DEPS file does not exist we still invoke ApplyRules
    # to apply the implicit "allow" rule for the current directory
    include_rules = local_scope.get(INCLUDE_RULES_VAR_NAME, [])
    specific_include_rules = local_scope.get(SPECIFIC_INCLUDE_RULES_VAR_NAME,
                                             {})
    skip_subdirs = local_scope.get(SKIP_SUBDIRS_VAR_NAME, [])
    # "noparent" severs inheritance: start from an empty rule set instead of
    # the parent directory's rules.
    noparent = local_scope.get(NOPARENT_VAR_NAME, False)
    if noparent:
      parent_rules = Rules()
    else:
      parent_rules = existing_rules

    return (self._ApplyRules(parent_rules, include_rules,
                             specific_include_rules, dir_path_norm),
            skip_subdirs)

  def _ApplyDirectoryRulesAndSkipSubdirs(self, parent_rules,
                                         dir_path_local_abs):
    """Given |parent_rules| and a subdirectory |dir_path_local_abs| of the
    directory that owns the |parent_rules|, add |dir_path_local_abs|'s rules to
    |self.directory_rules|, and add None entries for any of its
    subdirectories that should be skipped.
    """
    directory_rules, excluded_subdirs = self._ApplyDirectoryRules(
        parent_rules, dir_path_local_abs)
    dir_path_norm = NormalizePath(dir_path_local_abs)
    self.directory_rules[dir_path_norm] = directory_rules
    # A None entry marks a subtree that must not be checked at all
    # (the skip_child_includes case).
    for subdir in excluded_subdirs:
      subdir_path_norm = posixpath.join(dir_path_norm, subdir)
      self.directory_rules[subdir_path_norm] = None

  def GetAllRulesAndFiles(self, dir_name=None):
    """Yields (rules, filenames) for each repository directory with DEPS rules.

    This walks the directory tree while staying in the repository. Specify
    |dir_name| to walk just one directory and its children; omit |dir_name| to
    walk the entire repository.

    Yields:
      Two-element (rules, filenames) tuples. |rules| is a rules.Rules object
      for a directory, and |filenames| is a list of the absolute local paths
      of all files in that directory.
    """
    # Lazily enumerate git-tracked directories the first time we walk; also
    # fold in any extra repositories configured at construction time.
    if self.is_git and self._git_source_directories is None:
      self._git_source_directories = _GitSourceDirectories(self.base_directory)
      for repo in self.extra_repos:
        repo_path = os.path.join(self.base_directory, repo)
        self._git_source_directories.update(_GitSourceDirectories(repo_path))

    # Collect a list of all files and directories to check.
    if dir_name and not os.path.isabs(dir_name):
      dir_name = os.path.join(self.base_directory, dir_name)
    dirs_to_check = [dir_name or self.base_directory]
    while dirs_to_check:
      current_dir = dirs_to_check.pop()

      # Check that this directory is part of the source repository. This
      # prevents us from descending into third-party code or directories
      # generated by the build system.
      if self.is_git:
        if NormalizePath(current_dir) not in self._git_source_directories:
          continue
      elif not os.path.exists(os.path.join(current_dir, '.svn')):
        continue

      current_dir_rules = self.GetDirectoryRules(current_dir)

      if not current_dir_rules:
        continue  # Handle the 'skip_child_includes' case.

      current_dir_contents = sorted(os.listdir(current_dir))
      file_names = []
      sub_dirs = []
      for file_name in current_dir_contents:
        full_name = os.path.join(current_dir, file_name)
        if os.path.isdir(full_name):
          sub_dirs.append(full_name)
        else:
          file_names.append(full_name)
      # Reversed so the LIFO stack pops subdirectories in sorted order.
      dirs_to_check.extend(reversed(sub_dirs))

      yield (current_dir_rules, file_names)

  def GetDirectoryRules(self, dir_path_local):
    """Returns a Rules object to use for the given directory, or None
    if the given directory should be skipped.

    Also modifies |self.directory_rules| to store the Rules.
    This takes care of first building rules for parent directories (up to
    |self.base_directory|) if needed, which may add rules for skipped
    subdirectories.

    Args:
      dir_path_local: A local path to the directory you want rules for.
        Can be relative and unnormalized. It is the caller's responsibility
        to ensure that this is part of the repository rooted at
        |self.base_directory|.
    """
    if os.path.isabs(dir_path_local):
      dir_path_local_abs = dir_path_local
    else:
      dir_path_local_abs = os.path.join(self.base_directory, dir_path_local)
    dir_path_norm = NormalizePath(dir_path_local_abs)

    if dir_path_norm in self.directory_rules:
      return self.directory_rules[dir_path_norm]

    # Recurse upwards so the parent's rules exist before computing ours; the
    # recursion bottoms out at base_directory, seeded in __init__.
    parent_dir_local_abs = os.path.dirname(dir_path_local_abs)
    parent_rules = self.GetDirectoryRules(parent_dir_local_abs)
    # We need to check for an entry for our dir_path again, since
    # GetDirectoryRules can modify entries for subdirectories, namely setting
    # to None if they should be skipped, via _ApplyDirectoryRulesAndSkipSubdirs.
    # For example, if dir_path == 'A/B/C' and A/B/DEPS specifies that the C
    # subdirectory be skipped, GetDirectoryRules('A/B') will fill in the entry
    # for 'A/B/C' as None.
    if dir_path_norm in self.directory_rules:
      return self.directory_rules[dir_path_norm]

    if parent_rules:
      self._ApplyDirectoryRulesAndSkipSubdirs(parent_rules, dir_path_local_abs)
    else:
      # If the parent directory should be skipped, then the current
      # directory should also be skipped.
      self.directory_rules[dir_path_norm] = None
    return self.directory_rules[dir_path_norm]
--------------------------------------------------------------------------------
/checkdeps/graphdeps.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Dumps a graph of allowed and disallowed inter-module dependencies described
by the DEPS files in the source tree. Supports DOT and PNG as the output format.

Enables filtering and differential highlighting of parts of the graph based on
the specified criteria.
This allows for a much easier visual analysis of the
dependencies, including answering questions such as "if a new source must
depend on modules A, B, and C, what valid options among the existing modules
are there to put it in."

See README.md for a detailed description of the DEPS format.
"""

import os
import optparse
import pipes
import re
import sys

from builddeps import DepsBuilder
from rules import Rule


class DepsGrapher(DepsBuilder):
  """Parses include_rules from DEPS files and outputs a DOT graph of the
  allowed and disallowed dependencies between directories and specific file
  regexps. Can generate only a subgraph of the whole dependency graph
  corresponding to the provided inclusion and exclusion regexp filters.
  Also can highlight fanins and/or fanouts of certain nodes matching the
  provided regexp patterns.
  """

  def __init__(self,
               base_directory,
               extra_repos,
               verbose,
               being_tested,
               ignore_temp_rules,
               ignore_specific_rules,
               hide_disallowed_deps,
               out_file,
               out_format,
               layout_engine,
               unflatten_graph,
               incl,
               excl,
               hilite_fanins,
               hilite_fanouts):
    """Creates a new DepsGrapher.

    Args:
      base_directory: OS-compatible path to root of checkout, e.g. C:\\chr\\src.
      verbose: Set to true for debug output.
      being_tested: Set to true to ignore the DEPS file at tools/graphdeps/DEPS.
      ignore_temp_rules: Ignore rules that start with Rule.TEMP_ALLOW ("!").
      ignore_specific_rules: Ignore rules from specific_include_rules sections.
      hide_disallowed_deps: Hide disallowed dependencies from the output graph.
      out_file: Output file name.
      out_format: Output format (anything GraphViz dot's -T option supports).
      layout_engine: Layout engine for formats other than 'dot'
                     (anything that GraphViz dot's -K option supports).
      unflatten_graph: Try to reformat the output graph so it is narrower and
                       taller. Helps fight overly flat and wide graphs, but
                       sometimes produces a worse result.
      incl: Include only nodes matching this regexp; such nodes' fanin/fanout
            is also included.
      excl: Exclude nodes matching this regexp; such nodes' fanin/fanout is
            processed independently.
      hilite_fanins: Highlight fanins of nodes matching this regexp with a
                     different edge and node color.
      hilite_fanouts: Highlight fanouts of nodes matching this regexp with a
                      different edge and node color.
    """
    DepsBuilder.__init__(
        self,
        base_directory,
        extra_repos,
        verbose,
        being_tested,
        ignore_temp_rules,
        ignore_specific_rules)

    self.ignore_temp_rules = ignore_temp_rules
    self.ignore_specific_rules = ignore_specific_rules
    self.hide_disallowed_deps = hide_disallowed_deps
    self.out_file = out_file
    self.out_format = out_format
    self.layout_engine = layout_engine
    self.unflatten_graph = unflatten_graph
    self.incl = incl
    self.excl = excl
    self.hilite_fanins = hilite_fanins
    self.hilite_fanouts = hilite_fanouts

    # Set of (allow, src, dst) dependency tuples, filled by _BuildDepsGraph.
    self.deps = set()

  def DumpDependencies(self):
    """ Builds a dependency rule table and dumps the corresponding dependency
    graph to all requested formats."""
    self._BuildDepsGraph()
    self._DumpDependencies()

  def _BuildDepsGraph(self):
    """Recursively traverses the source tree starting at the specified directory
    and builds a dependency graph representation in self.deps."""
    for (rules, _) in self.GetAllRulesAndFiles():
      deps = rules.AsDependencyTuples(
          include_general_rules=True,
          include_specific_rules=not self.ignore_specific_rules)
      self.deps.update(deps)

  def _DumpDependencies(self):
    """Dumps the built dependency graph to the specified file with specified
    format."""
    # Build the output sink: either a plain file (raw DOT output) or a shell
    # pipeline through GraphViz's 'unflatten' and/or 'dot', as requested.
    if self.out_format == 'dot' and not self.layout_engine:
      if self.unflatten_graph:
        pipe = pipes.Template()
        pipe.append('unflatten -l 2 -c 3', '--')
        out = pipe.open(self.out_file, 'w')
      else:
        out = open(self.out_file, 'w')
    else:
      pipe = pipes.Template()
      if self.unflatten_graph:
        pipe.append('unflatten -l 2 -c 3', '--')
      dot_cmd = 'dot -T' + self.out_format
      if self.layout_engine:
        dot_cmd += ' -K' + self.layout_engine
      pipe.append(dot_cmd, '--')
      out = pipe.open(self.out_file, 'w')

    self._DumpDependenciesImpl(self.deps, out)
    # NOTE(review): _DumpDependenciesImpl already closes |out|; this second
    # close is redundant (harmless for file objects, close is idempotent).
    out.close()

  def _DumpDependenciesImpl(self, deps, out):
    """Computes nodes' and edges' properties for the dependency graph |deps| and
    carries out the actual dumping to a file/pipe |out|."""
    deps_graph = dict()
    deps_srcs = set()

    # Pre-initialize the graph with src->(dst, allow) pairs.
    for (allow, src, dst) in deps:
      if allow == Rule.TEMP_ALLOW and self.ignore_temp_rules:
        continue

      deps_srcs.add(src)
      if src not in deps_graph:
        deps_graph[src] = []
      deps_graph[src].append((dst, allow))

      # Add all hierarchical parents too, in case some of them don't have their
      # own DEPS, and therefore are missing from the list of rules. Those will
      # be recursively populated with their parents' rules in the next block.
      parent_src = os.path.dirname(src)
      while parent_src:
        if parent_src not in deps_graph:
          deps_graph[parent_src] = []
        parent_src = os.path.dirname(parent_src)

    # For every node, propagate its rules down to all its children.
    deps_srcs = list(deps_srcs)
    deps_srcs.sort()
    for src in deps_srcs:
      parent_src = os.path.dirname(src)
      if parent_src:
        # We presort the list, so parents are guaranteed to precede children.
        assert parent_src in deps_graph,\
            "src: %s, parent_src: %s" % (src, parent_src)
        for (dst, allow) in deps_graph[parent_src]:
          # Check that this node does not explicitly override a rule from the
          # parent that we're about to add.
          if ((dst, Rule.ALLOW) not in deps_graph[src]) and \
             ((dst, Rule.TEMP_ALLOW) not in deps_graph[src]) and \
             ((dst, Rule.DISALLOW) not in deps_graph[src]):
            deps_graph[src].append((dst, allow))

    node_props = {}
    edges = []

    # 1) Populate a list of edge specifications in DOT format;
    # 2) Populate a list of computed raw node attributes to be output as node
    #    specifications in DOT format later on.
    # Edges and nodes are emphasized with color and line/border weight depending
    # on how many of incl/excl/hilite_fanins/hilite_fanouts filters they hit,
    # and in what way.
    for src in deps_graph.keys():
      for (dst, allow) in deps_graph[src]:
        if allow == Rule.DISALLOW and self.hide_disallowed_deps:
          continue

        # Trivial self-dependencies are noise; skip them.
        if allow == Rule.ALLOW and src == dst:
          continue

        edge_spec = "%s->%s" % (src, dst)
        if not re.search(self.incl, edge_spec) or \
           re.search(self.excl, edge_spec):
          continue

        if src not in node_props:
          node_props[src] = {'hilite': None, 'degree': 0}
        if dst not in node_props:
          node_props[dst] = {'hilite': None, 'degree': 0}

        edge_weight = 1

        if self.hilite_fanouts and re.search(self.hilite_fanouts, src):
          node_props[src]['hilite'] = 'lightgreen'
          node_props[dst]['hilite'] = 'lightblue'
          node_props[dst]['degree'] += 1
          edge_weight += 1

        if self.hilite_fanins and re.search(self.hilite_fanins, dst):
          node_props[src]['hilite'] = 'lightblue'
          node_props[dst]['hilite'] = 'lightgreen'
          node_props[src]['degree'] += 1
          edge_weight += 1

        # (cond and A or B) is the legacy pre-ternary idiom; it is safe here
        # because 'blue' is truthy.
        if allow == Rule.ALLOW:
          edge_color = (edge_weight > 1) and 'blue' or 'green'
          edge_style = 'solid'
        elif allow == Rule.TEMP_ALLOW:
          edge_color = (edge_weight > 1) and 'blue' or 'green'
          edge_style = 'dashed'
        else:
          edge_color = 'red'
          edge_style = 'dashed'
        edges.append('  "%s" -> "%s" [style=%s,color=%s,penwidth=%d];' % \
            (src, dst, edge_style, edge_color, edge_weight))

    # Reformat the computed raw node attributes into a final DOT representation.
    nodes = []
    for (node, attrs) in node_props.items():
      attr_strs = []
      if attrs['hilite']:
        attr_strs.append('style=filled,fillcolor=%s' % attrs['hilite'])
      attr_strs.append('penwidth=%d' % (attrs['degree'] or 1))
      nodes.append('  "%s" [%s];' % (node, ','.join(attr_strs)))

    # Output nodes and edges to |out| (can be a file or a pipe).
    edges.sort()
    nodes.sort()
    out.write('digraph DEPS {\n'
              '  fontsize=8;\n')
    out.write('\n'.join(nodes))
    out.write('\n\n')
    out.write('\n'.join(edges))
    out.write('\n}\n')
    out.close()


def PrintUsage():
  # Short command-line help with worked examples; --help shows all options.
  print("""Usage: python graphdeps.py [--root ]

  --root ROOT Specifies the repository root. This defaults to "../../.."
              relative to the script file. This will be correct given the
              normal location of the script in "/tools/graphdeps".

  --(others) There are a few lesser-used options; run with --help to show them.

Examples:
  Dump the whole dependency graph:
    graphdeps.py
  Find a suitable place for a new source that must depend on /apps and
  /content/browser/renderer_host. Limit potential candidates to /apps,
  /chrome/browser and content/browser, and descendants of those three.
  Generate both DOT and PNG output. The output will highlight the fanins
  of /apps and /content/browser/renderer_host. Overlapping nodes in both fanins
  will be emphasized by a thicker border.
Those nodes are the ones that are 273 | allowed to depend on both targets, therefore they are all legal candidates 274 | to place the new source in: 275 | graphdeps.py \ 276 | --root=./src \ 277 | --out=./DEPS.svg \ 278 | --format=svg \ 279 | --incl='^(apps|chrome/browser|content/browser)->.*' \ 280 | --excl='.*->third_party' \ 281 | --fanin='^(apps|content/browser/renderer_host)$' \ 282 | --ignore-specific-rules \ 283 | --ignore-temp-rules""") 284 | 285 | 286 | def main(): 287 | option_parser = optparse.OptionParser() 288 | option_parser.add_option( 289 | "", "--root", 290 | default="", dest="base_directory", 291 | help="Specifies the repository root. This defaults " 292 | "to '../../..' relative to the script file, which " 293 | "will normally be the repository root.") 294 | option_parser.add_option( 295 | '', '--extra-repos', 296 | action='append', dest='extra_repos', default=[], 297 | help='Specifies extra repositories relative to root repository.') 298 | option_parser.add_option( 299 | "-f", "--format", 300 | dest="out_format", default="dot", 301 | help="Output file format. " 302 | "Can be anything that GraphViz dot's -T option supports. " 303 | "The most useful ones are: dot (text), svg (image), pdf (image)." 304 | "NOTES: dotty has a known problem with fonts when displaying DOT " 305 | "files on Ubuntu - if labels are unreadable, try other formats.") 306 | option_parser.add_option( 307 | "-o", "--out", 308 | dest="out_file", default="DEPS", 309 | help="Output file name. If the name does not end in an extension " 310 | "matching the output format, that extension is automatically " 311 | "appended.") 312 | option_parser.add_option( 313 | "-l", "--layout-engine", 314 | dest="layout_engine", default="", 315 | help="Layout rendering engine. " 316 | "Can be anything that GraphViz dot's -K option supports. " 317 | "The most useful are in decreasing order: dot, fdp, circo, osage. 
" 318 | "NOTE: '-f dot' and '-f dot -l dot' are different: the former " 319 | "will dump a raw DOT graph and stop; the latter will further " 320 | "filter it through 'dot -Tdot -Kdot' layout engine.") 321 | option_parser.add_option( 322 | "-i", "--incl", 323 | default="^.*$", dest="incl", 324 | help="Include only edges of the graph that match the specified regexp. " 325 | "The regexp is applied to edges of the graph formatted as " 326 | "'source_node->target_node', where the '->' part is vebatim. " 327 | "Therefore, a reliable regexp should look like " 328 | "'^(chrome|chrome/browser|chrome/common)->content/public/browser$' " 329 | "or similar, with both source and target node regexps present, " 330 | "explicit ^ and $, and otherwise being as specific as possible.") 331 | option_parser.add_option( 332 | "-e", "--excl", 333 | default="^$", dest="excl", 334 | help="Exclude dependent nodes that match the specified regexp. " 335 | "See --incl for details on the format.") 336 | option_parser.add_option( 337 | "", "--fanin", 338 | default="", dest="hilite_fanins", 339 | help="Highlight fanins of nodes matching the specified regexp.") 340 | option_parser.add_option( 341 | "", "--fanout", 342 | default="", dest="hilite_fanouts", 343 | help="Highlight fanouts of nodes matching the specified regexp.") 344 | option_parser.add_option( 345 | "", "--ignore-temp-rules", 346 | action="store_true", dest="ignore_temp_rules", default=False, 347 | help="Ignore !-prefixed (temporary) rules in DEPS files.") 348 | option_parser.add_option( 349 | "", "--ignore-specific-rules", 350 | action="store_true", dest="ignore_specific_rules", default=False, 351 | help="Ignore specific_include_rules section of DEPS files.") 352 | option_parser.add_option( 353 | "", "--hide-disallowed-deps", 354 | action="store_true", dest="hide_disallowed_deps", default=False, 355 | help="Hide disallowed dependencies in the output graph.") 356 | option_parser.add_option( 357 | "", "--unflatten", 358 | 
action="store_true", dest="unflatten_graph", default=False, 359 | help="Try to reformat the output graph so it is narrower and taller. " 360 | "Helps fight overly flat and wide graphs, but sometimes produces " 361 | "inferior results.") 362 | option_parser.add_option( 363 | "-v", "--verbose", 364 | action="store_true", default=False, 365 | help="Print debug logging") 366 | options, args = option_parser.parse_args() 367 | 368 | if not options.out_file.endswith(options.out_format): 369 | options.out_file += '.' + options.out_format 370 | 371 | deps_grapher = DepsGrapher( 372 | base_directory=options.base_directory, 373 | extra_repos=options.extra_repos, 374 | verbose=options.verbose, 375 | being_tested=False, 376 | 377 | ignore_temp_rules=options.ignore_temp_rules, 378 | ignore_specific_rules=options.ignore_specific_rules, 379 | hide_disallowed_deps=options.hide_disallowed_deps, 380 | 381 | out_file=options.out_file, 382 | out_format=options.out_format, 383 | layout_engine=options.layout_engine, 384 | unflatten_graph=options.unflatten_graph, 385 | 386 | incl=options.incl, 387 | excl=options.excl, 388 | hilite_fanins=options.hilite_fanins, 389 | hilite_fanouts=options.hilite_fanouts) 390 | 391 | if len(args) > 0: 392 | PrintUsage() 393 | return 1 394 | 395 | print('Using base directory: ', deps_grapher.base_directory) 396 | print('include nodes : ', options.incl) 397 | print('exclude nodes : ', options.excl) 398 | print('highlight fanins of : ', options.hilite_fanins) 399 | print('highlight fanouts of: ', options.hilite_fanouts) 400 | 401 | deps_grapher.DumpDependencies() 402 | return 0 403 | 404 | 405 | if '__main__' == __name__: 406 | sys.exit(main()) 407 | --------------------------------------------------------------------------------